/* gcc/ada/gcc-interface/utils.c  */
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2014, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
25
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "tm.h"
30 #include "tree.h"
31 #include "stringpool.h"
32 #include "stor-layout.h"
33 #include "attribs.h"
34 #include "varasm.h"
35 #include "flags.h"
36 #include "toplev.h"
37 #include "diagnostic-core.h"
38 #include "output.h"
39 #include "ggc.h"
40 #include "debug.h"
41 #include "convert.h"
42 #include "target.h"
43 #include "common/common-target.h"
44 #include "langhooks.h"
45 #include "cgraph.h"
46 #include "diagnostic.h"
47 #include "timevar.h"
48 #include "tree-dump.h"
49 #include "tree-inline.h"
50 #include "tree-iterator.h"
51
52 #include "ada.h"
53 #include "types.h"
54 #include "atree.h"
55 #include "elists.h"
56 #include "namet.h"
57 #include "nlists.h"
58 #include "stringt.h"
59 #include "uintp.h"
60 #include "fe.h"
61 #include "sinfo.h"
62 #include "einfo.h"
63 #include "ada-tree.h"
64 #include "gigi.h"
65
/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* The default alignment of "double" floating-point types, i.e. floating
   point types whose size is equal to 64 bits, or 0 if this alignment is
   not specifically capped.  */
int double_float_alignment;

/* The default alignment of "double" or larger scalar types, i.e. scalar
   types whose size is greater or equal to 64 bits, or 0 if this alignment
   is not specifically capped.  */
int double_scalar_alignment;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Likewise, but with extra info for each of the possible raise reasons.  */
tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];

/* Forward declarations for handlers of attributes.  */
static tree handle_const_attribute (tree *, tree, tree, int, bool *);
static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);

/* Fake handler for attributes we don't properly support, typically because
   they'd require dragging a lot of the common-c front-end circuitry.  */
static tree fake_attribute_handler (tree *, tree, tree, int, bool *);

/* Table of machine-independent internal attributes for Ada.  We support
   this minimal set of attributes to accommodate the needs of builtins.  */
const struct attribute_spec gnat_internal_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "const",        0, 0,  true,  false, false, handle_const_attribute,
    false },
  { "nothrow",      0, 0,  true,  false, false, handle_nothrow_attribute,
    false },
  { "pure",         0, 0,  true,  false, false, handle_pure_attribute,
    false },
  { "no vops",      0, 0,  true,  false, false, handle_novops_attribute,
    false },
  { "nonnull",      0, -1, false, true,  true,  handle_nonnull_attribute,
    false },
  { "sentinel",     0, 1,  false, true,  true,  handle_sentinel_attribute,
    false },
  { "noreturn",     0, 0,  true,  false, false, handle_noreturn_attribute,
    false },
  { "leaf",         0, 0,  true,  false, false, handle_leaf_attribute,
    false },
  { "malloc",       0, 0,  true,  false, false, handle_malloc_attribute,
    false },
  { "type generic", 0, 0,  false, true,  true,  handle_type_generic_attribute,
    false },

  { "vector_size",  1, 1,  false, true,  false, handle_vector_size_attribute,
    false },
  { "vector_type",  0, 0,  false, true,  false, handle_vector_type_attribute,
    false },
  { "may_alias",    0, 0,  false, true,  false, NULL, false },

  /* ??? format and format_arg are heavy and not supported, which actually
     prevents support for stdio builtins, which we however declare as part
     of the common builtins.def contents.  */
  { "format",     3, 3,  false, true,  true,  fake_attribute_handler, false },
  { "format_arg", 1, 1,  false, true,  true,  fake_attribute_handler, false },

  /* Sentinel entry terminating the table.  */
  { NULL,         0, 0,  false, false, false, NULL, false }
};

/* Associates a GNAT tree node to a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See documentation
   of `save_gnu_tree' for more info.  The table is indexed by GNAT node id
   offset by First_Node_Id, see the accessor macros below.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

#define GET_GNU_TREE(GNAT_ENTITY)	\
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]

#define SET_GNU_TREE(GNAT_ENTITY,VAL)	\
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_GNU_TREE(GNAT_ENTITY)	\
  (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* Associates a GNAT entity to a GCC tree node used as a dummy, if any.
   Indexed the same way as associate_gnat_to_gnu above.  */
static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;

#define GET_DUMMY_NODE(GNAT_ENTITY)	\
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id]

#define SET_DUMMY_NODE(GNAT_ENTITY,VAL)	\
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_DUMMY_NODE(GNAT_ENTITY)	\
  (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* This variable keeps a table for types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];

/* For each binding contour we allocate a binding_level structure to indicate
   the binding depth.  */

struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
  /* The binding level containing this one (the enclosing binding level).  */
  struct gnat_binding_level *chain;
  /* The BLOCK node for this level.  */
  tree block;
  /* If nonzero, the setjmp buffer that needs to be updated for any
     variable-sized definition within this context.  */
  tree jmpbuf_decl;
};

/* The binding level currently in effect.  */
static GTY(()) struct gnat_binding_level *current_binding_level;

/* A chain of gnat_binding_level structures awaiting reuse.
   Marked deletable so the GC may reclaim it across collections.  */
static GTY((deletable)) struct gnat_binding_level *free_binding_level;

/* The context to be used for global declarations.  */
static GTY(()) tree global_context;

/* An array of global declarations.  */
static GTY(()) vec<tree, va_gc> *global_decls;

/* An array of builtin function declarations.  */
static GTY(()) vec<tree, va_gc> *builtin_decls;

/* An array of global renaming pointers.  */
static GTY(()) vec<tree, va_gc> *global_renaming_pointers;

/* A chain of unused BLOCK nodes.  Deletable for the same reason as
   free_binding_level above.  */
static GTY((deletable)) tree free_block_chain;

static int pad_type_hash_marked_p (const void *p);
static hashval_t pad_type_hash_hash (const void *p);
static int pad_type_hash_eq (const void *p1, const void *p2);

/* A hash table of padded types.  It is modelled on the generic type
   hash table in tree.c, which must thus be used as a reference.  */
struct GTY(()) pad_type_hash {
  unsigned long hash;
  tree type;
};

static GTY ((if_marked ("pad_type_hash_marked_p"),
	     param_is (struct pad_type_hash)))
  htab_t pad_type_hash_table;

static tree merge_sizes (tree, tree, tree, bool, bool);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static tree float_type_for_precision (int, enum machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static unsigned int scale_by_factor_of (tree, unsigned int);
static bool potential_alignment_gap (tree, tree, tree);
241 \f
242 /* Initialize data structures of the utils.c module. */
243
244 void
245 init_gnat_utils (void)
246 {
247 /* Initialize the association of GNAT nodes to GCC trees. */
248 associate_gnat_to_gnu = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
249
250 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
251 dummy_node_table = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);
252
253 /* Initialize the hash table of padded types. */
254 pad_type_hash_table
255 = htab_create_ggc (512, pad_type_hash_hash, pad_type_hash_eq, 0);
256 }
257
258 /* Destroy data structures of the utils.c module. */
259
260 void
261 destroy_gnat_utils (void)
262 {
263 /* Destroy the association of GNAT nodes to GCC trees. */
264 ggc_free (associate_gnat_to_gnu);
265 associate_gnat_to_gnu = NULL;
266
267 /* Destroy the association of GNAT nodes to GCC trees as dummies. */
268 ggc_free (dummy_node_table);
269 dummy_node_table = NULL;
270
271 /* Destroy the hash table of padded types. */
272 htab_delete (pad_type_hash_table);
273 pad_type_hash_table = NULL;
274
275 /* Invalidate the global renaming pointers. */
276 invalidate_global_renaming_pointers ();
277 }
278 \f
279 /* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
280 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
281 If NO_CHECK is true, the latter check is suppressed.
282
283 If GNU_DECL is zero, reset a previous association. */
284
285 void
286 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
287 {
288 /* Check that GNAT_ENTITY is not already defined and that it is being set
289 to something which is a decl. If that is not the case, this usually
290 means GNAT_ENTITY is defined twice, but occasionally is due to some
291 Gigi problem. */
292 gcc_assert (!(gnu_decl
293 && (PRESENT_GNU_TREE (gnat_entity)
294 || (!no_check && !DECL_P (gnu_decl)))));
295
296 SET_GNU_TREE (gnat_entity, gnu_decl);
297 }
298
299 /* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
300 that was associated with it. If there is no such tree node, abort.
301
302 In some cases, such as delayed elaboration or expressions that need to
303 be elaborated only once, GNAT_ENTITY is really not an entity. */
304
305 tree
306 get_gnu_tree (Entity_Id gnat_entity)
307 {
308 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
309 return GET_GNU_TREE (gnat_entity);
310 }
311
312 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
313
314 bool
315 present_gnu_tree (Entity_Id gnat_entity)
316 {
317 return PRESENT_GNU_TREE (gnat_entity);
318 }
319 \f
/* Make a dummy type corresponding to GNAT_TYPE.  */

tree
make_dummy_type (Entity_Id gnat_type)
{
  Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
  tree gnu_type;

  /* If there is an equivalent type, get its underlying type.  */
  if (Present (gnat_underlying))
    gnat_underlying = Gigi_Equivalent_Type (Underlying_Type (gnat_underlying));

  /* If there was no equivalent type (can only happen when just annotating
     types) or underlying type, go back to the original type.  */
  if (No (gnat_underlying))
    gnat_underlying = gnat_type;

  /* If there already is a dummy type, use that one.  Else make one.  */
  if (PRESENT_DUMMY_NODE (gnat_underlying))
    return GET_DUMMY_NODE (gnat_underlying);

  /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
     an ENUMERAL_TYPE.  */
  gnu_type = make_node (Is_Record_Type (gnat_underlying)
			? tree_code_for_record_type (gnat_underlying)
			: ENUMERAL_TYPE);
  TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
  TYPE_DUMMY_P (gnu_type) = 1;
  TYPE_STUB_DECL (gnu_type)
    = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
  if (Is_By_Reference_Type (gnat_underlying))
    TYPE_BY_REFERENCE_P (gnu_type) = 1;

  /* Record the dummy so a later call for the same entity reuses it.  */
  SET_DUMMY_NODE (gnat_underlying, gnu_type);

  return gnu_type;
}
357
358 /* Return the dummy type that was made for GNAT_TYPE, if any. */
359
360 tree
361 get_dummy_type (Entity_Id gnat_type)
362 {
363 return GET_DUMMY_NODE (gnat_type);
364 }
365
/* Build dummy fat and thin pointer types whose designated type is specified
   by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter.  */

void
build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
{
  tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
  tree gnu_fat_type, fields, gnu_object_type;

  /* The dummy bounds template type ("XUB" suffix) and a pointer to it.  */
  gnu_template_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
  TYPE_DUMMY_P (gnu_template_type) = 1;
  gnu_ptr_template = build_pointer_type (gnu_template_type);

  /* The dummy array type ("XUA" suffix) and a pointer to it.  */
  gnu_array_type = make_node (ENUMERAL_TYPE);
  TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
  TYPE_DUMMY_P (gnu_array_type) = 1;
  gnu_ptr_array = build_pointer_type (gnu_array_type);

  /* The fat pointer type ("XUP" suffix): a record holding a pointer to
     the array data and a pointer to the bounds template.  */
  gnu_fat_type = make_node (RECORD_TYPE);
  /* Build a stub DECL to trigger the special processing for fat pointer types
     in gnat_pushdecl.  */
  TYPE_NAME (gnu_fat_type)
    = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
			     gnu_fat_type);
  fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
			      gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  DECL_CHAIN (fields)
    = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
			 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  finish_fat_pointer_type (gnu_fat_type, fields);
  SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
  /* Suppress debug info until after the type is completed.  */
  TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;

  /* The dummy object record type ("XUT" suffix); presumably it backs the
     thin pointer representation mentioned above -- confirm against the
     callers that complete these types.  */
  gnu_object_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
  TYPE_DUMMY_P (gnu_object_type) = 1;

  /* Attach the fat pointer and object record types to the designated
     type.  */
  TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
  TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
}
408 \f
409 /* Return true if we are in the global binding level. */
410
411 bool
412 global_bindings_p (void)
413 {
414 return force_global || current_function_decl == NULL_TREE;
415 }
416
417 /* Enter a new binding level. */
418
419 void
420 gnat_pushlevel (void)
421 {
422 struct gnat_binding_level *newlevel = NULL;
423
424 /* Reuse a struct for this binding level, if there is one. */
425 if (free_binding_level)
426 {
427 newlevel = free_binding_level;
428 free_binding_level = free_binding_level->chain;
429 }
430 else
431 newlevel = ggc_alloc<gnat_binding_level> ();
432
433 /* Use a free BLOCK, if any; otherwise, allocate one. */
434 if (free_block_chain)
435 {
436 newlevel->block = free_block_chain;
437 free_block_chain = BLOCK_CHAIN (free_block_chain);
438 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
439 }
440 else
441 newlevel->block = make_node (BLOCK);
442
443 /* Point the BLOCK we just made to its parent. */
444 if (current_binding_level)
445 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
446
447 BLOCK_VARS (newlevel->block) = NULL_TREE;
448 BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
449 TREE_USED (newlevel->block) = 1;
450
451 /* Add this level to the front of the chain (stack) of active levels. */
452 newlevel->chain = current_binding_level;
453 newlevel->jmpbuf_decl = NULL_TREE;
454 current_binding_level = newlevel;
455 }
456
457 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
458 and point FNDECL to this BLOCK. */
459
460 void
461 set_current_block_context (tree fndecl)
462 {
463 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
464 DECL_INITIAL (fndecl) = current_binding_level->block;
465 set_block_for_group (current_binding_level->block);
466 }
467
468 /* Set the jmpbuf_decl for the current binding level to DECL. */
469
470 void
471 set_block_jmpbuf_decl (tree decl)
472 {
473 current_binding_level->jmpbuf_decl = decl;
474 }
475
476 /* Get the jmpbuf_decl, if any, for the current binding level. */
477
478 tree
479 get_block_jmpbuf_decl (void)
480 {
481 return current_binding_level->jmpbuf_decl;
482 }
483
/* Exit a binding level.  Set any BLOCK into the current code group.  */

void
gnat_poplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  /* Variables and subblocks were chained in reverse order of declaration;
     restore source order before deciding what to do with the BLOCK.  */
  BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
  BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));

  /* If this is a function-level BLOCK don't do anything.  Otherwise, if there
     are no variables free the block and merge its subblocks into those of its
     parent block.  Otherwise, add it to the list of its parent.  */
  if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
    ;
  else if (BLOCK_VARS (block) == NULL_TREE)
    {
      /* Empty BLOCK: splice its subblocks into the parent and recycle the
	 node on the free chain.  */
      BLOCK_SUBBLOCKS (level->chain->block)
	= block_chainon (BLOCK_SUBBLOCKS (block),
			 BLOCK_SUBBLOCKS (level->chain->block));
      BLOCK_CHAIN (block) = free_block_chain;
      free_block_chain = block;
    }
  else
    {
      /* Non-empty BLOCK: keep it as a subblock of the parent.  */
      BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
      BLOCK_SUBBLOCKS (level->chain->block) = block;
      TREE_USED (block) = 1;
      set_block_for_group (block);
    }

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}
521
522 /* Exit a binding level and discard the associated BLOCK. */
523
524 void
525 gnat_zaplevel (void)
526 {
527 struct gnat_binding_level *level = current_binding_level;
528 tree block = level->block;
529
530 BLOCK_CHAIN (block) = free_block_chain;
531 free_block_chain = block;
532
533 /* Free this binding structure. */
534 current_binding_level = level->chain;
535 level->chain = free_binding_level;
536 free_binding_level = level;
537 }
538 \f
539 /* Set the context of TYPE and its parallel types (if any) to CONTEXT. */
540
541 static void
542 gnat_set_type_context (tree type, tree context)
543 {
544 tree decl = TYPE_STUB_DECL (type);
545
546 TYPE_CONTEXT (type) = context;
547
548 while (decl && DECL_PARALLEL_TYPE (decl))
549 {
550 TYPE_CONTEXT (DECL_PARALLEL_TYPE (decl)) = context;
551 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
552 }
553 }
554
/* Record DECL as belonging to the current lexical scope and use GNAT_NODE
   for location information and flag propagation.  */

void
gnat_pushdecl (tree decl, Node_Id gnat_node)
{
  /* If DECL is public external or at top level, it has global context.  */
  if ((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || global_bindings_p ())
    {
      /* The translation unit decl is built lazily, on the first global.  */
      if (!global_context)
	global_context = build_translation_unit_decl (NULL_TREE);
      DECL_CONTEXT (decl) = global_context;
   }
  else
    {
      DECL_CONTEXT (decl) = current_function_decl;

      /* Functions imported in another function are not really nested.
	 For really nested functions mark them initially as needing
	 a static chain for uses of that flag before unnesting;
	 lower_nested_functions will then recompute it.  */
      if (TREE_CODE (decl) == FUNCTION_DECL && !TREE_PUBLIC (decl))
	DECL_STATIC_CHAIN (decl) = 1;
    }

  /* Suppress warnings for DECL if there is no GNAT node to attach them to
     or warnings are turned off at that node.  */
  TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));

  /* Set the location of DECL and emit a declaration for it.  */
  if (Present (gnat_node))
    Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));

  add_decl_expr (decl, gnat_node);

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later.  Put global declarations in the
     globals list and local ones in the current block.  But skip TYPE_DECLs
     for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
     with the debugger and aren't needed anyway.  */
  if (!(TREE_CODE (decl) == TYPE_DECL
        && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
    {
      if (DECL_EXTERNAL (decl))
	{
	  /* External decls are only recorded when they are builtins, so
	     builtin_function_decl can find them later.  */
	  if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
	    vec_safe_push (builtin_decls, decl);
	}
      else if (global_bindings_p ())
	vec_safe_push (global_decls, decl);
      else
	{
	  DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
	  BLOCK_VARS (current_binding_level->block) = decl;
	}
    }

  /* For the declaration of a type, set its name if it either is not already
     set or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same POINTER_TYPE node.  Code in the
     equivalent function of c-decl.c makes a copy of the type node here, but
     that may cause us trouble with incomplete types.  We make an exception
     for fat pointer types because the compiler automatically builds them
     for unconstrained array types and the debugger uses them to represent
     both these and pointers to these.  */
  if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
    {
      tree t = TREE_TYPE (decl);

      if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
	{
	  /* Array and pointer types aren't "tagged" types so we force the
	     type to be associated with its typedef in the DWARF back-end,
	     in order to make sure that the latter is always preserved.  */
	  if (!DECL_ARTIFICIAL (decl)
	      && (TREE_CODE (t) == ARRAY_TYPE
		  || TREE_CODE (t) == POINTER_TYPE))
	    {
	      tree tt = build_distinct_type_copy (t);
	      if (TREE_CODE (t) == POINTER_TYPE)
		TYPE_NEXT_PTR_TO (t) = tt;
	      TYPE_NAME (tt) = DECL_NAME (decl);
	      gnat_set_type_context (tt, DECL_CONTEXT (decl));
	      TYPE_STUB_DECL (tt) = TYPE_STUB_DECL (t);
	      DECL_ORIGINAL_TYPE (decl) = tt;
	    }
	}
      else if (TYPE_IS_FAT_POINTER_P (t))
	{
	  /* We need a variant for the placeholder machinery to work.  */
	  tree tt = build_variant_type_copy (t);
	  TYPE_NAME (tt) = decl;
	  gnat_set_type_context (tt, DECL_CONTEXT (decl));
	  TREE_USED (tt) = TREE_USED (t);
	  TREE_TYPE (decl) = tt;
	  if (DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
	    DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
	  else
	    DECL_ORIGINAL_TYPE (decl) = t;
	  DECL_ARTIFICIAL (decl) = 0;
	  /* Clear T so the variant-propagation loop below is skipped.  */
	  t = NULL_TREE;
	}
      else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
	;
      else
	t = NULL_TREE;

      /* Propagate the name to all the anonymous variants.  This is needed
	 for the type qualifiers machinery to work properly.  */
      if (t)
	for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
	  if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
	    {
	      TYPE_NAME (t) = decl;
	      gnat_set_type_context (t, DECL_CONTEXT (decl));
	    }
    }
}
672 \f
/* Create a record type that contains a SIZE bytes long field of TYPE with a
   starting bit position so that it is aligned to ALIGN bits, and leaving at
   least ROOM bytes free before the field.  BASE_ALIGN is the alignment the
   record is guaranteed to get.  GNAT_NODE is used for the position of the
   associated TYPE_DECL.  */

tree
make_aligning_type (tree type, unsigned int align, tree size,
		    unsigned int base_align, int room, Node_Id gnat_node)
{
  /* We will be crafting a record type with one field at a position set to be
     the next multiple of ALIGN past record'address + room bytes.  We use a
     record placeholder to express record'address.  */
  tree record_type = make_node (RECORD_TYPE);
  tree record = build0 (PLACEHOLDER_EXPR, record_type);

  tree record_addr_st
    = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));

  /* The diagram below summarizes the shape of what we manipulate:

		    <--------- pos ---------->
		{ +------------+-------------+-----------------+
      record  =>{ |############|     ...     | field (type)    |
		{ +------------+-------------+-----------------+
		  |<-- room -->|<- voffset ->|<---- size ----->|
		  o            o
		  |            |
		  record_addr  vblock_addr

     Every length is in sizetype bytes there, except "pos" which has to be
     set as a bit position in the GCC tree for the record.  */
  tree room_st = size_int (room);
  tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
  tree voffset_st, pos, field;

  tree name = TYPE_IDENTIFIER (type);

  /* Name the new type after the original, with an "ALIGN" suffix.  */
  name = concat_name (name, "ALIGN");
  TYPE_NAME (record_type) = name;

  /* Compute VOFFSET and then POS.  The next byte position multiple of some
     alignment after some address is obtained by "and"ing the alignment minus
     1 with the two's complement of the address.  */
  voffset_st = size_binop (BIT_AND_EXPR,
			   fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
			   size_int ((align / BITS_PER_UNIT) - 1));

  /* POS = (ROOM + VOFFSET) * BIT_PER_UNIT, in bitsizetype.  */
  pos = size_binop (MULT_EXPR,
		    convert (bitsizetype,
			     size_binop (PLUS_EXPR, room_st, voffset_st)),
		    bitsize_unit_node);

  /* Craft the GCC record representation.  We exceptionally do everything
     manually here because 1) our generic circuitry is not quite ready to
     handle the complex position/size expressions we are setting up, 2) we
     have a strong simplifying factor at hand: we know the maximum possible
     value of voffset, and 3) we have to set/reset at least the sizes in
     accordance with this maximum value anyway, as we need them to convey
     what should be "alloc"ated for this type.

     Use -1 as the 'addressable' indication for the field to prevent the
     creation of a bitfield.  We don't need one, it would have damaging
     consequences on the alignment computation, and create_field_decl would
     make one without this special argument, for instance because of the
     complex position expression.  */
  field = create_field_decl (get_identifier ("F"), type, record_type, size,
			     pos, 1, -1);
  TYPE_FIELDS (record_type) = field;

  TYPE_ALIGN (record_type) = base_align;
  TYPE_USER_ALIGN (record_type) = 1;

  /* The overall size accounts for the worst-case padding: ALIGN bits plus
     the reserved ROOM bytes, in addition to the field itself.  */
  TYPE_SIZE (record_type)
    = size_binop (PLUS_EXPR,
		  size_binop (MULT_EXPR, convert (bitsizetype, size),
			      bitsize_unit_node),
		  bitsize_int (align + room * BITS_PER_UNIT));
  TYPE_SIZE_UNIT (record_type)
    = size_binop (PLUS_EXPR, size,
		  size_int (room + align / BITS_PER_UNIT));

  SET_TYPE_MODE (record_type, BLKmode);
  relate_alias_sets (record_type, type, ALIAS_SET_COPY);

  /* Declare it now since it will never be declared otherwise.  This is
     necessary to ensure that its subtrees are properly marked.  */
  create_type_decl (name, record_type, true, false, gnat_node);

  return record_type;
}
765
766 /* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
767 as the field type of a packed record if IN_RECORD is true, or as the
768 component type of a packed array if IN_RECORD is false. See if we can
769 rewrite it either as a type that has a non-BLKmode, which we can pack
770 tighter in the packed record case, or as a smaller type. If so, return
771 the new type. If not, return the original type. */
772
773 tree
774 make_packable_type (tree type, bool in_record)
775 {
776 unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
777 unsigned HOST_WIDE_INT new_size;
778 tree new_type, old_field, field_list = NULL_TREE;
779 unsigned int align;
780
781 /* No point in doing anything if the size is zero. */
782 if (size == 0)
783 return type;
784
785 new_type = make_node (TREE_CODE (type));
786
787 /* Copy the name and flags from the old type to that of the new.
788 Note that we rely on the pointer equality created here for
789 TYPE_NAME to look through conversions in various places. */
790 TYPE_NAME (new_type) = TYPE_NAME (type);
791 TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
792 TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
793 if (TREE_CODE (type) == RECORD_TYPE)
794 TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);
795
796 /* If we are in a record and have a small size, set the alignment to
797 try for an integral mode. Otherwise set it to try for a smaller
798 type with BLKmode. */
799 if (in_record && size <= MAX_FIXED_MODE_SIZE)
800 {
801 align = ceil_pow2 (size);
802 TYPE_ALIGN (new_type) = align;
803 new_size = (size + align - 1) & -align;
804 }
805 else
806 {
807 unsigned HOST_WIDE_INT align;
808
809 /* Do not try to shrink the size if the RM size is not constant. */
810 if (TYPE_CONTAINS_TEMPLATE_P (type)
811 || !tree_fits_uhwi_p (TYPE_ADA_SIZE (type)))
812 return type;
813
814 /* Round the RM size up to a unit boundary to get the minimal size
815 for a BLKmode record. Give up if it's already the size. */
816 new_size = tree_to_uhwi (TYPE_ADA_SIZE (type));
817 new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
818 if (new_size == size)
819 return type;
820
821 align = new_size & -new_size;
822 TYPE_ALIGN (new_type) = MIN (TYPE_ALIGN (type), align);
823 }
824
825 TYPE_USER_ALIGN (new_type) = 1;
826
827 /* Now copy the fields, keeping the position and size as we don't want
828 to change the layout by propagating the packedness downwards. */
829 for (old_field = TYPE_FIELDS (type); old_field;
830 old_field = DECL_CHAIN (old_field))
831 {
832 tree new_field_type = TREE_TYPE (old_field);
833 tree new_field, new_size;
834
835 if (RECORD_OR_UNION_TYPE_P (new_field_type)
836 && !TYPE_FAT_POINTER_P (new_field_type)
837 && tree_fits_uhwi_p (TYPE_SIZE (new_field_type)))
838 new_field_type = make_packable_type (new_field_type, true);
839
840 /* However, for the last field in a not already packed record type
841 that is of an aggregate type, we need to use the RM size in the
842 packable version of the record type, see finish_record_type. */
843 if (!DECL_CHAIN (old_field)
844 && !TYPE_PACKED (type)
845 && RECORD_OR_UNION_TYPE_P (new_field_type)
846 && !TYPE_FAT_POINTER_P (new_field_type)
847 && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
848 && TYPE_ADA_SIZE (new_field_type))
849 new_size = TYPE_ADA_SIZE (new_field_type);
850 else
851 new_size = DECL_SIZE (old_field);
852
853 new_field
854 = create_field_decl (DECL_NAME (old_field), new_field_type, new_type,
855 new_size, bit_position (old_field),
856 TYPE_PACKED (type),
857 !DECL_NONADDRESSABLE_P (old_field));
858
859 DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (old_field);
860 SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, old_field);
861 if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
862 DECL_QUALIFIER (new_field) = DECL_QUALIFIER (old_field);
863
864 DECL_CHAIN (new_field) = field_list;
865 field_list = new_field;
866 }
867
868 finish_record_type (new_type, nreverse (field_list), 2, false);
869 relate_alias_sets (new_type, type, ALIAS_SET_COPY);
870 if (TYPE_STUB_DECL (type))
871 SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
872 DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));
873
874 /* If this is a padding record, we never want to make the size smaller
875 than what was specified. For QUAL_UNION_TYPE, also copy the size. */
876 if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
877 {
878 TYPE_SIZE (new_type) = TYPE_SIZE (type);
879 TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
880 new_size = size;
881 }
882 else
883 {
884 TYPE_SIZE (new_type) = bitsize_int (new_size);
885 TYPE_SIZE_UNIT (new_type)
886 = size_int ((new_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
887 }
888
889 if (!TYPE_CONTAINS_TEMPLATE_P (type))
890 SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));
891
892 compute_record_mode (new_type);
893
894 /* Try harder to get a packable type if necessary, for example
895 in case the record itself contains a BLKmode field. */
896 if (in_record && TYPE_MODE (new_type) == BLKmode)
897 SET_TYPE_MODE (new_type,
898 mode_for_size_tree (TYPE_SIZE (new_type), MODE_INT, 1));
899
900 /* If neither the mode nor the size has shrunk, return the old type. */
901 if (TYPE_MODE (new_type) == BLKmode && new_size >= size)
902 return type;
903
904 return new_type;
905 }
906
/* Given a type TYPE, return a new type whose size is appropriate for SIZE.
   If TYPE is the best type, return it.  Otherwise, make a new type.  We
   only support new integral and pointer types.  FOR_BIASED is true if
   we are making a biased type.  */

tree
make_type_from_size (tree type, tree size_tree, bool for_biased)
{
  unsigned HOST_WIDE_INT size;
  bool biased_p;
  tree new_type;

  /* If size indicates an error, just return TYPE to avoid propagating
     the error.  Likewise if it's too large to represent.  */
  if (!size_tree || !tree_fits_uhwi_p (size_tree))
    return type;

  size = tree_to_uhwi (size_tree);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      /* Biased representation is only possible for INTEGER_TYPE nodes.  */
      biased_p = (TREE_CODE (type) == INTEGER_TYPE
		  && TYPE_BIASED_REPRESENTATION_P (type));

      /* Integer types with precision 0 are forbidden.  */
      if (size == 0)
	size = 1;

      /* Only do something if the type isn't a packed array type and doesn't
	 already have the proper size and the size isn't too large.  */
      if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
	  || (TYPE_PRECISION (type) == size && biased_p == for_biased)
	  || size > LONG_LONG_TYPE_SIZE)
	break;

      biased_p |= for_biased;
      /* A biased type is encoded as an unsigned offset from its lower
	 bound, hence the unsigned base type.  */
      if (TYPE_UNSIGNED (type) || biased_p)
	new_type = make_unsigned_type (size);
      else
	new_type = make_signed_type (size);
      TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
      SET_TYPE_RM_MIN_VALUE (new_type,
			     convert (TREE_TYPE (new_type),
				      TYPE_MIN_VALUE (type)));
      SET_TYPE_RM_MAX_VALUE (new_type,
			     convert (TREE_TYPE (new_type),
				      TYPE_MAX_VALUE (type)));
      /* Copy the name to show that it's essentially the same type and
	 not a subrange type.  */
      TYPE_NAME (new_type) = TYPE_NAME (type);
      TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
      SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
      return new_type;

    case RECORD_TYPE:
      /* Do something if this is a fat pointer, in which case we
	 may need to return the thin pointer.  */
      if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
	{
	  enum machine_mode p_mode = mode_for_size (size, MODE_INT, 0);
	  /* Fall back to the canonical pointer mode if the target cannot
	     support a pointer in the requested mode.  */
	  if (!targetm.valid_pointer_mode (p_mode))
	    p_mode = ptr_mode;
	  return
	    build_pointer_type_for_mode
	      (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
	       p_mode, 0);
	}
      break;

    case POINTER_TYPE:
      /* Only do something if this is a thin pointer, in which case we
	 may need to return the fat pointer.  */
      if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
	return
	  build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
      break;

    default:
      break;
    }

  /* No resizing was possible or necessary: hand back the input type.  */
  return type;
}
993
994 /* See if the data pointed to by the hash table slot is marked. */
995
996 static int
997 pad_type_hash_marked_p (const void *p)
998 {
999 const_tree const type = ((const struct pad_type_hash *) p)->type;
1000
1001 return ggc_marked_p (type);
1002 }
1003
1004 /* Return the cached hash value. */
1005
1006 static hashval_t
1007 pad_type_hash_hash (const void *p)
1008 {
1009 return ((const struct pad_type_hash *) p)->hash;
1010 }
1011
1012 /* Return 1 iff the padded types are equivalent. */
1013
1014 static int
1015 pad_type_hash_eq (const void *p1, const void *p2)
1016 {
1017 const struct pad_type_hash *const t1 = (const struct pad_type_hash *) p1;
1018 const struct pad_type_hash *const t2 = (const struct pad_type_hash *) p2;
1019 tree type1, type2;
1020
1021 if (t1->hash != t2->hash)
1022 return 0;
1023
1024 type1 = t1->type;
1025 type2 = t2->type;
1026
1027 /* We consider that the padded types are equivalent if they pad the same
1028 type and have the same size, alignment and RM size. Taking the mode
1029 into account is redundant since it is determined by the others. */
1030 return
1031 TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
1032 && TYPE_SIZE (type1) == TYPE_SIZE (type2)
1033 && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
1034 && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2);
1035 }
1036
/* Ensure that TYPE has SIZE and ALIGN.  Make and return a new padded type
   if needed.  We have already verified that SIZE and TYPE are large enough.
   GNAT_ENTITY is used to name the resulting record and to issue a warning.
   IS_COMPONENT_TYPE is true if this is being done for the component type of
   an array.  IS_USER_TYPE is true if the original type needs to be completed.
   DEFINITION is true if this type is being defined.  SET_RM_SIZE is true if
   the RM size of the resulting type is to be set to SIZE too.  */

tree
maybe_pad_type (tree type, tree size, unsigned int align,
		Entity_Id gnat_entity, bool is_component_type,
		bool is_user_type, bool definition, bool set_rm_size)
{
  tree orig_size = TYPE_SIZE (type);
  unsigned int orig_align = TYPE_ALIGN (type);
  tree record, field;

  /* If TYPE is a padded type, see if it agrees with any size and alignment
     we were given.  If so, return the original type.  Otherwise, strip
     off the padding, since we will either be returning the inner type
     or repadding it.  If no size or alignment is specified, use that of
     the original padded type.  */
  if (TYPE_IS_PADDING_P (type))
    {
      if ((!size
	   || operand_equal_p (round_up (size, orig_align), orig_size, 0))
	  && (align == 0 || align == orig_align))
	return type;

      if (!size)
	size = orig_size;
      if (align == 0)
	align = orig_align;

      /* Strip the padding: the inner type is the single field's type.  */
      type = TREE_TYPE (TYPE_FIELDS (type));
      orig_size = TYPE_SIZE (type);
      orig_align = TYPE_ALIGN (type);
    }

  /* If the size is either not being changed or is being made smaller (which
     is not done here and is only valid for bitfields anyway), show the size
     isn't changing.  Likewise, clear the alignment if it isn't being
     changed.  Then return if we aren't doing anything.  */
  if (size
      && (operand_equal_p (size, orig_size, 0)
	  || (TREE_CODE (orig_size) == INTEGER_CST
	      && tree_int_cst_lt (size, orig_size))))
    size = NULL_TREE;

  if (align == orig_align)
    align = 0;

  if (align == 0 && !size)
    return type;

  /* If requested, complete the original type and give it a name.  */
  if (is_user_type)
    create_type_decl (get_entity_name (gnat_entity), type,
		      !Comes_From_Source (gnat_entity),
		      !(TYPE_NAME (type)
			&& TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
			&& DECL_IGNORED_P (TYPE_NAME (type))),
		      gnat_entity);

  /* We used to modify the record in place in some cases, but that could
     generate incorrect debugging information.  So make a new record
     type and name.  */
  record = make_node (RECORD_TYPE);
  TYPE_PADDING_P (record) = 1;

  if (Present (gnat_entity))
    TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");

  TYPE_ALIGN (record) = align ? align : orig_align;
  TYPE_SIZE (record) = size ? size : orig_size;
  /* The byte size is the bit size rounded up to a whole byte.  */
  TYPE_SIZE_UNIT (record)
    = convert (sizetype,
	       size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
			   bitsize_unit_node));

  /* If we are changing the alignment and the input type is a record with
     BLKmode and a small constant size, try to make a form that has an
     integral mode.  This might allow the padding record to also have an
     integral mode, which will be much more efficient.  There is no point
     in doing so if a size is specified unless it is also a small constant
     size and it is incorrect to do so if we cannot guarantee that the mode
     will be naturally aligned since the field must always be addressable.

     ??? This might not always be a win when done for a stand-alone object:
     since the nominal and the effective type of the object will now have
     different modes, a VIEW_CONVERT_EXPR will be required for converting
     between them and it might be hard to overcome afterwards, including
     at the RTL level when the stand-alone object is accessed as a whole.  */
  if (align != 0
      && RECORD_OR_UNION_TYPE_P (type)
      && TYPE_MODE (type) == BLKmode
      && !TYPE_BY_REFERENCE_P (type)
      && TREE_CODE (orig_size) == INTEGER_CST
      && !TREE_OVERFLOW (orig_size)
      && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
      && (!size
	  || (TREE_CODE (size) == INTEGER_CST
	      && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
    {
      tree packable_type = make_packable_type (type, true);
      /* Only use the packable variant if it actually got an integral mode
	 and the requested alignment can accommodate it.  */
      if (TYPE_MODE (packable_type) != BLKmode
	  && align >= TYPE_ALIGN (packable_type))
        type = packable_type;
    }

  /* Now create the field with the original size.  */
  field = create_field_decl (get_identifier ("F"), type, record, orig_size,
			     bitsize_zero_node, 0, 1);
  DECL_INTERNAL_P (field) = 1;

  /* Do not emit debug info until after the auxiliary record is built.  */
  finish_record_type (record, field, 1, false);

  /* Set the RM size if requested.  */
  if (set_rm_size)
    {
      SET_TYPE_ADA_SIZE (record, size ? size : orig_size);

      /* If the padded type is complete and has constant size, we canonicalize
	 it by means of the hash table.  This is consistent with the language
	 semantics and ensures that gigi and the middle-end have a common view
	 of these padded types.  */
      if (TREE_CONSTANT (TYPE_SIZE (record)))
	{
	  hashval_t hashcode;
	  struct pad_type_hash in, *h;
	  void **loc;

	  /* The hash combines the inner type with the size, alignment and
	     RM size of the padding record, mirroring pad_type_hash_eq.  */
	  hashcode = iterative_hash_object (TYPE_HASH (type), 0);
	  hashcode = iterative_hash_expr (TYPE_SIZE (record), hashcode);
	  hashcode = iterative_hash_hashval_t (TYPE_ALIGN (record), hashcode);
	  hashcode = iterative_hash_expr (TYPE_ADA_SIZE (record), hashcode);

	  in.hash = hashcode;
	  in.type = record;
	  h = (struct pad_type_hash *)
		htab_find_with_hash (pad_type_hash_table, &in, hashcode);
	  if (h)
	    {
	      /* Reuse the canonical equivalent; skip building debug info
		 since it was already emitted for the canonical type.  */
	      record = h->type;
	      goto built;
	    }

	  h = ggc_alloc<pad_type_hash> ();
	  h->hash = hashcode;
	  h->type = record;
	  loc = htab_find_slot_with_hash (pad_type_hash_table, h, hashcode,
					  INSERT);
	  *loc = (void *)h;
	}
    }

  /* Unless debugging information isn't being written for the input type,
     write a record that shows what we are a subtype of and also make a
     variable that indicates our size, if still variable.  */
  if (TREE_CODE (orig_size) != INTEGER_CST
      && TYPE_NAME (record)
      && TYPE_NAME (type)
      && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
	   && DECL_IGNORED_P (TYPE_NAME (type))))
    {
      tree marker = make_node (RECORD_TYPE);
      tree name = TYPE_IDENTIFIER (record);
      tree orig_name = TYPE_IDENTIFIER (type);

      /* The XVS parallel type tells the debugger which type we pad
	 (see exp_dbug.ads for the encoding).  */
      TYPE_NAME (marker) = concat_name (name, "XVS");
      finish_record_type (marker,
			  create_field_decl (orig_name,
					     build_reference_type (type),
					     marker, NULL_TREE, NULL_TREE,
					     0, 0),
			  0, true);

      add_parallel_type (record, marker);

      if (definition && size && TREE_CODE (size) != INTEGER_CST)
	TYPE_SIZE_UNIT (marker)
	  = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
			     TYPE_SIZE_UNIT (record), false, false, false,
			     false, NULL, gnat_entity);
    }

  rest_of_record_type_compilation (record);

built:
  /* If the size was widened explicitly, maybe give a warning.  Take the
     original size as the maximum size of the input if there was an
     unconstrained record involved and round it up to the specified alignment,
     if one was specified.  But don't do it if we are just annotating types
     and the type is tagged, since tagged types aren't fully laid out in this
     mode.  */
  if (!size
      || TREE_CODE (size) == COND_EXPR
      || TREE_CODE (size) == MAX_EXPR
      || No (gnat_entity)
      || (type_annotate_only && Is_Tagged_Type (Etype (gnat_entity))))
    return record;

  if (CONTAINS_PLACEHOLDER_P (orig_size))
    orig_size = max_size (orig_size, true);

  if (align)
    orig_size = round_up (orig_size, align);

  if (!operand_equal_p (size, orig_size, 0)
      && !(TREE_CODE (size) == INTEGER_CST
	   && TREE_CODE (orig_size) == INTEGER_CST
	   && (TREE_OVERFLOW (size)
	       || TREE_OVERFLOW (orig_size)
	       || tree_int_cst_lt (size, orig_size))))
    {
      Node_Id gnat_error_node = Empty;

      /* Report against the original array for packed array types.  */
      if (Is_Packed_Array_Type (gnat_entity))
	gnat_entity = Original_Array_Type (gnat_entity);

      if ((Ekind (gnat_entity) == E_Component
	   || Ekind (gnat_entity) == E_Discriminant)
	  && Present (Component_Clause (gnat_entity)))
	gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
      else if (Present (Size_Clause (gnat_entity)))
	gnat_error_node = Expression (Size_Clause (gnat_entity));

      /* Generate message only for entities that come from source, since
	 if we have an entity created by expansion, the message will be
	 generated for some other corresponding source entity.  */
      if (Comes_From_Source (gnat_entity))
	{
	  if (Present (gnat_error_node))
	    post_error_ne_tree ("{^ }bits of & unused?",
				gnat_error_node, gnat_entity,
				size_diffop (size, orig_size));
	  else if (is_component_type)
	    post_error_ne_tree ("component of& padded{ by ^ bits}?",
				gnat_entity, gnat_entity,
				size_diffop (size, orig_size));
	}
    }

  return record;
}
1283 \f
/* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
   If this is a multi-dimensional array type, do this recursively.

   OP may be
   - ALIAS_SET_COPY:     the new set is made a copy of the old one.
   - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
   - ALIAS_SET_SUBSET:   the new set is made a subset of the old one.  */

void
relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
{
  /* Remove any padding from GNU_OLD_TYPE.  It doesn't matter in the case
     of a one-dimensional array, since the padding has the same alias set
     as the field type, but if it's a multi-dimensional array, we need to
     see the inner types.  */
  while (TREE_CODE (gnu_old_type) == RECORD_TYPE
	 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
	     || TYPE_PADDING_P (gnu_old_type)))
    gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));

  /* Unconstrained array types are deemed incomplete and would thus be given
     alias set 0.  Retrieve the underlying array type.  */
  if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
    gnu_old_type
      = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
  if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
    gnu_new_type
      = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));

  /* Recurse on the element types of multi-dimensional arrays so that every
     dimension level gets related in the same way.  */
  if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
      && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
      && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
    relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);

  switch (op)
    {
    case ALIAS_SET_COPY:
      /* The alias set shouldn't be copied between array types with different
	 aliasing settings because this can break the aliasing relationship
	 between the array type and its element type.  */
#ifndef ENABLE_CHECKING
      if (flag_strict_aliasing)
#endif
	gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
		      && TREE_CODE (gnu_old_type) == ARRAY_TYPE
		      && TYPE_NONALIASED_COMPONENT (gnu_new_type)
			 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));

      TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
      break;

    case ALIAS_SET_SUBSET:
    case ALIAS_SET_SUPERSET:
      {
	alias_set_type old_set = get_alias_set (gnu_old_type);
	alias_set_type new_set = get_alias_set (gnu_new_type);

	/* Do nothing if the alias sets conflict.  This ensures that we
	   never call record_alias_subset several times for the same pair
	   or at all for alias set 0.  */
	if (!alias_sets_conflict_p (old_set, new_set))
	  {
	    if (op == ALIAS_SET_SUBSET)
	      record_alias_subset (old_set, new_set);
	    else
	      record_alias_subset (new_set, old_set);
	  }
      }
      break;

    default:
      gcc_unreachable ();
    }

  record_component_aliases (gnu_new_type);
}
1360 \f
1361 /* Record TYPE as a builtin type for Ada. NAME is the name of the type.
1362 ARTIFICIAL_P is true if it's a type that was generated by the compiler. */
1363
1364 void
1365 record_builtin_type (const char *name, tree type, bool artificial_p)
1366 {
1367 tree type_decl = build_decl (input_location,
1368 TYPE_DECL, get_identifier (name), type);
1369 DECL_ARTIFICIAL (type_decl) = artificial_p;
1370 TYPE_ARTIFICIAL (type) = artificial_p;
1371 gnat_pushdecl (type_decl, Empty);
1372
1373 if (debug_hooks->type_decl)
1374 debug_hooks->type_decl (type_decl, false);
1375 }
1376 \f
/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
   finish constructing the record type as a fat pointer type.  */

void
finish_fat_pointer_type (tree record_type, tree field_list)
{
  /* Make sure we can put it into a register.  */
  if (STRICT_ALIGNMENT)
    TYPE_ALIGN (record_type) = MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE);

  /* Show what it really is.  */
  TYPE_FAT_POINTER_P (record_type) = 1;

  /* Do not emit debug info for it since the types of its fields may still be
     incomplete at this point.  */
  finish_record_type (record_type, field_list, 0, false);

  /* Force type_contains_placeholder_p to return true on it.  Although the
     PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
     type but the representation of the unconstrained array.  */
  TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
}
1399
/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
   finish constructing the record or union type.  If REP_LEVEL is zero, this
   record has no representation clause and so will be entirely laid out here.
   If REP_LEVEL is one, this record has a representation clause and has been
   laid out already; only set the sizes and alignment.  If REP_LEVEL is two,
   this record is derived from a parent record and thus inherits its layout;
   only make a pass on the fields to finalize them.  DEBUG_INFO_P is true if
   we need to write debug information about this type.  */

void
finish_record_type (tree record_type, tree field_list, int rep_level,
		    bool debug_info_p)
{
  enum tree_code code = TREE_CODE (record_type);
  tree name = TYPE_IDENTIFIER (record_type);
  tree ada_size = bitsize_zero_node;
  tree size = bitsize_zero_node;
  /* Remember what was already set on entry so a rep clause's settings
     are preserved below.  */
  bool had_size = TYPE_SIZE (record_type) != 0;
  bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
  bool had_align = TYPE_ALIGN (record_type) != 0;
  tree field;

  TYPE_FIELDS (record_type) = field_list;

  /* Always attach the TYPE_STUB_DECL for a record type.  It is required to
     generate debug info and have a parallel type.  */
  TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);

  /* Globally initialize the record first.  If this is a rep'ed record,
     that just means some initializations; otherwise, layout the record.  */
  if (rep_level > 0)
    {
      TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));

      if (!had_size_unit)
	TYPE_SIZE_UNIT (record_type) = size_zero_node;

      if (!had_size)
	TYPE_SIZE (record_type) = bitsize_zero_node;

      /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
	 out just like a UNION_TYPE, since the size will be fixed.  */
      else if (code == QUAL_UNION_TYPE)
	code = UNION_TYPE;
    }
  else
    {
      /* Ensure there isn't a size already set.  There can be in an error
	 case where there is a rep clause but all fields have errors and
	 no longer have a position.  */
      TYPE_SIZE (record_type) = 0;

      /* Ensure we use the traditional GCC layout for bitfields when we need
	 to pack the record type or have a representation clause.  The other
	 possible layout (Microsoft C compiler), if available, would prevent
	 efficient packing in almost all cases.  */
#ifdef TARGET_MS_BITFIELD_LAYOUT
      if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
	decl_attributes (&record_type,
			 tree_cons (get_identifier ("gcc_struct"),
				    NULL_TREE, NULL_TREE),
			 ATTR_FLAG_TYPE_IN_PLACE);
#endif

      layout_type (record_type);
    }

  /* At this point, the position and size of each field is known.  It was
     either set before entry by a rep clause, or by laying out the type above.

     We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
     to compute the Ada size; the GCC size and alignment (for rep'ed records
     that are not padding types); and the mode (for rep'ed records).  We also
     clear the DECL_BIT_FIELD indication for the cases we know have not been
     handled yet, and adjust DECL_NONADDRESSABLE_P accordingly.  */

  if (code == QUAL_UNION_TYPE)
    field_list = nreverse (field_list);

  for (field = field_list; field; field = DECL_CHAIN (field))
    {
      tree type = TREE_TYPE (field);
      tree pos = bit_position (field);
      tree this_size = DECL_SIZE (field);
      tree this_ada_size;

      /* Use the RM size of a record-ish field when it has one.  */
      if (RECORD_OR_UNION_TYPE_P (type)
	  && !TYPE_FAT_POINTER_P (type)
	  && !TYPE_CONTAINS_TEMPLATE_P (type)
	  && TYPE_ADA_SIZE (type))
	this_ada_size = TYPE_ADA_SIZE (type);
      else
	this_ada_size = this_size;

      /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle.  */
      if (DECL_BIT_FIELD (field)
	  && operand_equal_p (this_size, TYPE_SIZE (type), 0))
	{
	  unsigned int align = TYPE_ALIGN (type);

	  /* In the general case, type alignment is required.  */
	  if (value_factor_p (pos, align))
	    {
	      /* The enclosing record type must be sufficiently aligned.
		 Otherwise, if no alignment was specified for it and it
		 has been laid out already, bump its alignment to the
		 desired one if this is compatible with its size.  */
	      if (TYPE_ALIGN (record_type) >= align)
		{
		  DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
		  DECL_BIT_FIELD (field) = 0;
		}
	      else if (!had_align
		       && rep_level == 0
		       && value_factor_p (TYPE_SIZE (record_type), align))
		{
		  TYPE_ALIGN (record_type) = align;
		  DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
		  DECL_BIT_FIELD (field) = 0;
		}
	    }

	  /* In the non-strict alignment case, only byte alignment is
	     required.  */
	  if (!STRICT_ALIGNMENT
	      && DECL_BIT_FIELD (field)
	      && value_factor_p (pos, BITS_PER_UNIT))
	    DECL_BIT_FIELD (field) = 0;
	}

      /* If we still have DECL_BIT_FIELD set at this point, we know that the
	 field is technically not addressable.  Except that it can actually
	 be addressed if it is BLKmode and happens to be properly aligned.  */
      if (DECL_BIT_FIELD (field)
	  && !(DECL_MODE (field) == BLKmode
	       && value_factor_p (pos, BITS_PER_UNIT)))
	DECL_NONADDRESSABLE_P (field) = 1;

      /* A type must be as aligned as its most aligned field that is not
	 a bit-field.  But this is already enforced by layout_type.  */
      if (rep_level > 0 && !DECL_BIT_FIELD (field))
	TYPE_ALIGN (record_type)
	  = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));

      switch (code)
	{
	case UNION_TYPE:
	  ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
	  size = size_binop (MAX_EXPR, size, this_size);
	  break;

	case QUAL_UNION_TYPE:
	  ada_size
	    = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			   this_ada_size, ada_size);
	  size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			      this_size, size);
	  break;

	case RECORD_TYPE:
	  /* Since we know here that all fields are sorted in order of
	     increasing bit position, the size of the record is one
	     higher than the ending bit of the last field processed
	     unless we have a rep clause, since in that case we might
	     have a field outside a QUAL_UNION_TYPE that has a higher ending
	     position.  So use a MAX in that case.  Also, if this field is a
	     QUAL_UNION_TYPE, we need to take into account the previous size in
	     the case of empty variants.  */
	  ada_size
	    = merge_sizes (ada_size, pos, this_ada_size,
			   TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
	  size
	    = merge_sizes (size, pos, this_size,
			   TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Restore the original field order reversed above.  */
  if (code == QUAL_UNION_TYPE)
    nreverse (field_list);

  if (rep_level < 2)
    {
      /* If this is a padding record, we never want to make the size smaller
	 than what was specified in it, if any.  */
      if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
	size = TYPE_SIZE (record_type);

      /* Now set any of the values we've just computed that apply.  */
      if (!TYPE_FAT_POINTER_P (record_type)
	  && !TYPE_CONTAINS_TEMPLATE_P (record_type))
	SET_TYPE_ADA_SIZE (record_type, ada_size);

      if (rep_level > 0)
	{
	  tree size_unit = had_size_unit
			   ? TYPE_SIZE_UNIT (record_type)
			   : convert (sizetype,
				      size_binop (CEIL_DIV_EXPR, size,
						  bitsize_unit_node));
	  unsigned int align = TYPE_ALIGN (record_type);

	  TYPE_SIZE (record_type) = variable_size (round_up (size, align));
	  TYPE_SIZE_UNIT (record_type)
	    = variable_size (round_up (size_unit, align / BITS_PER_UNIT));

	  compute_record_mode (record_type);
	}
    }

  if (debug_info_p)
    rest_of_record_type_compilation (record_type);
}
1615
1616 /* Append PARALLEL_TYPE on the chain of parallel types of TYPE. */
1617
1618 void
1619 add_parallel_type (tree type, tree parallel_type)
1620 {
1621 tree decl = TYPE_STUB_DECL (type);
1622
1623 while (DECL_PARALLEL_TYPE (decl))
1624 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
1625
1626 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
1627 }
1628
1629 /* Return true if TYPE has a parallel type. */
1630
1631 static bool
1632 has_parallel_type (tree type)
1633 {
1634 tree decl = TYPE_STUB_DECL (type);
1635
1636 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
1637 }
1638
1639 /* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
1640 associated with it. It need not be invoked directly in most cases since
1641 finish_record_type takes care of doing so, but this can be necessary if
1642 a parallel type is to be attached to the record type. */
1643
1644 void
1645 rest_of_record_type_compilation (tree record_type)
1646 {
1647 bool var_size = false;
1648 tree field;
1649
1650 /* If this is a padded type, the bulk of the debug info has already been
1651 generated for the field's type. */
1652 if (TYPE_IS_PADDING_P (record_type))
1653 return;
1654
1655 /* If the type already has a parallel type (XVS type), then we're done. */
1656 if (has_parallel_type (record_type))
1657 return;
1658
1659 for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
1660 {
1661 /* We need to make an XVE/XVU record if any field has variable size,
1662 whether or not the record does. For example, if we have a union,
1663 it may be that all fields, rounded up to the alignment, have the
1664 same size, in which case we'll use that size. But the debug
1665 output routines (except Dwarf2) won't be able to output the fields,
1666 so we need to make the special record. */
1667 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
1668 /* If a field has a non-constant qualifier, the record will have
1669 variable size too. */
1670 || (TREE_CODE (record_type) == QUAL_UNION_TYPE
1671 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
1672 {
1673 var_size = true;
1674 break;
1675 }
1676 }
1677
1678 /* If this record type is of variable size, make a parallel record type that
1679 will tell the debugger how the former is laid out (see exp_dbug.ads). */
1680 if (var_size)
1681 {
1682 tree new_record_type
1683 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
1684 ? UNION_TYPE : TREE_CODE (record_type));
1685 tree orig_name = TYPE_IDENTIFIER (record_type), new_name;
1686 tree last_pos = bitsize_zero_node;
1687 tree old_field, prev_old_field = NULL_TREE;
1688
1689 new_name
1690 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
1691 ? "XVU" : "XVE");
1692 TYPE_NAME (new_record_type) = new_name;
1693 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
1694 TYPE_STUB_DECL (new_record_type)
1695 = create_type_stub_decl (new_name, new_record_type);
1696 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
1697 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
1698 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
1699 TYPE_SIZE_UNIT (new_record_type)
1700 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
1701
1702 /* Now scan all the fields, replacing each field with a new field
1703 corresponding to the new encoding. */
1704 for (old_field = TYPE_FIELDS (record_type); old_field;
1705 old_field = DECL_CHAIN (old_field))
1706 {
1707 tree field_type = TREE_TYPE (old_field);
1708 tree field_name = DECL_NAME (old_field);
1709 tree curpos = bit_position (old_field);
1710 tree pos, new_field;
1711 bool var = false;
1712 unsigned int align = 0;
1713
1714 /* We're going to do some pattern matching below so remove as many
1715 conversions as possible. */
1716 curpos = remove_conversions (curpos, true);
1717
1718 /* See how the position was modified from the last position.
1719
1720 There are two basic cases we support: a value was added
1721 to the last position or the last position was rounded to
a boundary and then something was added.  Check for the
1723 first case first. If not, see if there is any evidence
1724 of rounding. If so, round the last position and retry.
1725
1726 If this is a union, the position can be taken as zero. */
1727 if (TREE_CODE (new_record_type) == UNION_TYPE)
1728 pos = bitsize_zero_node;
1729 else
1730 pos = compute_related_constant (curpos, last_pos);
1731
1732 if (!pos
1733 && TREE_CODE (curpos) == MULT_EXPR
1734 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
1735 {
1736 tree offset = TREE_OPERAND (curpos, 0);
1737 align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
1738 align = scale_by_factor_of (offset, align);
1739 last_pos = round_up (last_pos, align);
1740 pos = compute_related_constant (curpos, last_pos);
1741 }
1742 else if (!pos
1743 && TREE_CODE (curpos) == PLUS_EXPR
1744 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))
1745 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
1746 && tree_fits_uhwi_p
1747 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1)))
1748 {
1749 tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
1750 unsigned HOST_WIDE_INT addend
1751 = tree_to_uhwi (TREE_OPERAND (curpos, 1));
1752 align
1753 = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
1754 align = scale_by_factor_of (offset, align);
1755 align = MIN (align, addend & -addend);
1756 last_pos = round_up (last_pos, align);
1757 pos = compute_related_constant (curpos, last_pos);
1758 }
1759 else if (potential_alignment_gap (prev_old_field, old_field, pos))
1760 {
1761 align = TYPE_ALIGN (field_type);
1762 last_pos = round_up (last_pos, align);
1763 pos = compute_related_constant (curpos, last_pos);
1764 }
1765
1766 /* If we can't compute a position, set it to zero.
1767
1768 ??? We really should abort here, but it's too much work
1769 to get this correct for all cases. */
1770 if (!pos)
1771 pos = bitsize_zero_node;
1772
1773 /* See if this type is variable-sized and make a pointer type
1774 and indicate the indirection if so. Beware that the debug
1775 back-end may adjust the position computed above according
1776 to the alignment of the field type, i.e. the pointer type
1777 in this case, if we don't preventively counter that. */
1778 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
1779 {
1780 field_type = build_pointer_type (field_type);
1781 if (align != 0 && TYPE_ALIGN (field_type) > align)
1782 {
1783 field_type = copy_node (field_type);
1784 TYPE_ALIGN (field_type) = align;
1785 }
1786 var = true;
1787 }
1788
1789 /* Make a new field name, if necessary. */
1790 if (var || align != 0)
1791 {
1792 char suffix[16];
1793
1794 if (align != 0)
1795 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
1796 align / BITS_PER_UNIT);
1797 else
1798 strcpy (suffix, "XVL");
1799
1800 field_name = concat_name (field_name, suffix);
1801 }
1802
1803 new_field
1804 = create_field_decl (field_name, field_type, new_record_type,
1805 DECL_SIZE (old_field), pos, 0, 0);
1806 DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
1807 TYPE_FIELDS (new_record_type) = new_field;
1808
1809 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1810 zero. The only time it's not the last field of the record
1811 is when there are other components at fixed positions after
1812 it (meaning there was a rep clause for every field) and we
1813 want to be able to encode them. */
1814 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1815 (TREE_CODE (TREE_TYPE (old_field))
1816 == QUAL_UNION_TYPE)
1817 ? bitsize_zero_node
1818 : DECL_SIZE (old_field));
1819 prev_old_field = old_field;
1820 }
1821
1822 TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));
1823
1824 add_parallel_type (record_type, new_record_type);
1825 }
1826 }
1827
1828 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1829 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
1830 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
1831 replace a value of zero with the old size. If HAS_REP is true, we take the
1832 MAX of the end position of this field with LAST_SIZE. In all other cases,
1833 we use FIRST_BIT plus SIZE. Return an expression for the size. */
1834
1835 static tree
1836 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1837 bool has_rep)
1838 {
1839 tree type = TREE_TYPE (last_size);
1840 tree new_size;
1841
1842 if (!special || TREE_CODE (size) != COND_EXPR)
1843 {
1844 new_size = size_binop (PLUS_EXPR, first_bit, size);
1845 if (has_rep)
1846 new_size = size_binop (MAX_EXPR, last_size, new_size);
1847 }
1848
1849 else
1850 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1851 integer_zerop (TREE_OPERAND (size, 1))
1852 ? last_size : merge_sizes (last_size, first_bit,
1853 TREE_OPERAND (size, 1),
1854 1, has_rep),
1855 integer_zerop (TREE_OPERAND (size, 2))
1856 ? last_size : merge_sizes (last_size, first_bit,
1857 TREE_OPERAND (size, 2),
1858 1, has_rep));
1859
1860 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
1861 when fed through substitute_in_expr) into thinking that a constant
1862 size is not constant. */
1863 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
1864 new_size = TREE_OPERAND (new_size, 0);
1865
1866 return new_size;
1867 }
1868
1869 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1870 related by the addition of a constant. Return that constant if so. */
1871
1872 static tree
1873 compute_related_constant (tree op0, tree op1)
1874 {
1875 tree op0_var, op1_var;
1876 tree op0_con = split_plus (op0, &op0_var);
1877 tree op1_con = split_plus (op1, &op1_var);
1878 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1879
1880 if (operand_equal_p (op0_var, op1_var, 0))
1881 return result;
1882 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1883 return result;
1884 else
1885 return 0;
1886 }
1887
1888 /* Utility function of above to split a tree OP which may be a sum, into a
1889 constant part, which is returned, and a variable part, which is stored
1890 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1891 bitsizetype. */
1892
1893 static tree
1894 split_plus (tree in, tree *pvar)
1895 {
1896 /* Strip conversions in order to ease the tree traversal and maximize the
1897 potential for constant or plus/minus discovery. We need to be careful
1898 to always return and set *pvar to bitsizetype trees, but it's worth
1899 the effort. */
1900 in = remove_conversions (in, false);
1901
1902 *pvar = convert (bitsizetype, in);
1903
1904 if (TREE_CODE (in) == INTEGER_CST)
1905 {
1906 *pvar = bitsize_zero_node;
1907 return convert (bitsizetype, in);
1908 }
1909 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1910 {
1911 tree lhs_var, rhs_var;
1912 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1913 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1914
1915 if (lhs_var == TREE_OPERAND (in, 0)
1916 && rhs_var == TREE_OPERAND (in, 1))
1917 return bitsize_zero_node;
1918
1919 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1920 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1921 }
1922 else
1923 return bitsize_zero_node;
1924 }
1925 \f
1926 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1927 subprogram. If it is VOID_TYPE, then we are dealing with a procedure,
1928 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1929 PARM_DECL nodes that are the subprogram parameters. CICO_LIST is the
1930 copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
1931 RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
1932 object. RETURN_BY_DIRECT_REF_P is true if the function returns by direct
1933 reference. RETURN_BY_INVISI_REF_P is true if the function returns by
1934 invisible reference. */
1935
1936 tree
1937 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1938 bool return_unconstrained_p, bool return_by_direct_ref_p,
1939 bool return_by_invisi_ref_p)
1940 {
1941 /* A list of the data type nodes of the subprogram formal parameters.
1942 This list is generated by traversing the input list of PARM_DECL
1943 nodes. */
1944 vec<tree, va_gc> *param_type_list = NULL;
1945 tree t, type;
1946
1947 for (t = param_decl_list; t; t = DECL_CHAIN (t))
1948 vec_safe_push (param_type_list, TREE_TYPE (t));
1949
1950 type = build_function_type_vec (return_type, param_type_list);
1951
1952 /* TYPE may have been shared since GCC hashes types. If it has a different
1953 CICO_LIST, make a copy. Likewise for the various flags. */
1954 if (!fntype_same_flags_p (type, cico_list, return_unconstrained_p,
1955 return_by_direct_ref_p, return_by_invisi_ref_p))
1956 {
1957 type = copy_type (type);
1958 TYPE_CI_CO_LIST (type) = cico_list;
1959 TYPE_RETURN_UNCONSTRAINED_P (type) = return_unconstrained_p;
1960 TYPE_RETURN_BY_DIRECT_REF_P (type) = return_by_direct_ref_p;
1961 TREE_ADDRESSABLE (type) = return_by_invisi_ref_p;
1962 }
1963
1964 return type;
1965 }
1966 \f
1967 /* Return a copy of TYPE but safe to modify in any way. */
1968
tree
copy_type (tree type)
{
  tree new_type = copy_node (type);

  /* Unshare the language-specific data.  */
  if (TYPE_LANG_SPECIFIC (type))
    {
      TYPE_LANG_SPECIFIC (new_type) = NULL;
      SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
    }

  /* And the contents of the language-specific slot if needed.  */
  if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
      && TYPE_RM_VALUES (type))
    {
      /* Clear first so the SET_* macros below install fresh values into
	 the new slot rather than sharing the old one.  */
      TYPE_RM_VALUES (new_type) = NULL_TREE;
      SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
      SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
      SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
    }

  /* copy_node clears this field instead of copying it, because it is
     aliased with TREE_CHAIN.  */
  TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);

  /* Sever the links that must be private to the copy and make it its own
     main variant with no variant chain.  */
  TYPE_POINTER_TO (new_type) = 0;
  TYPE_REFERENCE_TO (new_type) = 0;
  TYPE_MAIN_VARIANT (new_type) = new_type;
  TYPE_NEXT_VARIANT (new_type) = 0;

  return new_type;
}
2002 \f
2003 /* Return a subtype of sizetype with range MIN to MAX and whose
2004 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2005 of the associated TYPE_DECL. */
2006
2007 tree
2008 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2009 {
2010 /* First build a type for the desired range. */
2011 tree type = build_nonshared_range_type (sizetype, min, max);
2012
2013 /* Then set the index type. */
2014 SET_TYPE_INDEX_TYPE (type, index);
2015 create_type_decl (NULL_TREE, type, true, false, gnat_node);
2016
2017 return type;
2018 }
2019
2020 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2021 sizetype is used. */
2022
2023 tree
2024 create_range_type (tree type, tree min, tree max)
2025 {
2026 tree range_type;
2027
2028 if (type == NULL_TREE)
2029 type = sizetype;
2030
2031 /* First build a type with the base range. */
2032 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2033 TYPE_MAX_VALUE (type));
2034
2035 /* Then set the actual range. */
2036 SET_TYPE_RM_MIN_VALUE (range_type, convert (type, min));
2037 SET_TYPE_RM_MAX_VALUE (range_type, convert (type, max));
2038
2039 return range_type;
2040 }
2041 \f
2042 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
2043 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
2044 its data type. */
2045
2046 tree
2047 create_type_stub_decl (tree type_name, tree type)
2048 {
2049 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
2050 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
2051 emitted in DWARF. */
2052 tree type_decl = build_decl (input_location, TYPE_DECL, type_name, type);
2053 DECL_ARTIFICIAL (type_decl) = 1;
2054 TYPE_ARTIFICIAL (type) = 1;
2055 return type_decl;
2056 }
2057
2058 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
2059 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
2060 is a declaration that was generated by the compiler. DEBUG_INFO_P is
2061 true if we need to write debug information about this type. GNAT_NODE
2062 is used for the position of the decl. */
2063
2064 tree
2065 create_type_decl (tree type_name, tree type, bool artificial_p,
2066 bool debug_info_p, Node_Id gnat_node)
2067 {
2068 enum tree_code code = TREE_CODE (type);
2069 bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
2070 tree type_decl;
2071
2072 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
2073 gcc_assert (!TYPE_IS_DUMMY_P (type));
2074
2075 /* If the type hasn't been named yet, we're naming it; preserve an existing
2076 TYPE_STUB_DECL that has been attached to it for some purpose. */
2077 if (!named && TYPE_STUB_DECL (type))
2078 {
2079 type_decl = TYPE_STUB_DECL (type);
2080 DECL_NAME (type_decl) = type_name;
2081 }
2082 else
2083 type_decl = build_decl (input_location, TYPE_DECL, type_name, type);
2084
2085 DECL_ARTIFICIAL (type_decl) = artificial_p;
2086 TYPE_ARTIFICIAL (type) = artificial_p;
2087
2088 /* Add this decl to the current binding level. */
2089 gnat_pushdecl (type_decl, gnat_node);
2090
2091 /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
2092 This causes the name to be also viewed as a "tag" by the debug
2093 back-end, with the advantage that no DW_TAG_typedef is emitted
2094 for artificial "tagged" types in DWARF. */
2095 if (!named)
2096 TYPE_STUB_DECL (type) = type_decl;
2097
2098 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2099 back-end doesn't support, and for others if we don't need to. */
2100 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
2101 DECL_IGNORED_P (type_decl) = 1;
2102
2103 return type_decl;
2104 }
2105 \f
2106 /* Return a VAR_DECL or CONST_DECL node.
2107
2108 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
2109 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
2110 the GCC tree for an optional initial expression; NULL_TREE if none.
2111
2112 CONST_FLAG is true if this variable is constant, in which case we might
2113 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2114
2115 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2116 definition to be made visible outside of the current compilation unit, for
2117 instance variable definitions in a package specification.
2118
2119 EXTERN_FLAG is true when processing an external variable declaration (as
2120 opposed to a definition: no storage is to be allocated for the variable).
2121
2122 STATIC_FLAG is only relevant when not at top level. In that case
2123 it indicates whether to always allocate storage to the variable.
2124
2125 GNAT_NODE is used for the position of the decl. */
2126
tree
create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
		   bool const_flag, bool public_flag, bool extern_flag,
		   bool static_flag, bool const_decl_allowed_p,
		   struct attrib *attr_list, Node_Id gnat_node)
{
  /* Whether the initializer is a constant initializer.  At the global level
     or for an external object or an object to be allocated in static memory,
     we check that it is a valid constant expression for use in initializing
     a static variable; otherwise, we only check that it is constant.  */
  bool init_const
    = (var_init != 0
       && gnat_types_compatible_p (type, TREE_TYPE (var_init))
       && (global_bindings_p () || extern_flag || static_flag
	   ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
	   : TREE_CONSTANT (var_init)));

  /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
     case the initializer may be used in-lieu of the DECL node (as done in
     Identifier_to_gnu).  This is useful to prevent the need of elaboration
     code when an identifier for which such a decl is made is in turn used as
     an initializer.  We used to rely on CONST vs VAR_DECL for this purpose,
     but extra constraints apply to this choice (see below) and are not
     relevant to the distinction we wish to make. */
  bool constant_p = const_flag && init_const;

  /* The actual DECL node.  CONST_DECL was initially intended for enumerals
     and may be used for scalars in general but not for aggregates.  */
  tree var_decl
    = build_decl (input_location,
		  (constant_p && const_decl_allowed_p
		   && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
		  var_name, type);

  /* If this is external, throw away any initializations (they will be done
     elsewhere) unless this is a constant for which we would like to remain
     able to get the initializer.  If we are defining a global here, leave a
     constant initialization and save any variable elaborations for the
     elaboration routine.  If we are just annotating types, throw away the
     initialization if it isn't a constant.  */
  if ((extern_flag && !constant_p)
      || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
    var_init = NULL_TREE;

  /* At the global level, an initializer requiring code to be generated
     produces elaboration statements.  Check that such statements are allowed,
     that is, not violating a No_Elaboration_Code restriction.  */
  if (global_bindings_p () && var_init != 0 && !init_const)
    Check_Elaboration_Code_Allowed (gnat_node);

  /* Set the basic attributes of the DECL from the input flags.  Note that a
     volatile type makes the DECL both volatile and side-effecting.  */
  DECL_INITIAL (var_decl) = var_init;
  TREE_READONLY (var_decl) = const_flag;
  DECL_EXTERNAL (var_decl) = extern_flag;
  TREE_PUBLIC (var_decl) = public_flag || extern_flag;
  TREE_CONSTANT (var_decl) = constant_p;
  TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
    = TYPE_VOLATILE (type);

  /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
     try to fiddle with DECL_COMMON.  However, on platforms that don't
     support global BSS sections, uninitialized global variables would
     go in DATA instead, thus increasing the size of the executable.  */
  if (!flag_no_common
      && TREE_CODE (var_decl) == VAR_DECL
      && TREE_PUBLIC (var_decl)
      && !have_global_bss_p ())
    DECL_COMMON (var_decl) = 1;

  /* At the global binding level, we need to allocate static storage for the
     variable if it isn't external.  Otherwise, we allocate automatic storage
     unless requested not to.  */
  TREE_STATIC (var_decl)
    = !extern_flag && (static_flag || global_bindings_p ());

  /* For an external constant whose initializer is not absolute, do not emit
     debug info.  In DWARF this would mean a global relocation in a read-only
     section which runs afoul of the PE-COFF run-time relocation mechanism.  */
  if (extern_flag
      && constant_p
      && var_init
      && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
	 != null_pointer_node)
    DECL_IGNORED_P (var_decl) = 1;

  /* A side-effecting (volatile) variable must not be copied, so make it
     addressable.  */
  if (TREE_SIDE_EFFECTS (var_decl))
    TREE_ADDRESSABLE (var_decl) = 1;

  /* ??? Some attributes cannot be applied to CONST_DECLs.  */
  if (TREE_CODE (var_decl) == VAR_DECL)
    process_attributes (&var_decl, &attr_list, true, gnat_node);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (var_decl, gnat_node);

  if (TREE_CODE (var_decl) == VAR_DECL)
    {
      if (asm_name)
	SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);

      /* At the global level, emit assembler code / debug info now.  */
      if (global_bindings_p ())
	rest_of_decl_compilation (var_decl, true, 0);
    }

  return var_decl;
}
2232 \f
2233 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
2234
2235 static bool
2236 aggregate_type_contains_array_p (tree type)
2237 {
2238 switch (TREE_CODE (type))
2239 {
2240 case RECORD_TYPE:
2241 case UNION_TYPE:
2242 case QUAL_UNION_TYPE:
2243 {
2244 tree field;
2245 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2246 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2247 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2248 return true;
2249 return false;
2250 }
2251
2252 case ARRAY_TYPE:
2253 return true;
2254
2255 default:
2256 gcc_unreachable ();
2257 }
2258 }
2259
2260 /* Return a FIELD_DECL node. FIELD_NAME is the field's name, FIELD_TYPE is
2261 its type and RECORD_TYPE is the type of the enclosing record. If SIZE is
2262 nonzero, it is the specified size of the field. If POS is nonzero, it is
2263 the bit position. PACKED is 1 if the enclosing record is packed, -1 if it
2264 has Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
2265 means we are allowed to take the address of the field; if it is negative,
2266 we should not make a bitfield, which is used by make_aligning_type. */
2267
tree
create_field_decl (tree field_name, tree field_type, tree record_type,
		   tree size, tree pos, int packed, int addressable)
{
  tree field_decl = build_decl (input_location,
				FIELD_DECL, field_name, field_type);

  /* The field belongs to RECORD_TYPE and inherits its type's read-only
     status.  */
  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TYPE_READONLY (field_type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
     Likewise for an aggregate without specified position that contains an
     array, because in this case slices of variable length of this array
     must be handled by GCC and variable-sized objects need to be aligned
     to at least a byte boundary.  */
  if (packed && (TYPE_MODE (field_type) == BLKmode
		 || (!pos
		     && AGGREGATE_TYPE_P (field_type)
		     && aggregate_type_contains_array_p (field_type))))
    DECL_ALIGN (field_decl) = BITS_PER_UNIT;

  /* If a size is specified, use it.  Otherwise, if the record type is packed
     compute a size to use, which may differ from the object's natural size.
     We always set a size in this case to trigger the checks for bitfield
     creation below, which is typically required when no position has been
     specified.  */
  if (size)
    size = convert (bitsizetype, size);
  else if (packed == 1)
    {
      size = rm_size (field_type);
      if (TYPE_MODE (field_type) == BLKmode)
	size = round_up (size, BITS_PER_UNIT);
    }

  /* If we may, according to ADDRESSABLE, make a bitfield if a size is
     specified for two reasons: first if the size differs from the natural
     size.  Second, if the alignment is insufficient.  There are a number of
     ways the latter can be true.

     We never make a bitfield if the type of the field has a nonconstant size,
     because no such entity requiring bitfield operations should reach here.

     We do *preventively* make a bitfield when there might be the need for it
     but we don't have all the necessary information to decide, as is the case
     of a field with no specified position in a packed record.

     We also don't look at STRICT_ALIGNMENT here, and rely on later processing
     in layout_decl or finish_record_type to clear the bit_field indication if
     it is in fact not needed.  */
  if (addressable >= 0
      && size
      && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
      && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
	  || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
	  || packed
	  || (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      if (!packed && !pos)
	{
	  /* With no packing and no position, cap the field alignment at the
	     record alignment if the latter is smaller, else use the type's
	     own alignment.  */
	  if (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
	    DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
	  else
	    DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
	}
    }

  /* A positioned field is packed iff it ended up a bitfield above.  */
  DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;

  /* Bump the alignment if need be, either for bitfield/packing purposes or
     to satisfy the type requirements if no such consideration applies.  When
     we get the alignment from the type, indicate if this is from an explicit
     user request, which prevents stor-layout from lowering it later on.  */
  {
    unsigned int bit_align
      = (DECL_BIT_FIELD (field_decl) ? 1
	 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);

    if (bit_align > DECL_ALIGN (field_decl))
      DECL_ALIGN (field_decl) = bit_align;
    else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
      {
	DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
	DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
      }
  }

  if (pos)
    {
      /* We need to pass in the alignment the DECL is known to have.
	 This is the lowest-order bit set in POS, but no more than
	 the alignment of the record, if one is specified.  Note
	 that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      if (tree_fits_uhwi_p (pos))
	known_align = tree_to_uhwi (pos) & - tree_to_uhwi (pos);
      else
	known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
	  && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
	known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
			     tree_fits_uhwi_p (pos) ? BIGGEST_ALIGNMENT
			     : BITS_PER_UNIT);
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
		    &DECL_FIELD_BIT_OFFSET (field_decl),
		    DECL_OFFSET_ALIGN (field_decl), pos);
    }

  /* In addition to what our caller says, claim the field is addressable if we
     know that its type is not suitable.

     The field may also be "technically" nonaddressable, meaning that even if
     we attempt to take the field's address we will actually get the address
     of a copy.  This is the case for true bitfields, but the DECL_BIT_FIELD
     value we have at this point is not accurate enough, so we don't account
     for this here and let finish_record_type decide.  */
  if (!addressable && !type_for_nonaliased_component_p (field_type))
    addressable = 1;

  DECL_NONADDRESSABLE_P (field_decl) = !addressable;

  return field_decl;
}
2402 \f
2403 /* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
2404 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
2405 (either an In parameter or an address of a pass-by-ref parameter). */
2406
2407 tree
2408 create_param_decl (tree param_name, tree param_type, bool readonly)
2409 {
2410 tree param_decl = build_decl (input_location,
2411 PARM_DECL, param_name, param_type);
2412
2413 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2414 can lead to various ABI violations. */
2415 if (targetm.calls.promote_prototypes (NULL_TREE)
2416 && INTEGRAL_TYPE_P (param_type)
2417 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
2418 {
2419 /* We have to be careful about biased types here. Make a subtype
2420 of integer_type_node with the proper biasing. */
2421 if (TREE_CODE (param_type) == INTEGER_TYPE
2422 && TYPE_BIASED_REPRESENTATION_P (param_type))
2423 {
2424 tree subtype
2425 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
2426 TREE_TYPE (subtype) = integer_type_node;
2427 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
2428 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (param_type));
2429 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (param_type));
2430 param_type = subtype;
2431 }
2432 else
2433 param_type = integer_type_node;
2434 }
2435
2436 DECL_ARG_TYPE (param_decl) = param_type;
2437 TREE_READONLY (param_decl) = readonly;
2438 return param_decl;
2439 }
2440 \f
2441 /* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
2442 a TYPE. If IN_PLACE is true, the tree pointed to by NODE should not be
2443 changed. GNAT_NODE is used for the position of error messages. */
2444
void
process_attributes (tree *node, struct attrib **attr_list, bool in_place,
		    Node_Id gnat_node)
{
  struct attrib *attr;

  for (attr = *attr_list; attr; attr = attr->next)
    switch (attr->type)
      {
      case ATTR_MACHINE_ATTRIBUTE:
	/* Apply a GCC machine attribute, pointing diagnostics at the
	   source location of GNAT_NODE.  */
	Sloc_to_locus (Sloc (gnat_node), &input_location);
	decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
			 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
	break;

      case ATTR_LINK_ALIAS:
	/* Emit an assembler alias for a locally-defined entity.  */
	if (!DECL_EXTERNAL (*node))
	  {
	    TREE_STATIC (*node) = 1;
	    assemble_alias (*node, attr->name);
	  }
	break;

      case ATTR_WEAK_EXTERNAL:
	if (SUPPORTS_WEAK)
	  declare_weak (*node);
	else
	  post_error ("?weak declarations not supported on this target",
		      attr->error_point);
	break;

      case ATTR_LINK_SECTION:
	/* Place the entity in a named section; DECL_COMMON must be cleared
	   since common symbols have no section.  */
	if (targetm_common.have_named_sections)
	  {
	    DECL_SECTION_NAME (*node)
	      = build_string (IDENTIFIER_LENGTH (attr->name),
			      IDENTIFIER_POINTER (attr->name));
	    DECL_COMMON (*node) = 0;
	  }
	else
	  post_error ("?section attributes are not supported for this target",
		      attr->error_point);
	break;

      case ATTR_LINK_CONSTRUCTOR:
	DECL_STATIC_CONSTRUCTOR (*node) = 1;
	TREE_USED (*node) = 1;
	break;

      case ATTR_LINK_DESTRUCTOR:
	DECL_STATIC_DESTRUCTOR (*node) = 1;
	TREE_USED (*node) = 1;
	break;

      case ATTR_THREAD_LOCAL_STORAGE:
	/* Thread-local variables cannot be common either.  */
	DECL_TLS_MODEL (*node) = decl_default_tls_model (*node);
	DECL_COMMON (*node) = 0;
	break;
      }

  /* The list has been consumed in full.  */
  *attr_list = NULL;
}
2507 \f
2508 /* Record DECL as a global renaming pointer. */
2509
void
record_global_renaming_pointer (tree decl)
{
  /* Only decls that actually carry a renamed object belong here; loop
     parameters are excluded because they are handled separately.  */
  gcc_assert (!DECL_LOOP_PARM_P (decl) && DECL_RENAMED_OBJECT (decl));
  vec_safe_push (global_renaming_pointers, decl);
}
2516
2517 /* Invalidate the global renaming pointers that are not constant, lest their
2518 renamed object contains SAVE_EXPRs tied to an elaboration routine. Note
2519 that we should not blindly invalidate everything here because of the need
2520 to propagate constant values through renaming. */
2521
2522 void
2523 invalidate_global_renaming_pointers (void)
2524 {
2525 unsigned int i;
2526 tree iter;
2527
2528 if (global_renaming_pointers == NULL)
2529 return;
2530
2531 FOR_EACH_VEC_ELT (*global_renaming_pointers, i, iter)
2532 if (!TREE_CONSTANT (DECL_RENAMED_OBJECT (iter)))
2533 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
2534
2535 vec_free (global_renaming_pointers);
2536 }
2537
2538 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
2539 a power of 2. */
2540
2541 bool
2542 value_factor_p (tree value, HOST_WIDE_INT factor)
2543 {
2544 if (tree_fits_uhwi_p (value))
2545 return tree_to_uhwi (value) % factor == 0;
2546
2547 if (TREE_CODE (value) == MULT_EXPR)
2548 return (value_factor_p (TREE_OPERAND (value, 0), factor)
2549 || value_factor_p (TREE_OPERAND (value, 1), factor));
2550
2551 return false;
2552 }
2553
2554 /* Return VALUE scaled by the biggest power-of-2 factor of EXPR. */
2555
2556 static unsigned int
2557 scale_by_factor_of (tree expr, unsigned int value)
2558 {
2559 expr = remove_conversions (expr, true);
2560
2561 /* An expression which is a bitwise AND with a mask has a power-of-2 factor
2562 corresponding to the number of trailing zeros of the mask. */
2563 if (TREE_CODE (expr) == BIT_AND_EXPR
2564 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
2565 {
2566 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
2567 unsigned int i = 0;
2568
2569 while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)
2570 {
2571 mask >>= 1;
2572 value *= 2;
2573 i++;
2574 }
2575 }
2576
2577 return value;
2578 }
2579
2580 /* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
2581 unless we can prove these 2 fields are laid out in such a way that no gap
2582 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
2583 is the distance in bits between the end of PREV_FIELD and the starting
2584 position of CURR_FIELD. It is ignored if null. */
2585
2586 static bool
2587 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
2588 {
2589 /* If this is the first field of the record, there cannot be any gap */
2590 if (!prev_field)
2591 return false;
2592
2593 /* If the previous field is a union type, then return false: The only
2594 time when such a field is not the last field of the record is when
2595 there are other components at fixed positions after it (meaning there
2596 was a rep clause for every field), in which case we don't want the
2597 alignment constraint to override them. */
2598 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
2599 return false;
2600
2601 /* If the distance between the end of prev_field and the beginning of
2602 curr_field is constant, then there is a gap if the value of this
2603 constant is not null. */
2604 if (offset && tree_fits_uhwi_p (offset))
2605 return !integer_zerop (offset);
2606
2607 /* If the size and position of the previous field are constant,
2608 then check the sum of this size and position. There will be a gap
2609 iff it is not multiple of the current field alignment. */
2610 if (tree_fits_uhwi_p (DECL_SIZE (prev_field))
2611 && tree_fits_uhwi_p (bit_position (prev_field)))
2612 return ((tree_to_uhwi (bit_position (prev_field))
2613 + tree_to_uhwi (DECL_SIZE (prev_field)))
2614 % DECL_ALIGN (curr_field) != 0);
2615
2616 /* If both the position and size of the previous field are multiples
2617 of the current field alignment, there cannot be any gap. */
2618 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
2619 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
2620 return false;
2621
2622 /* Fallback, return that there may be a potential gap */
2623 return true;
2624 }
2625
2626 /* Return a LABEL_DECL with LABEL_NAME. GNAT_NODE is used for the position
2627 of the decl. */
2628
2629 tree
2630 create_label_decl (tree label_name, Node_Id gnat_node)
2631 {
2632 tree label_decl
2633 = build_decl (input_location, LABEL_DECL, label_name, void_type_node);
2634
2635 DECL_MODE (label_decl) = VOIDmode;
2636
2637 /* Add this decl to the current binding level. */
2638 gnat_pushdecl (label_decl, gnat_node);
2639
2640 return label_decl;
2641 }
2642 \f
/* Return a FUNCTION_DECL node.  SUBPROG_NAME is the name of the subprogram,
   ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
   node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
   PARM_DECL nodes chained through the DECL_CHAIN field).

   INLINE_STATUS, PUBLIC_FLAG, EXTERN_FLAG, ARTIFICIAL_FLAG and ATTR_LIST are
   used to set the appropriate fields in the FUNCTION_DECL.  GNAT_NODE is
   used for the position of the decl.  */

tree
create_subprog_decl (tree subprog_name, tree asm_name, tree subprog_type,
		     tree param_decl_list, enum inline_status_t inline_status,
		     bool public_flag, bool extern_flag, bool artificial_flag,
		     struct attrib *attr_list, Node_Id gnat_node)
{
  tree subprog_decl = build_decl (input_location, FUNCTION_DECL, subprog_name,
				  subprog_type);
  tree result_decl = build_decl (input_location, RESULT_DECL, NULL_TREE,
				 TREE_TYPE (subprog_type));
  DECL_ARGUMENTS (subprog_decl) = param_decl_list;

  /* If this is a non-inline function nested inside an inlined external
     function, we cannot honor both requests without cloning the nested
     function in the current unit since it is private to the other unit.
     We could inline the nested function as well but it's probably better
     to err on the side of too little inlining.  */
  if (inline_status != is_enabled
      && !public_flag
      && current_function_decl
      && DECL_DECLARED_INLINE_P (current_function_decl)
      && DECL_EXTERNAL (current_function_decl))
    DECL_DECLARED_INLINE_P (current_function_decl) = 0;

  DECL_ARTIFICIAL (subprog_decl) = artificial_flag;
  DECL_EXTERNAL (subprog_decl) = extern_flag;

  /* Map the three-valued inline request onto the FUNCTION_DECL flags.  */
  switch (inline_status)
    {
    case is_suppressed:
      DECL_UNINLINABLE (subprog_decl) = 1;
      break;

    case is_disabled:
      break;

    case is_enabled:
      DECL_DECLARED_INLINE_P (subprog_decl) = 1;
      /* Do not warn about failure to inline compiler-generated subprograms.  */
      DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_flag;
      break;

    default:
      gcc_unreachable ();
    }

  TREE_PUBLIC (subprog_decl) = public_flag;
  TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
  TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
  TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);

  /* The result decl is compiler-generated and not user-visible.  */
  DECL_ARTIFICIAL (result_decl) = 1;
  DECL_IGNORED_P (result_decl) = 1;
  DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (subprog_type);
  DECL_RESULT (subprog_decl) = result_decl;

  if (asm_name)
    {
      SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);

      /* The expand_main_function circuitry expects "main_identifier_node" to
	 designate the DECL_NAME of the 'main' entry point, in turn expected
	 to be declared as the "main" function literally by default.  Ada
	 program entry points are typically declared with a different name
	 within the binder generated file, exported as 'main' to satisfy the
	 system expectations.  Force main_identifier_node in this case.  */
      if (asm_name == main_identifier_node)
	DECL_NAME (subprog_decl) = main_identifier_node;
    }

  process_attributes (&subprog_decl, &attr_list, true, gnat_node);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (subprog_decl, gnat_node);

  /* Output the assembler code and/or RTL for the declaration.  */
  rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);

  return subprog_decl;
}
2731 \f
2732 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
2733 body. This routine needs to be invoked before processing the declarations
2734 appearing in the subprogram. */
2735
2736 void
2737 begin_subprog_body (tree subprog_decl)
2738 {
2739 tree param_decl;
2740
2741 announce_function (subprog_decl);
2742
2743 /* This function is being defined. */
2744 TREE_STATIC (subprog_decl) = 1;
2745
2746 current_function_decl = subprog_decl;
2747
2748 /* Enter a new binding level and show that all the parameters belong to
2749 this function. */
2750 gnat_pushlevel ();
2751
2752 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
2753 param_decl = DECL_CHAIN (param_decl))
2754 DECL_CONTEXT (param_decl) = subprog_decl;
2755
2756 make_decl_rtl (subprog_decl);
2757 }
2758
/* Finish translating the current subprogram and set its BODY.  */

void
end_subprog_body (tree body)
{
  tree fndecl = current_function_decl;

  /* Attach the BLOCK for this level to the function and pop the level.  */
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  gnat_poplevel ();

  /* Mark the RESULT_DECL as being in this subprogram. */
  DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;

  /* The body should be a BIND_EXPR whose BLOCK is the top-level one.  */
  if (TREE_CODE (body) == BIND_EXPR)
    {
      BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
      /* Overrides the block installed above with the one carried by BODY.  */
      DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
    }

  DECL_SAVED_TREE (fndecl) = body;

  /* Pop back to the enclosing function, or to NULL_TREE at top level.  */
  current_function_decl = decl_function_context (fndecl);
}
2785
2786 /* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
2787
2788 void
2789 rest_of_subprog_body_compilation (tree subprog_decl)
2790 {
2791 /* We cannot track the location of errors past this point. */
2792 error_gnat_node = Empty;
2793
2794 /* If we're only annotating types, don't actually compile this function. */
2795 if (type_annotate_only)
2796 return;
2797
2798 /* Dump functions before gimplification. */
2799 dump_function (TDI_original, subprog_decl);
2800
2801 if (!decl_function_context (subprog_decl))
2802 cgraph_finalize_function (subprog_decl, false);
2803 else
2804 /* Register this function with cgraph just far enough to get it
2805 added to our parent's nested function list. */
2806 (void) cgraph_get_create_node (subprog_decl);
2807 }
2808
2809 tree
2810 gnat_builtin_function (tree decl)
2811 {
2812 gnat_pushdecl (decl, Empty);
2813 return decl;
2814 }
2815
2816 /* Return an integer type with the number of bits of precision given by
2817 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2818 it is a signed type. */
2819
2820 tree
2821 gnat_type_for_size (unsigned precision, int unsignedp)
2822 {
2823 tree t;
2824 char type_name[20];
2825
2826 if (precision <= 2 * MAX_BITS_PER_WORD
2827 && signed_and_unsigned_types[precision][unsignedp])
2828 return signed_and_unsigned_types[precision][unsignedp];
2829
2830 if (unsignedp)
2831 t = make_unsigned_type (precision);
2832 else
2833 t = make_signed_type (precision);
2834
2835 if (precision <= 2 * MAX_BITS_PER_WORD)
2836 signed_and_unsigned_types[precision][unsignedp] = t;
2837
2838 if (!TYPE_NAME (t))
2839 {
2840 sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
2841 TYPE_NAME (t) = get_identifier (type_name);
2842 }
2843
2844 return t;
2845 }
2846
2847 /* Likewise for floating-point types. */
2848
2849 static tree
2850 float_type_for_precision (int precision, enum machine_mode mode)
2851 {
2852 tree t;
2853 char type_name[20];
2854
2855 if (float_types[(int) mode])
2856 return float_types[(int) mode];
2857
2858 float_types[(int) mode] = t = make_node (REAL_TYPE);
2859 TYPE_PRECISION (t) = precision;
2860 layout_type (t);
2861
2862 gcc_assert (TYPE_MODE (t) == mode);
2863 if (!TYPE_NAME (t))
2864 {
2865 sprintf (type_name, "FLOAT_%d", precision);
2866 TYPE_NAME (t) = get_identifier (type_name);
2867 }
2868
2869 return t;
2870 }
2871
2872 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2873 an unsigned type; otherwise a signed type is returned. */
2874
2875 tree
2876 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2877 {
2878 if (mode == BLKmode)
2879 return NULL_TREE;
2880
2881 if (mode == VOIDmode)
2882 return void_type_node;
2883
2884 if (COMPLEX_MODE_P (mode))
2885 return NULL_TREE;
2886
2887 if (SCALAR_FLOAT_MODE_P (mode))
2888 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2889
2890 if (SCALAR_INT_MODE_P (mode))
2891 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2892
2893 if (VECTOR_MODE_P (mode))
2894 {
2895 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2896 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
2897 if (inner_type)
2898 return build_vector_type_for_mode (inner_type, mode);
2899 }
2900
2901 return NULL_TREE;
2902 }
2903
2904 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2905
2906 tree
2907 gnat_unsigned_type (tree type_node)
2908 {
2909 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2910
2911 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2912 {
2913 type = copy_node (type);
2914 TREE_TYPE (type) = type_node;
2915 }
2916 else if (TREE_TYPE (type_node)
2917 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2918 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2919 {
2920 type = copy_node (type);
2921 TREE_TYPE (type) = TREE_TYPE (type_node);
2922 }
2923
2924 return type;
2925 }
2926
2927 /* Return the signed version of a TYPE_NODE, a scalar type. */
2928
2929 tree
2930 gnat_signed_type (tree type_node)
2931 {
2932 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2933
2934 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2935 {
2936 type = copy_node (type);
2937 TREE_TYPE (type) = type_node;
2938 }
2939 else if (TREE_TYPE (type_node)
2940 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2941 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2942 {
2943 type = copy_node (type);
2944 TREE_TYPE (type) = TREE_TYPE (type_node);
2945 }
2946
2947 return type;
2948 }
2949
/* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
   transparently converted to each other.  The return value is an int
   used as a boolean.  */

int
gnat_types_compatible_p (tree t1, tree t2)
{
  enum tree_code code;

  /* This is the default criterion.  */
  if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
    return 1;

  /* We only check structural equivalence here.  */
  if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
    return 0;

  /* Vector types are also compatible if they have the same number of subparts
     and the same form of (scalar) element type.  */
  if (code == VECTOR_TYPE
      && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
      && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
      && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
    return 1;

  /* Array types are also compatible if they are constrained and have the same
     domain(s) and the same component type.  */
  if (code == ARRAY_TYPE
      && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
	  || (TYPE_DOMAIN (t1)
	      && TYPE_DOMAIN (t2)
	      && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
				     TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
	      && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
				     TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
      /* The component types must be identical, or nested arrays that are
	 themselves structurally compatible.  */
      && (TREE_TYPE (t1) == TREE_TYPE (t2)
	  || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
	      && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))))
    return 1;

  return 0;
}
2991
2992 /* Return true if EXPR is a useless type conversion. */
2993
2994 bool
2995 gnat_useless_type_conversion (tree expr)
2996 {
2997 if (CONVERT_EXPR_P (expr)
2998 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
2999 || TREE_CODE (expr) == NON_LVALUE_EXPR)
3000 return gnat_types_compatible_p (TREE_TYPE (expr),
3001 TREE_TYPE (TREE_OPERAND (expr, 0)));
3002
3003 return false;
3004 }
3005
3006 /* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
3007
3008 bool
3009 fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
3010 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
3011 {
3012 return TYPE_CI_CO_LIST (t) == cico_list
3013 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
3014 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
3015 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
3016 }
3017 \f
/* EXP is an expression for the size of an object.  If this size contains
   discriminant references, replace them with the maximum (if MAX_P) or
   minimum (if !MAX_P) possible value of the discriminant.

   The rewriting recurses over EXP; note that the direction (max vs min)
   is flipped across NEGATE_EXPR and for the second operand of MINUS_EXPR
   so that the bound stays on the intended side.  */

tree
max_size (tree exp, bool max_p)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
    case tcc_constant:
      return exp;

    case tcc_vl_exp:
      if (code == CALL_EXPR)
	{
	  tree t, *argarray;
	  int n, i;

	  /* Try to inline the call first so the discriminant reference
	     becomes visible and can be substituted.  */
	  t = maybe_inline_call_in_expr (exp);
	  if (t)
	    return max_size (t, max_p);

	  /* Otherwise rebuild the call with each argument rewritten.  */
	  n = call_expr_nargs (exp);
	  gcc_assert (n > 0);
	  argarray = XALLOCAVEC (tree, n);
	  for (i = 0; i < n; i++)
	    argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
	  return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
	}
      break;

    case tcc_reference:
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
	 modify.  Otherwise, we treat it like a variable.  */
      if (!CONTAINS_PLACEHOLDER_P (exp))
	return exp;

      /* This is a discriminant reference: replace it with the extremum of
	 the discriminant's type.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      return
	max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);

    case tcc_comparison:
      return max_p ? size_one_node : size_zero_node;

    case tcc_unary:
      if (code == NON_LVALUE_EXPR)
	return max_size (TREE_OPERAND (exp, 0), max_p);

      return fold_build1 (code, type,
			  max_size (TREE_OPERAND (exp, 0),
				    code == NEGATE_EXPR ? !max_p : max_p));

    case tcc_binary:
      {
	tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
	tree rhs = max_size (TREE_OPERAND (exp, 1),
			     code == MINUS_EXPR ? !max_p : max_p);

	/* Special-case wanting the maximum value of a MIN_EXPR.
	   In that case, if one side overflows, return the other.  */
	if (max_p && code == MIN_EXPR)
	  {
	    if (TREE_CODE (rhs) == INTEGER_CST && TREE_OVERFLOW (rhs))
	      return lhs;

	    if (TREE_CODE (lhs) == INTEGER_CST && TREE_OVERFLOW (lhs))
	      return rhs;
	  }

	/* Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
	   overflowing and the RHS a variable.  */
	if ((code == MINUS_EXPR || code == PLUS_EXPR)
	    && TREE_CODE (lhs) == INTEGER_CST
	    && TREE_OVERFLOW (lhs)
	    && !TREE_CONSTANT (rhs))
	  return lhs;

	return size_binop (code, lhs, rhs);
      }

    case tcc_expression:
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  if (code == SAVE_EXPR)
	    return exp;

	  return fold_build1 (code, type,
			      max_size (TREE_OPERAND (exp, 0), max_p));

	case 2:
	  if (code == COMPOUND_EXPR)
	    return max_size (TREE_OPERAND (exp, 1), max_p);

	  return fold_build2 (code, type,
			      max_size (TREE_OPERAND (exp, 0), max_p),
			      max_size (TREE_OPERAND (exp, 1), max_p));

	case 3:
	  if (code == COND_EXPR)
	    return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
				max_size (TREE_OPERAND (exp, 1), max_p),
				max_size (TREE_OPERAND (exp, 2), max_p));

	default:
	  break;
	}

      /* Other tree classes cannot happen.  */
    default:
      break;
    }

  gcc_unreachable ();
}
3137 \f
/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
   EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
   Return a constructor for the template.  */

tree
build_template (tree template_type, tree array_type, tree expr)
{
  vec<constructor_elt, va_gc> *template_elts = NULL;
  tree bound_list = NULL_TREE;
  tree field;

  /* Strip padding and justified-modular wrappers to get at the array.  */
  while (TREE_CODE (array_type) == RECORD_TYPE
	 && (TYPE_PADDING_P (array_type)
	     || TYPE_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
	  && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.  Go down the
     field list of the template instead of the type chain because this
     array might be an Ada array of arrays and we can't tell where the
     nested arrays stop being the underlying object.  */

  /* NOTE: the loop increment advances two template fields (min/max pair)
     per iteration while stepping either the bound list or the nested
     array type, whichever is the source of bounds.  */
  for (field = TYPE_FIELDS (template_type); field;
       (bound_list
	? (bound_list = TREE_CHAIN (bound_list))
	: (array_type = TREE_TYPE (array_type))),
       field = DECL_CHAIN (DECL_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
	 for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
	 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
	 This will give us a maximum range.  */
      if (bound_list)
	bounds = TREE_VALUE (bound_list);
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
	bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
      else if (expr && TREE_CODE (expr) == PARM_DECL
	       && DECL_BY_COMPONENT_PTR_P (expr))
	bounds = TREE_TYPE (field);
      else
	gcc_unreachable ();

      min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
	 substitute it from OBJECT.  */
      min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
      max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);

      CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
      CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
    }

  return gnat_build_constructor (template_type, template_elts);
}
3200 \f
3201 /* Return true if TYPE is suitable for the element type of a vector. */
3202
3203 static bool
3204 type_for_vector_element_p (tree type)
3205 {
3206 enum machine_mode mode;
3207
3208 if (!INTEGRAL_TYPE_P (type)
3209 && !SCALAR_FLOAT_TYPE_P (type)
3210 && !FIXED_POINT_TYPE_P (type))
3211 return false;
3212
3213 mode = TYPE_MODE (type);
3214 if (GET_MODE_CLASS (mode) != MODE_INT
3215 && !SCALAR_FLOAT_MODE_P (mode)
3216 && !ALL_SCALAR_FIXED_POINT_MODE_P (mode))
3217 return false;
3218
3219 return true;
3220 }
3221
3222 /* Return a vector type given the SIZE and the INNER_TYPE, or NULL_TREE if
3223 this is not possible. If ATTRIBUTE is non-zero, we are processing the
3224 attribute declaration and want to issue error messages on failure. */
3225
3226 static tree
3227 build_vector_type_for_size (tree inner_type, tree size, tree attribute)
3228 {
3229 unsigned HOST_WIDE_INT size_int, inner_size_int;
3230 int nunits;
3231
3232 /* Silently punt on variable sizes. We can't make vector types for them,
3233 need to ignore them on front-end generated subtypes of unconstrained
3234 base types, and this attribute is for binding implementors, not end
3235 users, so we should never get there from legitimate explicit uses. */
3236 if (!tree_fits_uhwi_p (size))
3237 return NULL_TREE;
3238 size_int = tree_to_uhwi (size);
3239
3240 if (!type_for_vector_element_p (inner_type))
3241 {
3242 if (attribute)
3243 error ("invalid element type for attribute %qs",
3244 IDENTIFIER_POINTER (attribute));
3245 return NULL_TREE;
3246 }
3247 inner_size_int = tree_to_uhwi (TYPE_SIZE_UNIT (inner_type));
3248
3249 if (size_int % inner_size_int)
3250 {
3251 if (attribute)
3252 error ("vector size not an integral multiple of component size");
3253 return NULL_TREE;
3254 }
3255
3256 if (size_int == 0)
3257 {
3258 if (attribute)
3259 error ("zero vector size");
3260 return NULL_TREE;
3261 }
3262
3263 nunits = size_int / inner_size_int;
3264 if (nunits & (nunits - 1))
3265 {
3266 if (attribute)
3267 error ("number of components of vector not a power of two");
3268 return NULL_TREE;
3269 }
3270
3271 return build_vector_type (inner_type, nunits);
3272 }
3273
3274 /* Return a vector type whose representative array type is ARRAY_TYPE, or
3275 NULL_TREE if this is not possible. If ATTRIBUTE is non-zero, we are
3276 processing the attribute and want to issue error messages on failure. */
3277
3278 static tree
3279 build_vector_type_for_array (tree array_type, tree attribute)
3280 {
3281 tree vector_type = build_vector_type_for_size (TREE_TYPE (array_type),
3282 TYPE_SIZE_UNIT (array_type),
3283 attribute);
3284 if (!vector_type)
3285 return NULL_TREE;
3286
3287 TYPE_REPRESENTATIVE_ARRAY (vector_type) = array_type;
3288 return vector_type;
3289 }
3290 \f
3291 /* Helper routine to make a descriptor field. FIELD_LIST is the list of decls
3292 being built; the new decl is chained on to the front of the list. */
3293
3294 static tree
3295 make_descriptor_field (const char *name, tree type, tree rec_type,
3296 tree initial, tree field_list)
3297 {
3298 tree field
3299 = create_field_decl (get_identifier (name), type, rec_type, NULL_TREE,
3300 NULL_TREE, 0, 0);
3301
3302 DECL_INITIAL (field) = initial;
3303 DECL_CHAIN (field) = field_list;
3304 return field;
3305 }
3306
/* Build a 32-bit VMS descriptor from a Mechanism_Type, which must specify a
   descriptor type, and the GCC type of an object.  Each FIELD_DECL in the
   type contains in its DECL_INITIAL the expression to use when a constructor
   is made for the type.  GNAT_ENTITY is an entity used to print out an error
   message if the mechanism cannot be applied to an object of that type and
   also for the name.

   NOTE(review): the DTYPE and CLASS numeric codes assigned below presumably
   follow the OpenVMS calling standard descriptor encoding — confirm against
   that specification when modifying them.  */

tree
build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
{
  tree record_type = make_node (RECORD_TYPE);
  tree pointer32_type, pointer64_type;
  tree field_list = NULL_TREE;
  int klass, ndim, i, dtype = 0;
  tree inner_type, tem;
  tree *idx_arr;

  /* If TYPE is an unconstrained array, use the underlying array type.  */
  if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));

  /* If this is an array, compute the number of dimensions in the array,
     get the index types, and point to the inner type.  */
  if (TREE_CODE (type) != ARRAY_TYPE)
    ndim = 0;
  else
    for (ndim = 1, inner_type = type;
	 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
	 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
	 ndim++, inner_type = TREE_TYPE (inner_type))
      ;

  idx_arr = XALLOCAVEC (tree, ndim);

  /* Collect the index types, in reverse order for Fortran convention
     arrays (column-major) except for the NCA descriptors.  */
  if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
      && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
    for (i = ndim - 1, inner_type = type;
	 i >= 0;
	 i--, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);
  else
    for (i = 0, inner_type = type;
	 i < ndim;
	 i++, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);

  /* Now get the DTYPE value.  */
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      if (TYPE_VAX_FLOATING_POINT_P (type))
	switch (tree_to_uhwi (TYPE_DIGITS_VALUE (type)))
	  {
	  case 6:
	    dtype = 10;
	    break;
	  case 9:
	    dtype = 11;
	    break;
	  case 15:
	    dtype = 27;
	    break;
	  }
      else
	switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
	  {
	  case 8:
	    dtype = TYPE_UNSIGNED (type) ? 2 : 6;
	    break;
	  case 16:
	    dtype = TYPE_UNSIGNED (type) ? 3 : 7;
	    break;
	  case 32:
	    dtype = TYPE_UNSIGNED (type) ? 4 : 8;
	    break;
	  case 64:
	    dtype = TYPE_UNSIGNED (type) ? 5 : 9;
	    break;
	  case 128:
	    dtype = TYPE_UNSIGNED (type) ? 25 : 26;
	    break;
	  }
      break;

    case REAL_TYPE:
      dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
      break;

    case COMPLEX_TYPE:
      if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
	  && TYPE_VAX_FLOATING_POINT_P (type))
	switch (tree_to_uhwi (TYPE_DIGITS_VALUE (type)))
	  {
	  case 6:
	    dtype = 12;
	    break;
	  case 9:
	    dtype = 13;
	    break;
	  case 15:
	    dtype = 29;
	  }
      else
	dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
      break;

    case ARRAY_TYPE:
      dtype = 14;
      break;

    default:
      break;
    }

  /* Get the CLASS value.  */
  switch (mech)
    {
    case By_Descriptor_A:
    case By_Short_Descriptor_A:
      klass = 4;
      break;
    case By_Descriptor_NCA:
    case By_Short_Descriptor_NCA:
      klass = 10;
      break;
    case By_Descriptor_SB:
    case By_Short_Descriptor_SB:
      klass = 15;
      break;
    case By_Descriptor:
    case By_Short_Descriptor:
    case By_Descriptor_S:
    case By_Short_Descriptor_S:
    default:
      klass = 1;
      break;
    }

  /* Make the type for a descriptor for VMS.  The first four fields are the
     same for all types.  */
  field_list
    = make_descriptor_field ("LENGTH", gnat_type_for_size (16, 1), record_type,
			     size_in_bytes ((mech == By_Descriptor_A
					     || mech == By_Short_Descriptor_A)
					    ? inner_type : type),
			     field_list);
  field_list
    = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1), record_type,
			     size_int (dtype), field_list);
  field_list
    = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1), record_type,
			     size_int (klass), field_list);

  pointer32_type = build_pointer_type_for_mode (type, SImode, false);
  pointer64_type = build_pointer_type_for_mode (type, DImode, false);

  /* Ensure that only 32-bit pointers are passed in 32-bit descriptors.  Note
     that we cannot build a template call to the CE routine as it would get a
     wrong source location; instead we use a second placeholder for it.  */
  tem = build_unary_op (ADDR_EXPR, pointer64_type,
			build0 (PLACEHOLDER_EXPR, type));
  tem = build3 (COND_EXPR, pointer32_type,
		/* The overflow test is only needed when the address space
		   is wider than 32 bits.  */
		Pmode != SImode
		? build_binary_op (GE_EXPR, boolean_type_node, tem,
				   build_int_cstu (pointer64_type, 0x80000000))
		: boolean_false_node,
		build0 (PLACEHOLDER_EXPR, void_type_node),
		convert (pointer32_type, tem));

  field_list
    = make_descriptor_field ("POINTER", pointer32_type, record_type, tem,
			     field_list);

  /* Append the mechanism-specific fields.  */
  switch (mech)
    {
    case By_Descriptor:
    case By_Short_Descriptor:
    case By_Descriptor_S:
    case By_Short_Descriptor_S:
      break;

    case By_Descriptor_SB:
    case By_Short_Descriptor_SB:
      field_list
	= make_descriptor_field ("SB_L1", gnat_type_for_size (32, 1),
				 record_type,
				 (TREE_CODE (type) == ARRAY_TYPE
				  ? TYPE_MIN_VALUE (TYPE_DOMAIN (type))
				  : size_zero_node),
				 field_list);
      field_list
	= make_descriptor_field ("SB_U1", gnat_type_for_size (32, 1),
				 record_type,
				 (TREE_CODE (type) == ARRAY_TYPE
				  ? TYPE_MAX_VALUE (TYPE_DOMAIN (type))
				  : size_zero_node),
				 field_list);
      break;

    case By_Descriptor_A:
    case By_Short_Descriptor_A:
    case By_Descriptor_NCA:
    case By_Short_Descriptor_NCA:
      field_list
	= make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
				 record_type, size_zero_node, field_list);

      field_list
	= make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
				 record_type, size_zero_node, field_list);

      field_list
	= make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
				 record_type,
				 size_int ((mech == By_Descriptor_NCA
					    || mech == By_Short_Descriptor_NCA)
					   ? 0
					   /* Set FL_COLUMN, FL_COEFF, and
					      FL_BOUNDS.  */
					   : (TREE_CODE (type) == ARRAY_TYPE
					      && TYPE_CONVENTION_FORTRAN_P
						 (type)
					     ? 224 : 192)),
				 field_list);

      field_list
	= make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
				 record_type, size_int (ndim), field_list);

      field_list
	= make_descriptor_field ("ARSIZE", gnat_type_for_size (32, 1),
				 record_type, size_in_bytes (type),
				 field_list);

      /* Now build a pointer to the 0,0,0... element.  */
      tem = build0 (PLACEHOLDER_EXPR, type);
      for (i = 0, inner_type = type; i < ndim;
	   i++, inner_type = TREE_TYPE (inner_type))
	tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
		      convert (TYPE_DOMAIN (inner_type), size_zero_node),
		      NULL_TREE, NULL_TREE);

      field_list
	= make_descriptor_field ("A0", pointer32_type, record_type,
				 build1 (ADDR_EXPR, pointer32_type, tem),
				 field_list);

      /* Next come the addressing coefficients.  */
      tem = size_one_node;
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];
	  tree idx_length
	    = size_binop (MULT_EXPR, tem,
			  size_binop (PLUS_EXPR,
				      size_binop (MINUS_EXPR,
						  TYPE_MAX_VALUE (idx_arr[i]),
						  TYPE_MIN_VALUE (idx_arr[i])),
				      size_int (1)));

	  fname[0] = ((mech == By_Descriptor_NCA ||
		       mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
	  fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = make_descriptor_field (fname, gnat_type_for_size (32, 1),
				     record_type, idx_length, field_list);

	  if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
	    tem = idx_length;
	}

      /* Finally here are the bounds.  */
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];

	  fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = make_descriptor_field (fname, gnat_type_for_size (32, 1),
				     record_type, TYPE_MIN_VALUE (idx_arr[i]),
				     field_list);

	  fname[0] = 'U';
	  field_list
	    = make_descriptor_field (fname, gnat_type_for_size (32, 1),
				     record_type, TYPE_MAX_VALUE (idx_arr[i]),
				     field_list);
	}
      break;

    default:
      post_error ("unsupported descriptor type for &", gnat_entity);
    }

  /* The fields were chained in reverse order, so flip the list.  */
  TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC");
  finish_record_type (record_type, nreverse (field_list), 0, false);
  return record_type;
}
3607
/* Build a 64-bit VMS descriptor from a Mechanism_Type, which must specify a
   descriptor type, and the GCC type of an object.  Each FIELD_DECL in the
   type contains in its DECL_INITIAL the expression to use when a constructor
   is made for the type.  GNAT_ENTITY is an entity used to print out an error
   message if the mechanism cannot be applied to an object of that type and
   also for the name.

   The returned RECORD_TYPE mirrors the native VMS 64-bit descriptor layout:
   a fixed 6-field prologue (MBO, DTYPE, CLASS, MBMO, LENGTH, POINTER)
   followed by class-specific fields selected on MECH.  */

tree
build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
{
  tree record_type = make_node (RECORD_TYPE);
  tree pointer64_type;
  tree field_list = NULL_TREE;
  int klass, ndim, i, dtype = 0;
  tree inner_type, tem;
  tree *idx_arr;

  /* If TYPE is an unconstrained array, use the underlying array type.  */
  if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));

  /* If this is an array, compute the number of dimensions in the array,
     get the index types, and point to the inner type.  */
  if (TREE_CODE (type) != ARRAY_TYPE)
    ndim = 0;
  else
    for (ndim = 1, inner_type = type;
	 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
	 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
	 ndim++, inner_type = TREE_TYPE (inner_type))
      ;

  idx_arr = XALLOCAVEC (tree, ndim);

  /* Collect the index types, outermost dimension first for Fortran
     (column-major) arrays unless an NCA descriptor was requested,
     innermost first otherwise.  */
  if (mech != By_Descriptor_NCA
      && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
    for (i = ndim - 1, inner_type = type;
	 i >= 0;
	 i--, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);
  else
    for (i = 0, inner_type = type;
	 i < ndim;
	 i++, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);

  /* Now get the DTYPE value.  These numeric codes identify the element's
     atomic data type in the descriptor; presumably they follow the OpenVMS
     DSC$K_DTYPE_* encoding -- NOTE(review): confirm against the VMS calling
     standard.  DTYPE stays 0 for type codes not handled below.  */
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      if (TYPE_VAX_FLOATING_POINT_P (type))
	/* VAX floating-point: discriminate on the decimal digit count.  */
	switch (tree_to_uhwi (TYPE_DIGITS_VALUE (type)))
	  {
	  case 6:
	    dtype = 10;
	    break;
	  case 9:
	    dtype = 11;
	    break;
	  case 15:
	    dtype = 27;
	    break;
	  }
      else
	/* Plain integers: discriminate on machine-mode width and
	   signedness.  */
	switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
	  {
	  case 8:
	    dtype = TYPE_UNSIGNED (type) ? 2 : 6;
	    break;
	  case 16:
	    dtype = TYPE_UNSIGNED (type) ? 3 : 7;
	    break;
	  case 32:
	    dtype = TYPE_UNSIGNED (type) ? 4 : 8;
	    break;
	  case 64:
	    dtype = TYPE_UNSIGNED (type) ? 5 : 9;
	    break;
	  case 128:
	    dtype = TYPE_UNSIGNED (type) ? 25 : 26;
	    break;
	  }
      break;

    case REAL_TYPE:
      dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
      break;

    case COMPLEX_TYPE:
      if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
	  && TYPE_VAX_FLOATING_POINT_P (type))
	switch (tree_to_uhwi (TYPE_DIGITS_VALUE (type)))
	  {
	  case 6:
	    dtype = 12;
	    break;
	  case 9:
	    dtype = 13;
	    break;
	  case 15:
	    dtype = 29;
	  }
      else
	dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
      break;

    case ARRAY_TYPE:
      dtype = 14;
      break;

    default:
      break;
    }

  /* Get the CLASS value.  The codes match the iklass values decoded in
     convert_vms_descriptor64: 1 = class S, 4 = class A, 10 = class NCA,
     15 = class SB.  */
  switch (mech)
    {
    case By_Descriptor_A:
      klass = 4;
      break;
    case By_Descriptor_NCA:
      klass = 10;
      break;
    case By_Descriptor_SB:
      klass = 15;
      break;
    case By_Descriptor:
    case By_Descriptor_S:
    default:
      klass = 1;
      break;
    }

  /* Make the type for a 64-bit descriptor for VMS.  The first six fields
     are the same for all types.  Fields are prepended to FIELD_LIST and
     put back in declaration order by the nreverse call at the end.  */
  field_list
    = make_descriptor_field ("MBO", gnat_type_for_size (16, 1),
			     record_type, size_int (1), field_list);
  field_list
    = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1),
			     record_type, size_int (dtype), field_list);
  field_list
    = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1),
			     record_type, size_int (klass), field_list);
  field_list
    = make_descriptor_field ("MBMO", gnat_type_for_size (32, 1),
			     record_type, size_int (-1), field_list);
  field_list
    = make_descriptor_field ("LENGTH", gnat_type_for_size (64, 1),
			     record_type,
			     size_in_bytes (mech == By_Descriptor_A
					    ? inner_type : type),
			     field_list);

  pointer64_type = build_pointer_type_for_mode (type, DImode, false);

  field_list
    = make_descriptor_field ("POINTER", pointer64_type, record_type,
			     build_unary_op (ADDR_EXPR, pointer64_type,
					     build0 (PLACEHOLDER_EXPR, type)),
			     field_list);

  /* Now append the class-specific fields.  */
  switch (mech)
    {
    case By_Descriptor:
    case By_Descriptor_S:
      break;

    case By_Descriptor_SB:
      field_list
	= make_descriptor_field ("SB_L1", gnat_type_for_size (64, 1),
				 record_type,
				 (TREE_CODE (type) == ARRAY_TYPE
				  ? TYPE_MIN_VALUE (TYPE_DOMAIN (type))
				  : size_zero_node),
				 field_list);
      field_list
	= make_descriptor_field ("SB_U1", gnat_type_for_size (64, 1),
				 record_type,
				 (TREE_CODE (type) == ARRAY_TYPE
				  ? TYPE_MAX_VALUE (TYPE_DOMAIN (type))
				  : size_zero_node),
				 field_list);
      break;

    case By_Descriptor_A:
    case By_Descriptor_NCA:
      field_list
	= make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
				 record_type, size_zero_node, field_list);

      field_list
	= make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
				 record_type, size_zero_node, field_list);

      /* Note: DTYPE is reused here as a scratch variable holding the
	 AFLAGS value; its DTYPE role is finished at this point.  */
      dtype = (mech == By_Descriptor_NCA
	       ? 0
	       /* Set FL_COLUMN, FL_COEFF, and
		  FL_BOUNDS.  */
	       : (TREE_CODE (type) == ARRAY_TYPE
		  && TYPE_CONVENTION_FORTRAN_P (type)
		  ? 224 : 192));
      field_list
	= make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
				 record_type, size_int (dtype),
				 field_list);

      field_list
	= make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
				 record_type, size_int (ndim), field_list);

      field_list
	= make_descriptor_field ("MBZ", gnat_type_for_size (32, 1),
				 record_type, size_int (0), field_list);
      field_list
	= make_descriptor_field ("ARSIZE", gnat_type_for_size (64, 1),
				 record_type, size_in_bytes (type),
				 field_list);

      /* Now build a pointer to the 0,0,0... element.  */
      tem = build0 (PLACEHOLDER_EXPR, type);
      for (i = 0, inner_type = type; i < ndim;
	   i++, inner_type = TREE_TYPE (inner_type))
	tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
		      convert (TYPE_DOMAIN (inner_type), size_zero_node),
		      NULL_TREE, NULL_TREE);

      field_list
	= make_descriptor_field ("A0", pointer64_type, record_type,
				 build1 (ADDR_EXPR, pointer64_type, tem),
				 field_list);

      /* Next come the addressing coefficients.  Each is the running product
	 of the extents of the dimensions processed so far; the product is
	 only accumulated for NCA descriptors ('S' fields), while A
	 descriptors get per-dimension extents ('M' fields).  */
      tem = size_one_node;
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];
	  tree idx_length
	    = size_binop (MULT_EXPR, tem,
			  size_binop (PLUS_EXPR,
				      size_binop (MINUS_EXPR,
						  TYPE_MAX_VALUE (idx_arr[i]),
						  TYPE_MIN_VALUE (idx_arr[i])),
				      size_int (1)));

	  fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
	  fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = make_descriptor_field (fname, gnat_type_for_size (64, 1),
				     record_type, idx_length, field_list);

	  if (mech == By_Descriptor_NCA)
	    tem = idx_length;
	}

      /* Finally here are the bounds, a lower/upper ("Ln"/"Un") pair per
	 dimension.  */
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];

	  fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = make_descriptor_field (fname, gnat_type_for_size (64, 1),
				     record_type,
				     TYPE_MIN_VALUE (idx_arr[i]), field_list);

	  fname[0] = 'U';
	  field_list
	    = make_descriptor_field (fname, gnat_type_for_size (64, 1),
				     record_type,
				     TYPE_MAX_VALUE (idx_arr[i]), field_list);
	}
      break;

    default:
      post_error ("unsupported descriptor type for &", gnat_entity);
    }

  TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC64");
  finish_record_type (record_type, nreverse (field_list), 0, false);
  return record_type;
}
3892
3893 /* Fill in a VMS descriptor of GNU_TYPE for GNU_EXPR and return the result.
3894 GNAT_ACTUAL is the actual parameter for which the descriptor is built. */
3895
3896 tree
3897 fill_vms_descriptor (tree gnu_type, tree gnu_expr, Node_Id gnat_actual)
3898 {
3899 vec<constructor_elt, va_gc> *v = NULL;
3900 tree field;
3901
3902 gnu_expr = maybe_unconstrained_array (gnu_expr);
3903 gnu_expr = gnat_protect_expr (gnu_expr);
3904 gnat_mark_addressable (gnu_expr);
3905
3906 /* We may need to substitute both GNU_EXPR and a CALL_EXPR to the raise CE
3907 routine in case we have a 32-bit descriptor. */
3908 gnu_expr = build2 (COMPOUND_EXPR, void_type_node,
3909 build_call_raise (CE_Range_Check_Failed, gnat_actual,
3910 N_Raise_Constraint_Error),
3911 gnu_expr);
3912
3913 for (field = TYPE_FIELDS (gnu_type); field; field = DECL_CHAIN (field))
3914 {
3915 tree value
3916 = convert (TREE_TYPE (field),
3917 SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_INITIAL (field),
3918 gnu_expr));
3919 CONSTRUCTOR_APPEND_ELT (v, field, value);
3920 }
3921
3922 return gnat_build_constructor (gnu_type, v);
3923 }
3924
/* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
   regular pointer or fat pointer type.  GNAT_SUBPROG is the subprogram to
   which the VMS descriptor is passed.

   The field positions hardwired below (via DECL_CHAIN hops) must match the
   layout built by build_vms_descriptor:
   MBO, DTYPE, CLASS, MBMO, LENGTH, POINTER, then class-specific fields.  */

static tree
convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
{
  tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
  tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
  /* The CLASS field is the 3rd field in the descriptor.  */
  tree klass = DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type)));
  /* The POINTER field is the 6th field in the descriptor.  */
  tree pointer = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (klass)));

  /* Retrieve the value of the POINTER field.  */
  tree gnu_expr64
    = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);

  /* For a regular pointer, the data address alone is the whole result.  */
  if (POINTER_TYPE_P (gnu_type))
    return convert (gnu_type, gnu_expr64);

  /* For a fat pointer, we must also materialize a bounds template.  */
  else if (TYPE_IS_FAT_POINTER_P (gnu_type))
    {
      tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
      tree p_bounds_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type)));
      tree template_type = TREE_TYPE (p_bounds_type);
      tree min_field = TYPE_FIELDS (template_type);
      tree max_field = DECL_CHAIN (TYPE_FIELDS (template_type));
      tree template_tree, template_addr, aflags, dimct, t, u;
      /* See the head comment of build_vms_descriptor.  */
      int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
      tree lfield, ufield;
      vec<constructor_elt, va_gc> *v;

      /* Convert POINTER to the pointer-to-array type.  */
      gnu_expr64 = convert (p_array_type, gnu_expr64);

      switch (iklass)
	{
	case 1:  /* Class S  */
	case 15: /* Class SB */
	  /* Build {1, LENGTH} template; LENGTH64 is the 5th field.  */
	  vec_alloc (v, 2);
	  t = DECL_CHAIN (DECL_CHAIN (klass));
	  t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  CONSTRUCTOR_APPEND_ELT (v, min_field,
				  convert (TREE_TYPE (min_field),
					   integer_one_node));
	  CONSTRUCTOR_APPEND_ELT (v, max_field,
				  convert (TREE_TYPE (max_field), t));
	  template_tree = gnat_build_constructor (template_type, v);
	  template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);

	  /* For class S, we are done.  */
	  if (iklass == 1)
	    break;

	  /* Test that we really have a SB descriptor, like DEC Ada.  */
	  t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
	  u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
	  u = build_binary_op (EQ_EXPR, boolean_type_node, t, u);
	  /* If so, there is already a template in the descriptor and
	     it is located right after the POINTER field.  The fields are
	     64bits so they must be repacked.  */
	  t = DECL_CHAIN (pointer);
	  lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);

	  t = DECL_CHAIN (t);
	  ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  ufield = convert
	    (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type))), ufield);

	  /* Build the template in the form of a constructor.  */
	  vec_alloc (v, 2);
	  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (template_type), lfield);
	  CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (template_type)),
				  ufield);
	  template_tree = gnat_build_constructor (template_type, v);

	  /* Otherwise use the {1, LENGTH} template we build above.  */
	  template_addr = build3 (COND_EXPR, p_bounds_type, u,
				  build_unary_op (ADDR_EXPR, p_bounds_type,
						  template_tree),
				  template_addr);
	  break;

	case 4:  /* Class A */
	  /* The AFLAGS field is the 3rd field after the pointer in the
	     descriptor.  */
	  t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer)));
	  aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  /* The DIMCT field is the next field in the descriptor after
	     aflags.  */
	  t = DECL_CHAIN (t);
	  dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  /* Raise CONSTRAINT_ERROR if either more than 1 dimension
	     or FL_COEFF or FL_BOUNDS not set.  */
	  u = build_int_cst (TREE_TYPE (aflags), 192);
	  u = build_binary_op (TRUTH_OR_EXPR, boolean_type_node,
			       build_binary_op (NE_EXPR, boolean_type_node,
						dimct,
						convert (TREE_TYPE (dimct),
							 size_one_node)),
			       build_binary_op (NE_EXPR, boolean_type_node,
						build2 (BIT_AND_EXPR,
							TREE_TYPE (aflags),
							aflags, u),
						u));
	  /* There is already a template in the descriptor and it is located
	     in block 3.  The fields are 64bits so they must be repacked.  */
	  t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN
	      (t)))));
	  lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);

	  t = DECL_CHAIN (t);
	  ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  ufield = convert
	    (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type))), ufield);

	  /* Build the template in the form of a constructor.  */
	  vec_alloc (v, 2);
	  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (template_type), lfield);
	  CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (template_type)),
				  ufield);
	  template_tree = gnat_build_constructor (template_type, v);
	  template_tree = build3 (COND_EXPR, template_type, u,
				  build_call_raise (CE_Length_Check_Failed, Empty,
						    N_Raise_Constraint_Error),
				  template_tree);
	  template_addr
	    = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
	  break;

	case 10: /* Class NCA */
	default:
	  post_error ("unsupported descriptor type for &", gnat_subprog);
	  template_addr = integer_zero_node;
	  break;
	}

      /* Build the fat pointer in the form of a constructor.  */
      vec_alloc (v, 2);
      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (gnu_type), gnu_expr64);
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (gnu_type)),
			      template_addr);
      return gnat_build_constructor (gnu_type, v);
    }

  else
    gcc_unreachable ();
}
4078
/* Convert GNU_EXPR, a pointer to a 32bit VMS descriptor, to GNU_TYPE, a
   regular pointer or fat pointer type.  GNAT_SUBPROG is the subprogram to
   which the VMS descriptor is passed.

   32-bit counterpart of convert_vms_descriptor64; field positions differ
   because the 32-bit layout has no MBO/MBMO fields (POINTER is the 4th
   field here, the 6th there).  */

static tree
convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
{
  tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
  tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
  /* The CLASS field is the 3rd field in the descriptor.  */
  tree klass = DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type)));
  /* The POINTER field is the 4th field in the descriptor.  */
  tree pointer = DECL_CHAIN (klass);

  /* Retrieve the value of the POINTER field.  */
  tree gnu_expr32
    = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);

  /* For a regular pointer, the data address alone is the whole result.  */
  if (POINTER_TYPE_P (gnu_type))
    return convert (gnu_type, gnu_expr32);

  /* For a fat pointer, we must also materialize a bounds template.  */
  else if (TYPE_IS_FAT_POINTER_P (gnu_type))
    {
      tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
      tree p_bounds_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type)));
      tree template_type = TREE_TYPE (p_bounds_type);
      tree min_field = TYPE_FIELDS (template_type);
      tree max_field = DECL_CHAIN (TYPE_FIELDS (template_type));
      tree template_tree, template_addr, aflags, dimct, t, u;
      /* See the head comment of build_vms_descriptor.  */
      int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
      vec<constructor_elt, va_gc> *v;

      /* Convert POINTER to the pointer-to-array type.  */
      gnu_expr32 = convert (p_array_type, gnu_expr32);

      switch (iklass)
	{
	case 1:  /* Class S  */
	case 15: /* Class SB */
	  /* Build {1, LENGTH} template; LENGTH is the 1st field.  */
	  vec_alloc (v, 2);
	  t = TYPE_FIELDS (desc_type);
	  t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  CONSTRUCTOR_APPEND_ELT (v, min_field,
				  convert (TREE_TYPE (min_field),
					   integer_one_node));
	  CONSTRUCTOR_APPEND_ELT (v, max_field,
				  convert (TREE_TYPE (max_field), t));
	  template_tree = gnat_build_constructor (template_type, v);
	  template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);

	  /* For class S, we are done.  */
	  if (iklass == 1)
	    break;

	  /* Test that we really have a SB descriptor, like DEC Ada.  */
	  t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
	  u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
	  u = build_binary_op (EQ_EXPR, boolean_type_node, t, u);
	  /* If so, there is already a template in the descriptor and
	     it is located right after the POINTER field.  */
	  t = DECL_CHAIN (pointer);
	  template_tree
	    = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  /* Otherwise use the {1, LENGTH} template we build above.  */
	  template_addr = build3 (COND_EXPR, p_bounds_type, u,
				  build_unary_op (ADDR_EXPR, p_bounds_type,
						  template_tree),
				  template_addr);
	  break;

	case 4:  /* Class A */
	  /* The AFLAGS field is the 7th field in the descriptor.  */
	  t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer)));
	  aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  /* The DIMCT field is the 8th field in the descriptor.  */
	  t = DECL_CHAIN (t);
	  dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  /* Raise CONSTRAINT_ERROR if either more than 1 dimension
	     or FL_COEFF or FL_BOUNDS not set.  */
	  u = build_int_cst (TREE_TYPE (aflags), 192);
	  u = build_binary_op (TRUTH_OR_EXPR, boolean_type_node,
			       build_binary_op (NE_EXPR, boolean_type_node,
						dimct,
						convert (TREE_TYPE (dimct),
							 size_one_node)),
			       build_binary_op (NE_EXPR, boolean_type_node,
						build2 (BIT_AND_EXPR,
							TREE_TYPE (aflags),
							aflags, u),
						u));
	  /* There is already a template in the descriptor and it is
	     located at the start of block 3 (12th field).  */
	  t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (t))));
	  template_tree
	    = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  template_tree = build3 (COND_EXPR, TREE_TYPE (t), u,
				  build_call_raise (CE_Length_Check_Failed, Empty,
						    N_Raise_Constraint_Error),
				  template_tree);
	  template_addr
	    = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
	  break;

	case 10: /* Class NCA */
	default:
	  post_error ("unsupported descriptor type for &", gnat_subprog);
	  template_addr = integer_zero_node;
	  break;
	}

      /* Build the fat pointer in the form of a constructor.  */
      vec_alloc (v, 2);
      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (gnu_type), gnu_expr32);
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (gnu_type)),
			      template_addr);

      return gnat_build_constructor (gnu_type, v);
    }

  else
    gcc_unreachable ();
}
4203
4204 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
4205 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
4206 pointer type of GNU_EXPR. GNAT_SUBPROG is the subprogram to which the
4207 descriptor is passed. */
4208
4209 tree
4210 convert_vms_descriptor (tree gnu_type, tree gnu_expr, tree gnu_expr_alt_type,
4211 Entity_Id gnat_subprog)
4212 {
4213 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
4214 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
4215 tree mbo = TYPE_FIELDS (desc_type);
4216 const char *mbostr = IDENTIFIER_POINTER (DECL_NAME (mbo));
4217 tree mbmo = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (mbo)));
4218 tree is64bit, gnu_expr32, gnu_expr64;
4219
4220 /* If the field name is not MBO, it must be 32-bit and no alternate.
4221 Otherwise primary must be 64-bit and alternate 32-bit. */
4222 if (strcmp (mbostr, "MBO") != 0)
4223 {
4224 tree ret = convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
4225 return ret;
4226 }
4227
4228 /* Build the test for 64-bit descriptor. */
4229 mbo = build3 (COMPONENT_REF, TREE_TYPE (mbo), desc, mbo, NULL_TREE);
4230 mbmo = build3 (COMPONENT_REF, TREE_TYPE (mbmo), desc, mbmo, NULL_TREE);
4231 is64bit
4232 = build_binary_op (TRUTH_ANDIF_EXPR, boolean_type_node,
4233 build_binary_op (EQ_EXPR, boolean_type_node,
4234 convert (integer_type_node, mbo),
4235 integer_one_node),
4236 build_binary_op (EQ_EXPR, boolean_type_node,
4237 convert (integer_type_node, mbmo),
4238 integer_minus_one_node));
4239
4240 /* Build the 2 possible end results. */
4241 gnu_expr64 = convert_vms_descriptor64 (gnu_type, gnu_expr, gnat_subprog);
4242 gnu_expr = fold_convert (gnu_expr_alt_type, gnu_expr);
4243 gnu_expr32 = convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
4244 return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
4245 }
4246 \f
4247 /* Build a type to be used to represent an aliased object whose nominal type
4248 is an unconstrained array. This consists of a RECORD_TYPE containing a
4249 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
4250 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
4251 an arbitrary unconstrained object. Use NAME as the name of the record.
4252 DEBUG_INFO_P is true if we need to write debug information for the type. */
4253
4254 tree
4255 build_unc_object_type (tree template_type, tree object_type, tree name,
4256 bool debug_info_p)
4257 {
4258 tree type = make_node (RECORD_TYPE);
4259 tree template_field
4260 = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
4261 NULL_TREE, NULL_TREE, 0, 1);
4262 tree array_field
4263 = create_field_decl (get_identifier ("ARRAY"), object_type, type,
4264 NULL_TREE, NULL_TREE, 0, 1);
4265
4266 TYPE_NAME (type) = name;
4267 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
4268 DECL_CHAIN (template_field) = array_field;
4269 finish_record_type (type, template_field, 0, true);
4270
4271 /* Declare it now since it will never be declared otherwise. This is
4272 necessary to ensure that its subtrees are properly marked. */
4273 create_type_decl (name, type, true, debug_info_p, Empty);
4274
4275 return type;
4276 }
4277
4278 /* Same, taking a thin or fat pointer type instead of a template type. */
4279
4280 tree
4281 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
4282 tree name, bool debug_info_p)
4283 {
4284 tree template_type;
4285
4286 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
4287
4288 template_type
4289 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
4290 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
4291 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
4292
4293 return
4294 build_unc_object_type (template_type, object_type, name, debug_info_p);
4295 }
4296 \f
/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
   In the normal case this is just two adjustments, but we have more to
   do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE.  */

void
update_pointer_to (tree old_type, tree new_type)
{
  tree ptr = TYPE_POINTER_TO (old_type);
  tree ref = TYPE_REFERENCE_TO (old_type);
  tree t;

  /* If this is the main variant, process all the other variants first.  */
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
    for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
      update_pointer_to (t, new_type);

  /* If no pointers and no references, we are done.  */
  if (!ptr && !ref)
    return;

  /* Merge the old type qualifiers in the new type.

     Each old variant has qualifiers for specific reasons, and the new
     designated type as well.  Each set of qualifiers represents useful
     information grabbed at some point, and merging the two simply unifies
     these inputs into the final type description.

     Consider for instance a volatile type frozen after an access to constant
     type designating it; after the designated type's freeze, we get here with
     a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
     when the access type was processed.  We will make a volatile and readonly
     designated type, because that's what it really is.

     We might also get here for a non-dummy OLD_TYPE variant with different
     qualifiers than those of NEW_TYPE, for instance in some cases of pointers
     to private record type elaboration (see the comments around the call to
     this routine in gnat_to_gnu_entity <E_Access_Type>).  We have to merge
     the qualifiers in those cases too, to avoid accidentally discarding the
     initial set, and will often end up with OLD_TYPE == NEW_TYPE then.  */
  new_type
    = build_qualified_type (new_type,
			    TYPE_QUALS (old_type) | TYPE_QUALS (new_type));

  /* If old type and new type are identical, there is nothing to do.  */
  if (old_type == new_type)
    return;

  /* Otherwise, first handle the simple case.  */
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
    {
      tree new_ptr, new_ref;

      /* If pointer or reference already points to new type, nothing to do.
	 This can happen as update_pointer_to can be invoked multiple times
	 on the same couple of types because of the type variants.  */
      if ((ptr && TREE_TYPE (ptr) == new_type)
	  || (ref && TREE_TYPE (ref) == new_type))
	return;

      /* Chain PTR and its variants at the end of NEW_TYPE's existing
	 pointer chain, or install PTR as the head if there is none.  */
      new_ptr = TYPE_POINTER_TO (new_type);
      if (new_ptr)
	{
	  while (TYPE_NEXT_PTR_TO (new_ptr))
	    new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
	  TYPE_NEXT_PTR_TO (new_ptr) = ptr;
	}
      else
	TYPE_POINTER_TO (new_type) = ptr;

      /* Now adjust them: retarget every pointer (and every variant of it)
	 at NEW_TYPE, including the pointer hidden in any cached null-bounds
	 fat-pointer constant.  */
      for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
	for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
	  {
	    TREE_TYPE (t) = new_type;
	    if (TYPE_NULL_BOUNDS (t))
	      TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
	  }

      /* Chain REF and its variants at the end, same scheme as for PTR.  */
      new_ref = TYPE_REFERENCE_TO (new_type);
      if (new_ref)
	{
	  while (TYPE_NEXT_REF_TO (new_ref))
	    new_ref = TYPE_NEXT_REF_TO (new_ref);
	  TYPE_NEXT_REF_TO (new_ref) = ref;
	}
      else
	TYPE_REFERENCE_TO (new_type) = ref;

      /* Now adjust them.  */
      for (; ref; ref = TYPE_NEXT_REF_TO (ref))
	for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
	  TREE_TYPE (t) = new_type;

      /* OLD_TYPE's chains were spliced into NEW_TYPE's above; clear them so
	 they are not walked or spliced twice.  */
      TYPE_POINTER_TO (old_type) = NULL_TREE;
      TYPE_REFERENCE_TO (old_type) = NULL_TREE;
    }

  /* Now deal with the unconstrained array case.  In this case the pointer
     is actually a record where both fields are pointers to dummy nodes.
     Turn them into pointers to the correct types using update_pointer_to.
     Likewise for the pointer to the object record (thin pointer).  */
  else
    {
      tree new_ptr = TYPE_POINTER_TO (new_type);

      gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));

      /* If PTR already points to NEW_TYPE, nothing to do.  This can happen
	 since update_pointer_to can be invoked multiple times on the same
	 couple of types because of the type variants.  */
      if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
	return;

      /* First field of the fat pointer: the pointer to the array.  */
      update_pointer_to
	(TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
	 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));

      /* Second field: the pointer to the bounds template.  */
      update_pointer_to
	(TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
	 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));

      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
			 TYPE_OBJECT_RECORD_TYPE (new_type));

      TYPE_POINTER_TO (old_type) = NULL_TREE;
    }
}
4426 \f
4427 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
4428 unconstrained one. This involves making or finding a template. */
4429
static tree
convert_to_fat_pointer (tree type, tree expr)
{
  /* TYPE is the fat pointer: its first field points to the array data and
     its second field points to the template holding the bounds.  */
  tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
  tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
  tree etype = TREE_TYPE (expr);
  tree template_addr;
  vec<constructor_elt, va_gc> *v;
  vec_alloc (v, 2);

  /* If EXPR is null, make a fat pointer that contains a null pointer to the
     array (compare_fat_pointers ensures that this is the full discriminant)
     and a valid pointer to the bounds.  This latter property is necessary
     since the compiler can hoist the load of the bounds done through it.  */
  if (integer_zerop (expr))
    {
      tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
      tree null_bounds, t;

      /* Reuse the null bounds if they have already been built and cached
	 on the pointer-to-template type.  */
      if (TYPE_NULL_BOUNDS (ptr_template_type))
	null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
      else
	{
	  /* The template type can still be dummy at this point so we build an
	     empty constructor.  The middle-end will fill it in with zeros.  */
	  t = build_constructor (template_type,
				 NULL);
	  TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
	  null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
	  SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
	}

      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
			      fold_convert (p_array_type, null_pointer_node));
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
      t = build_constructor (type, v);
      /* Do not set TREE_CONSTANT so as to force T to static memory.  */
      TREE_CONSTANT (t) = 0;
      TREE_STATIC (t) = 1;

      return t;
    }

  /* If EXPR is a thin pointer, make template and data from the record.  */
  if (TYPE_IS_THIN_POINTER_P (etype))
    {
      tree field = TYPE_FIELDS (TREE_TYPE (etype));

      /* EXPR is used more than once below, so guard against multiple
	 evaluation of any side effects.  */
      expr = gnat_protect_expr (expr);

      /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
	 the thin pointer value has been shifted so we shift it back to get
	 the template address.  */
      if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
	{
	  template_addr
	    = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
			       fold_build1 (NEGATE_EXPR, sizetype,
					    byte_position
					    (DECL_CHAIN (field))));
	  template_addr
	    = fold_convert (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))),
			    template_addr);
	}

      /* Otherwise we explicitly take the address of the fields.  */
      else
	{
	  expr = build_unary_op (INDIRECT_REF, NULL_TREE, expr);
	  template_addr
	    = build_unary_op (ADDR_EXPR, NULL_TREE,
			      build_component_ref (expr, NULL_TREE, field,
						   false));
	  expr = build_unary_op (ADDR_EXPR, NULL_TREE,
				 build_component_ref (expr, NULL_TREE,
						      DECL_CHAIN (field),
						      false));
	}
    }

  /* Otherwise, build the constructor for the template.  */
  else
    template_addr
      = build_unary_op (ADDR_EXPR, NULL_TREE,
			build_template (template_type, TREE_TYPE (etype),
					expr));

  /* The final result is a constructor for the fat pointer.

     If EXPR is an argument of a foreign convention subprogram, the type it
     points to is directly the component type.  In this case, the expression
     type may not match the corresponding FIELD_DECL type at this point, so we
     call "convert" here to fix that up if necessary.  This type consistency is
     required, for instance because it ensures that possible later folding of
     COMPONENT_REFs against this constructor always yields something of the
     same type as the initial reference.

     Note that the call to "build_template" above is still fine because it
     will only refer to the provided TEMPLATE_TYPE in this case.  */
  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), convert (p_array_type, expr));
  CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), template_addr);
  return gnat_build_constructor (type, v);
}
4533 \f
4534 /* Create an expression whose value is that of EXPR,
4535 converted to type TYPE. The TREE_TYPE of the value
4536 is always TYPE. This function implements all reasonable
4537 conversions; callers should filter out those that are
4538 not permitted by the language being compiled. */
4539
4540 tree
4541 convert (tree type, tree expr)
4542 {
4543 tree etype = TREE_TYPE (expr);
4544 enum tree_code ecode = TREE_CODE (etype);
4545 enum tree_code code = TREE_CODE (type);
4546
4547 /* If the expression is already of the right type, we are done. */
4548 if (etype == type)
4549 return expr;
4550
4551 /* If both input and output have padding and are of variable size, do this
4552 as an unchecked conversion. Likewise if one is a mere variant of the
4553 other, so we avoid a pointless unpad/repad sequence. */
4554 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
4555 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4556 && (!TREE_CONSTANT (TYPE_SIZE (type))
4557 || !TREE_CONSTANT (TYPE_SIZE (etype))
4558 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4559 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
4560 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
4561 ;
4562
4563 /* If the output type has padding, convert to the inner type and make a
4564 constructor to build the record, unless a variable size is involved. */
4565 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
4566 {
4567 vec<constructor_elt, va_gc> *v;
4568
4569 /* If we previously converted from another type and our type is
4570 of variable size, remove the conversion to avoid the need for
4571 variable-sized temporaries. Likewise for a conversion between
4572 original and packable version. */
4573 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4574 && (!TREE_CONSTANT (TYPE_SIZE (type))
4575 || (ecode == RECORD_TYPE
4576 && TYPE_NAME (etype)
4577 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
4578 expr = TREE_OPERAND (expr, 0);
4579
4580 /* If we are just removing the padding from expr, convert the original
4581 object if we have variable size in order to avoid the need for some
4582 variable-sized temporaries. Likewise if the padding is a variant
4583 of the other, so we avoid a pointless unpad/repad sequence. */
4584 if (TREE_CODE (expr) == COMPONENT_REF
4585 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
4586 && (!TREE_CONSTANT (TYPE_SIZE (type))
4587 || TYPE_MAIN_VARIANT (type)
4588 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
4589 || (ecode == RECORD_TYPE
4590 && TYPE_NAME (etype)
4591 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
4592 return convert (type, TREE_OPERAND (expr, 0));
4593
4594 /* If the inner type is of self-referential size and the expression type
4595 is a record, do this as an unchecked conversion. But first pad the
4596 expression if possible to have the same size on both sides. */
4597 if (ecode == RECORD_TYPE
4598 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
4599 {
4600 if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
4601 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4602 false, false, false, true),
4603 expr);
4604 return unchecked_convert (type, expr, false);
4605 }
4606
4607 /* If we are converting between array types with variable size, do the
4608 final conversion as an unchecked conversion, again to avoid the need
4609 for some variable-sized temporaries. If valid, this conversion is
4610 very likely purely technical and without real effects. */
4611 if (ecode == ARRAY_TYPE
4612 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
4613 && !TREE_CONSTANT (TYPE_SIZE (etype))
4614 && !TREE_CONSTANT (TYPE_SIZE (type)))
4615 return unchecked_convert (type,
4616 convert (TREE_TYPE (TYPE_FIELDS (type)),
4617 expr),
4618 false);
4619
4620 vec_alloc (v, 1);
4621 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4622 convert (TREE_TYPE (TYPE_FIELDS (type)), expr));
4623 return gnat_build_constructor (type, v);
4624 }
4625
4626 /* If the input type has padding, remove it and convert to the output type.
4627 The conditions ordering is arranged to ensure that the output type is not
4628 a padding type here, as it is not clear whether the conversion would
4629 always be correct if this was to happen. */
4630 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
4631 {
4632 tree unpadded;
4633
4634 /* If we have just converted to this padded type, just get the
4635 inner expression. */
4636 if (TREE_CODE (expr) == CONSTRUCTOR
4637 && !vec_safe_is_empty (CONSTRUCTOR_ELTS (expr))
4638 && (*CONSTRUCTOR_ELTS (expr))[0].index == TYPE_FIELDS (etype))
4639 unpadded = (*CONSTRUCTOR_ELTS (expr))[0].value;
4640
4641 /* Otherwise, build an explicit component reference. */
4642 else
4643 unpadded
4644 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
4645
4646 return convert (type, unpadded);
4647 }
4648
4649 /* If the input is a biased type, adjust first. */
4650 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4651 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
4652 fold_convert (TREE_TYPE (etype),
4653 expr),
4654 TYPE_MIN_VALUE (etype)));
4655
4656 /* If the input is a justified modular type, we need to extract the actual
4657 object before converting it to any other type with the exceptions of an
4658 unconstrained array or of a mere type variant. It is useful to avoid the
4659 extraction and conversion in the type variant case because it could end
4660 up replacing a VAR_DECL expr by a constructor and we might be about the
4661 take the address of the result. */
4662 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4663 && code != UNCONSTRAINED_ARRAY_TYPE
4664 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
4665 return convert (type, build_component_ref (expr, NULL_TREE,
4666 TYPE_FIELDS (etype), false));
4667
4668 /* If converting to a type that contains a template, convert to the data
4669 type and then build the template. */
4670 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4671 {
4672 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4673 vec<constructor_elt, va_gc> *v;
4674 vec_alloc (v, 2);
4675
4676 /* If the source already has a template, get a reference to the
4677 associated array only, as we are going to rebuild a template
4678 for the target type anyway. */
4679 expr = maybe_unconstrained_array (expr);
4680
4681 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4682 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4683 obj_type, NULL_TREE));
4684 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
4685 convert (obj_type, expr));
4686 return gnat_build_constructor (type, v);
4687 }
4688
4689 /* There are some cases of expressions that we process specially. */
4690 switch (TREE_CODE (expr))
4691 {
4692 case ERROR_MARK:
4693 return expr;
4694
4695 case NULL_EXPR:
4696 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4697 conversion in gnat_expand_expr. NULL_EXPR does not represent
4698 and actual value, so no conversion is needed. */
4699 expr = copy_node (expr);
4700 TREE_TYPE (expr) = type;
4701 return expr;
4702
4703 case STRING_CST:
4704 /* If we are converting a STRING_CST to another constrained array type,
4705 just make a new one in the proper type. */
4706 if (code == ecode && AGGREGATE_TYPE_P (etype)
4707 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
4708 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
4709 {
4710 expr = copy_node (expr);
4711 TREE_TYPE (expr) = type;
4712 return expr;
4713 }
4714 break;
4715
4716 case VECTOR_CST:
4717 /* If we are converting a VECTOR_CST to a mere type variant, just make
4718 a new one in the proper type. */
4719 if (code == ecode && gnat_types_compatible_p (type, etype))
4720 {
4721 expr = copy_node (expr);
4722 TREE_TYPE (expr) = type;
4723 return expr;
4724 }
4725
4726 case CONSTRUCTOR:
4727 /* If we are converting a CONSTRUCTOR to a mere type variant, or to
4728 another padding type around the same type, just make a new one in
4729 the proper type. */
4730 if (code == ecode
4731 && (gnat_types_compatible_p (type, etype)
4732 || (code == RECORD_TYPE
4733 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4734 && TREE_TYPE (TYPE_FIELDS (type))
4735 == TREE_TYPE (TYPE_FIELDS (etype)))))
4736 {
4737 expr = copy_node (expr);
4738 TREE_TYPE (expr) = type;
4739 CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
4740 return expr;
4741 }
4742
4743 /* Likewise for a conversion between original and packable version, or
4744 conversion between types of the same size and with the same list of
4745 fields, but we have to work harder to preserve type consistency. */
4746 if (code == ecode
4747 && code == RECORD_TYPE
4748 && (TYPE_NAME (type) == TYPE_NAME (etype)
4749 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
4750
4751 {
4752 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4753 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4754 vec<constructor_elt, va_gc> *v;
4755 vec_alloc (v, len);
4756 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4757 unsigned HOST_WIDE_INT idx;
4758 tree index, value;
4759
4760 /* Whether we need to clear TREE_CONSTANT et al. on the output
4761 constructor when we convert in place. */
4762 bool clear_constant = false;
4763
4764 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
4765 {
4766 /* Skip the missing fields in the CONSTRUCTOR. */
4767 while (efield && field && !SAME_FIELD_P (efield, index))
4768 {
4769 efield = DECL_CHAIN (efield);
4770 field = DECL_CHAIN (field);
4771 }
4772 /* The field must be the same. */
4773 if (!(efield && field && SAME_FIELD_P (efield, field)))
4774 break;
4775 constructor_elt elt
4776 = {field, convert (TREE_TYPE (field), value)};
4777 v->quick_push (elt);
4778
4779 /* If packing has made this field a bitfield and the input
4780 value couldn't be emitted statically any more, we need to
4781 clear TREE_CONSTANT on our output. */
4782 if (!clear_constant
4783 && TREE_CONSTANT (expr)
4784 && !CONSTRUCTOR_BITFIELD_P (efield)
4785 && CONSTRUCTOR_BITFIELD_P (field)
4786 && !initializer_constant_valid_for_bitfield_p (value))
4787 clear_constant = true;
4788
4789 efield = DECL_CHAIN (efield);
4790 field = DECL_CHAIN (field);
4791 }
4792
4793 /* If we have been able to match and convert all the input fields
4794 to their output type, convert in place now. We'll fallback to a
4795 view conversion downstream otherwise. */
4796 if (idx == len)
4797 {
4798 expr = copy_node (expr);
4799 TREE_TYPE (expr) = type;
4800 CONSTRUCTOR_ELTS (expr) = v;
4801 if (clear_constant)
4802 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
4803 return expr;
4804 }
4805 }
4806
4807 /* Likewise for a conversion between array type and vector type with a
4808 compatible representative array. */
4809 else if (code == VECTOR_TYPE
4810 && ecode == ARRAY_TYPE
4811 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4812 etype))
4813 {
4814 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4815 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4816 vec<constructor_elt, va_gc> *v;
4817 unsigned HOST_WIDE_INT ix;
4818 tree value;
4819
4820 /* Build a VECTOR_CST from a *constant* array constructor. */
4821 if (TREE_CONSTANT (expr))
4822 {
4823 bool constant_p = true;
4824
4825 /* Iterate through elements and check if all constructor
4826 elements are *_CSTs. */
4827 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4828 if (!CONSTANT_CLASS_P (value))
4829 {
4830 constant_p = false;
4831 break;
4832 }
4833
4834 if (constant_p)
4835 return build_vector_from_ctor (type,
4836 CONSTRUCTOR_ELTS (expr));
4837 }
4838
4839 /* Otherwise, build a regular vector constructor. */
4840 vec_alloc (v, len);
4841 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4842 {
4843 constructor_elt elt = {NULL_TREE, value};
4844 v->quick_push (elt);
4845 }
4846 expr = copy_node (expr);
4847 TREE_TYPE (expr) = type;
4848 CONSTRUCTOR_ELTS (expr) = v;
4849 return expr;
4850 }
4851 break;
4852
4853 case UNCONSTRAINED_ARRAY_REF:
4854 /* First retrieve the underlying array. */
4855 expr = maybe_unconstrained_array (expr);
4856 etype = TREE_TYPE (expr);
4857 ecode = TREE_CODE (etype);
4858 break;
4859
4860 case VIEW_CONVERT_EXPR:
4861 {
4862 /* GCC 4.x is very sensitive to type consistency overall, and view
4863 conversions thus are very frequent. Even though just "convert"ing
4864 the inner operand to the output type is fine in most cases, it
4865 might expose unexpected input/output type mismatches in special
4866 circumstances so we avoid such recursive calls when we can. */
4867 tree op0 = TREE_OPERAND (expr, 0);
4868
4869 /* If we are converting back to the original type, we can just
4870 lift the input conversion. This is a common occurrence with
4871 switches back-and-forth amongst type variants. */
4872 if (type == TREE_TYPE (op0))
4873 return op0;
4874
4875 /* Otherwise, if we're converting between two aggregate or vector
4876 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4877 target type in place or to just convert the inner expression. */
4878 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4879 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
4880 {
4881 /* If we are converting between mere variants, we can just
4882 substitute the VIEW_CONVERT_EXPR in place. */
4883 if (gnat_types_compatible_p (type, etype))
4884 return build1 (VIEW_CONVERT_EXPR, type, op0);
4885
4886 /* Otherwise, we may just bypass the input view conversion unless
4887 one of the types is a fat pointer, which is handled by
4888 specialized code below which relies on exact type matching. */
4889 else if (!TYPE_IS_FAT_POINTER_P (type)
4890 && !TYPE_IS_FAT_POINTER_P (etype))
4891 return convert (type, op0);
4892 }
4893
4894 break;
4895 }
4896
4897 default:
4898 break;
4899 }
4900
4901 /* Check for converting to a pointer to an unconstrained array. */
4902 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
4903 return convert_to_fat_pointer (type, expr);
4904
4905 /* If we are converting between two aggregate or vector types that are mere
4906 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4907 to a vector type from its representative array type. */
4908 else if ((code == ecode
4909 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4910 && gnat_types_compatible_p (type, etype))
4911 || (code == VECTOR_TYPE
4912 && ecode == ARRAY_TYPE
4913 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4914 etype)))
4915 return build1 (VIEW_CONVERT_EXPR, type, expr);
4916
4917 /* If we are converting between tagged types, try to upcast properly. */
4918 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4919 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
4920 {
4921 tree child_etype = etype;
4922 do {
4923 tree field = TYPE_FIELDS (child_etype);
4924 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
4925 return build_component_ref (expr, NULL_TREE, field, false);
4926 child_etype = TREE_TYPE (field);
4927 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4928 }
4929
4930 /* If we are converting from a smaller form of record type back to it, just
4931 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4932 size on both sides. */
4933 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4934 && smaller_form_type_p (etype, type))
4935 {
4936 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4937 false, false, false, true),
4938 expr);
4939 return build1 (VIEW_CONVERT_EXPR, type, expr);
4940 }
4941
4942 /* In all other cases of related types, make a NOP_EXPR. */
4943 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
4944 return fold_convert (type, expr);
4945
4946 switch (code)
4947 {
4948 case VOID_TYPE:
4949 return fold_build1 (CONVERT_EXPR, type, expr);
4950
4951 case INTEGER_TYPE:
4952 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4953 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4954 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4955 return unchecked_convert (type, expr, false);
4956 else if (TYPE_BIASED_REPRESENTATION_P (type))
4957 return fold_convert (type,
4958 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4959 convert (TREE_TYPE (type), expr),
4960 TYPE_MIN_VALUE (type)));
4961
4962 /* ... fall through ... */
4963
4964 case ENUMERAL_TYPE:
4965 case BOOLEAN_TYPE:
4966 /* If we are converting an additive expression to an integer type
4967 with lower precision, be wary of the optimization that can be
4968 applied by convert_to_integer. There are 2 problematic cases:
4969 - if the first operand was originally of a biased type,
4970 because we could be recursively called to convert it
4971 to an intermediate type and thus rematerialize the
4972 additive operator endlessly,
4973 - if the expression contains a placeholder, because an
4974 intermediate conversion that changes the sign could
4975 be inserted and thus introduce an artificial overflow
4976 at compile time when the placeholder is substituted. */
4977 if (code == INTEGER_TYPE
4978 && ecode == INTEGER_TYPE
4979 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4980 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4981 {
4982 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4983
4984 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4985 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4986 || CONTAINS_PLACEHOLDER_P (expr))
4987 return build1 (NOP_EXPR, type, expr);
4988 }
4989
4990 return fold (convert_to_integer (type, expr));
4991
4992 case POINTER_TYPE:
4993 case REFERENCE_TYPE:
4994 /* If converting between two thin pointers, adjust if needed to account
4995 for differing offsets from the base pointer, depending on whether
4996 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
4997 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
4998 {
4999 tree etype_pos
5000 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)) != NULL_TREE
5001 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
5002 : size_zero_node;
5003 tree type_pos
5004 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != NULL_TREE
5005 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
5006 : size_zero_node;
5007 tree byte_diff = size_diffop (type_pos, etype_pos);
5008
5009 expr = build1 (NOP_EXPR, type, expr);
5010 if (integer_zerop (byte_diff))
5011 return expr;
5012
5013 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
5014 fold_convert (sizetype, byte_diff));
5015 }
5016
5017 /* If converting fat pointer to normal or thin pointer, get the pointer
5018 to the array and then convert it. */
5019 if (TYPE_IS_FAT_POINTER_P (etype))
5020 expr
5021 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
5022
5023 return fold (convert_to_pointer (type, expr));
5024
5025 case REAL_TYPE:
5026 return fold (convert_to_real (type, expr));
5027
5028 case RECORD_TYPE:
5029 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
5030 {
5031 vec<constructor_elt, va_gc> *v;
5032 vec_alloc (v, 1);
5033
5034 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
5035 convert (TREE_TYPE (TYPE_FIELDS (type)),
5036 expr));
5037 return gnat_build_constructor (type, v);
5038 }
5039
5040 /* ... fall through ... */
5041
5042 case ARRAY_TYPE:
5043 /* In these cases, assume the front-end has validated the conversion.
5044 If the conversion is valid, it will be a bit-wise conversion, so
5045 it can be viewed as an unchecked conversion. */
5046 return unchecked_convert (type, expr, false);
5047
5048 case UNION_TYPE:
5049 /* This is a either a conversion between a tagged type and some
5050 subtype, which we have to mark as a UNION_TYPE because of
5051 overlapping fields or a conversion of an Unchecked_Union. */
5052 return unchecked_convert (type, expr, false);
5053
5054 case UNCONSTRAINED_ARRAY_TYPE:
5055 /* If the input is a VECTOR_TYPE, convert to the representative
5056 array type first. */
5057 if (ecode == VECTOR_TYPE)
5058 {
5059 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
5060 etype = TREE_TYPE (expr);
5061 ecode = TREE_CODE (etype);
5062 }
5063
5064 /* If EXPR is a constrained array, take its address, convert it to a
5065 fat pointer, and then dereference it. Likewise if EXPR is a
5066 record containing both a template and a constrained array.
5067 Note that a record representing a justified modular type
5068 always represents a packed constrained array. */
5069 if (ecode == ARRAY_TYPE
5070 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
5071 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
5072 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
5073 return
5074 build_unary_op
5075 (INDIRECT_REF, NULL_TREE,
5076 convert_to_fat_pointer (TREE_TYPE (type),
5077 build_unary_op (ADDR_EXPR,
5078 NULL_TREE, expr)));
5079
5080 /* Do something very similar for converting one unconstrained
5081 array to another. */
5082 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
5083 return
5084 build_unary_op (INDIRECT_REF, NULL_TREE,
5085 convert (TREE_TYPE (type),
5086 build_unary_op (ADDR_EXPR,
5087 NULL_TREE, expr)));
5088 else
5089 gcc_unreachable ();
5090
5091 case COMPLEX_TYPE:
5092 return fold (convert_to_complex (type, expr));
5093
5094 default:
5095 gcc_unreachable ();
5096 }
5097 }
5098
5099 /* Create an expression whose value is that of EXPR converted to the common
5100 index type, which is sizetype. EXPR is supposed to be in the base type
5101 of the GNAT index type. Calling it is equivalent to doing
5102
5103 convert (sizetype, expr)
5104
5105 but we try to distribute the type conversion with the knowledge that EXPR
5106 cannot overflow in its type. This is a best-effort approach and we fall
5107 back to the above expression as soon as difficulties are encountered.
5108
5109 This is necessary to overcome issues that arise when the GNAT base index
5110 type and the GCC common index type (sizetype) don't have the same size,
5111 which is quite frequent on 64-bit architectures. In this case, and if
5112 the GNAT base index type is signed but the iteration type of the loop has
5113 been forced to unsigned, the loop scalar evolution engine cannot compute
5114 a simple evolution for the general induction variables associated with the
5115 array indices, because it will preserve the wrap-around semantics in the
5116 unsigned type of their "inner" part. As a result, many loop optimizations
5117 are blocked.
5118
5119 The solution is to use a special (basic) induction variable that is at
5120 least as large as sizetype, and to express the aforementioned general
5121 induction variables in terms of this induction variable, eliminating
5122 the problematic intermediate truncation to the GNAT base index type.
5123 This is possible as long as the original expression doesn't overflow
5124 and if the middle-end hasn't introduced artificial overflows in the
5125 course of the various simplification it can make to the expression. */
5126
tree
convert_to_index_type (tree expr)
{
  enum tree_code code = TREE_CODE (expr);
  tree type = TREE_TYPE (expr);

  /* If the type is unsigned, overflow is allowed so we cannot be sure that
     EXPR doesn't overflow.  Keep it simple if optimization is disabled.  */
  if (TYPE_UNSIGNED (type) || !optimize)
    return convert (sizetype, expr);

  switch (code)
    {
    case VAR_DECL:
      /* The main effect of the function: replace a loop parameter with its
	 associated special induction variable.  */
      if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
	expr = DECL_INDUCTION_VAR (expr);
      break;

    CASE_CONVERT:
      {
	tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
	/* Bail out as soon as we suspect some sort of type frobbing.  */
	if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
	    || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
	  break;
      }

      /* ... fall through ... */

    case NON_LVALUE_EXPR:
      /* Distribute the conversion into the operand of these value-preserving
	 unary operations.  */
      return fold_build1 (code, sizetype,
			  convert_to_index_type (TREE_OPERAND (expr, 0)));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Distribute the conversion over both operands of the additive or
	 multiplicative operation.  */
      return fold_build2 (code, sizetype,
			  convert_to_index_type (TREE_OPERAND (expr, 0)),
			  convert_to_index_type (TREE_OPERAND (expr, 1)));

    case COMPOUND_EXPR:
      /* Only the value operand (the second one) needs converting.  */
      return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
			  convert_to_index_type (TREE_OPERAND (expr, 1)));

    case COND_EXPR:
      /* Convert both arms; the condition operand is left untouched.  */
      return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
			  convert_to_index_type (TREE_OPERAND (expr, 1)),
			  convert_to_index_type (TREE_OPERAND (expr, 2)));

    default:
      break;
    }

  /* Fall back to a plain conversion of EXPR itself.  */
  return convert (sizetype, expr);
}
5184 \f
5185 /* Remove all conversions that are done in EXP. This includes converting
5186 from a padded type or to a justified modular type. If TRUE_ADDRESS
5187 is true, always return the address of the containing object even if
5188 the address is not bit-aligned. */
5189
5190 tree
5191 remove_conversions (tree exp, bool true_address)
5192 {
5193 switch (TREE_CODE (exp))
5194 {
5195 case CONSTRUCTOR:
5196 if (true_address
5197 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
5198 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
5199 return
5200 remove_conversions ((*CONSTRUCTOR_ELTS (exp))[0].value, true);
5201 break;
5202
5203 case COMPONENT_REF:
5204 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
5205 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
5206 break;
5207
5208 CASE_CONVERT:
5209 case VIEW_CONVERT_EXPR:
5210 case NON_LVALUE_EXPR:
5211 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
5212
5213 default:
5214 break;
5215 }
5216
5217 return exp;
5218 }
5219 \f
5220 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
5221 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
5222 likewise return an expression pointing to the underlying array. */
5223
tree
maybe_unconstrained_array (tree exp)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE (type))
    {
    case UNCONSTRAINED_ARRAY_TYPE:
      if (code == UNCONSTRAINED_ARRAY_REF)
	{
	  /* Preserve these flags across the rebuild of the reference.  */
	  const bool read_only = TREE_READONLY (exp);
	  const bool no_trap = TREE_THIS_NOTRAP (exp);

	  /* Strip the UNCONSTRAINED_ARRAY_REF to get at the fat pointer.  */
	  exp = TREE_OPERAND (exp, 0);
	  type = TREE_TYPE (exp);

	  if (TREE_CODE (exp) == COND_EXPR)
	    {
	      /* Push the dereference of the first field of the fat pointer
		 into both arms of the conditional so that each arm yields
		 the underlying array directly.  */
	      tree op1
		= build_unary_op (INDIRECT_REF, NULL_TREE,
				  build_component_ref (TREE_OPERAND (exp, 1),
						       NULL_TREE,
						       TYPE_FIELDS (type),
						       false));
	      tree op2
		= build_unary_op (INDIRECT_REF, NULL_TREE,
				  build_component_ref (TREE_OPERAND (exp, 2),
						       NULL_TREE,
						       TYPE_FIELDS (type),
						       false));

	      exp = build3 (COND_EXPR,
			    TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
			    TREE_OPERAND (exp, 0), op1, op2);
	    }
	  else
	    {
	      /* Dereference the first field of the fat pointer, i.e. the
		 pointer to the array data.  */
	      exp = build_unary_op (INDIRECT_REF, NULL_TREE,
				    build_component_ref (exp, NULL_TREE,
							 TYPE_FIELDS (type),
							 false));
	      TREE_READONLY (exp) = read_only;
	      TREE_THIS_NOTRAP (exp) = no_trap;
	    }
	}

      /* For a NULL_EXPR, rebuild it with the type of the underlying
	 array.  */
      else if (code == NULL_EXPR)
	exp = build1 (NULL_EXPR,
		      TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
		      TREE_OPERAND (exp, 0));
      break;

    case RECORD_TYPE:
      /* If this is a padded type and it contains a template, convert to the
	 unpadded type first.  */
      if (TYPE_PADDING_P (type)
	  && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
	  && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
	{
	  exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
	  type = TREE_TYPE (exp);
	}

      if (TYPE_CONTAINS_TEMPLATE_P (type))
	{
	  /* The array is the second field of the record, following the
	     template.  */
	  exp = build_component_ref (exp, NULL_TREE,
				     DECL_CHAIN (TYPE_FIELDS (type)),
				     false);
	  type = TREE_TYPE (exp);

	  /* If the array type is padded, convert to the unpadded type.  */
	  if (TYPE_IS_PADDING_P (type))
	    exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
	}
      break;

    default:
      break;
    }

  return exp;
}
5307 \f
5308 /* Return true if EXPR is an expression that can be folded as an operand
5309 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
5310
5311 static bool
5312 can_fold_for_view_convert_p (tree expr)
5313 {
5314 tree t1, t2;
5315
5316 /* The folder will fold NOP_EXPRs between integral types with the same
5317 precision (in the middle-end's sense). We cannot allow it if the
5318 types don't have the same precision in the Ada sense as well. */
5319 if (TREE_CODE (expr) != NOP_EXPR)
5320 return true;
5321
5322 t1 = TREE_TYPE (expr);
5323 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
5324
5325 /* Defer to the folder for non-integral conversions. */
5326 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
5327 return true;
5328
5329 /* Only fold conversions that preserve both precisions. */
5330 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
5331 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
5332 return true;
5333
5334 return false;
5335 }
5336
5337 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
5338 If NOTRUNC_P is true, truncation operations should be suppressed.
5339
5340 Special care is required with (source or target) integral types whose
5341 precision is not equal to their size, to make sure we fetch or assign
5342 the value bits whose location might depend on the endianness, e.g.
5343
5344 Rmsize : constant := 8;
5345 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
5346
5347 type Bit_Array is array (1 .. Rmsize) of Boolean;
5348 pragma Pack (Bit_Array);
5349
5350 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
5351
5352 Value : Int := 2#1000_0001#;
5353 Vbits : Bit_Array := To_Bit_Array (Value);
5354
5355 we expect the 8 bits at Vbits'Address to always contain Value, while
5356 their original location depends on the endianness, at Value'Address
5357 on a little-endian architecture but not on a big-endian one. */
5358
tree
unchecked_convert (tree type, tree expr, bool notrunc_p)
{
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);
  enum tree_code code = TREE_CODE (type);
  tree tem;
  int c;

  /* If the expression is already of the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If both types are integral just do a normal conversion.
     Likewise for a conversion to an unconstrained array.  */
  if ((((INTEGRAL_TYPE_P (type)
	 && !(code == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (type)))
	|| (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
	|| (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
       && ((INTEGRAL_TYPE_P (etype)
	    && !(ecode == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (etype)))
	   || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
	   || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
      || code == UNCONSTRAINED_ARRAY_TYPE)
    {
      /* If the source has a biased representation, first view it as an
	 unbiased copy of its type so that the regular conversion machinery
	 does not adjust for the bias again.  */
      if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
	{
	  tree ntype = copy_type (etype);
	  TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
	  TYPE_MAIN_VARIANT (ntype) = ntype;
	  expr = build1 (NOP_EXPR, ntype, expr);
	}

      /* Similarly, if the target is biased, convert to an unbiased copy of
	 it and then reinterpret the result as the biased type.  */
      if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
	{
	  tree rtype = copy_type (type);
	  TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
	  TYPE_MAIN_VARIANT (rtype) = rtype;
	  expr = convert (rtype, expr);
	  expr = build1 (NOP_EXPR, type, expr);
	}
      else
	expr = convert (type, expr);
    }

  /* If we are converting to an integral type whose precision is not equal
     to its size, first unchecked convert to a record type that contains a
     field of the given precision.  Then extract the field.  */
  else if (INTEGRAL_TYPE_P (type)
	   && TYPE_RM_SIZE (type)
	   && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				     GET_MODE_BITSIZE (TYPE_MODE (type))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
      tree field_type, field;

      if (TYPE_UNSIGNED (type))
	field_type = make_unsigned_type (prec);
      else
	field_type = make_signed_type (prec);
      SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));

      /* The field is laid out at bit position 0 so that the value bits end
	 up where the endianness of the target puts them (see the head
	 comment of this function).  */
      field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
				 NULL_TREE, bitsize_zero_node, 1, 0);

      finish_record_type (rec_type, field, 1, false);

      expr = unchecked_convert (rec_type, expr, notrunc_p);
      expr = build_component_ref (expr, NULL_TREE, field, false);
      expr = fold_build1 (NOP_EXPR, type, expr);
    }

  /* Similarly if we are converting from an integral type whose precision is
     not equal to its size, first copy into a field of the given precision
     and unchecked convert the record type.  */
  else if (INTEGRAL_TYPE_P (etype)
	   && TYPE_RM_SIZE (etype)
	   && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
				     GET_MODE_BITSIZE (TYPE_MODE (etype))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
      vec<constructor_elt, va_gc> *v;
      vec_alloc (v, 1);
      tree field_type, field;

      if (TYPE_UNSIGNED (etype))
	field_type = make_unsigned_type (prec);
      else
	field_type = make_signed_type (prec);
      SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));

      field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
				 NULL_TREE, bitsize_zero_node, 1, 0);

      finish_record_type (rec_type, field, 1, false);

      /* Wrap the value in a one-field constructor and recurse on the
	 record, which has precision equal to its size.  */
      expr = fold_build1 (NOP_EXPR, field_type, expr);
      CONSTRUCTOR_APPEND_ELT (v, field, expr);
      expr = gnat_build_constructor (rec_type, v);
      expr = unchecked_convert (type, expr, notrunc_p);
    }

  /* If we are converting from a scalar type to a type with a different size,
     we need to pad to have the same size on both sides.

     ??? We cannot do it unconditionally because unchecked conversions are
     used liberally by the front-end to implement polymorphism, e.g. in:

       S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
       return p___size__4 (p__object!(S191s.all));

     so we skip all expressions that are references.  */
  else if (!REFERENCE_CLASS_P (expr)
	   && !AGGREGATE_TYPE_P (etype)
	   && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
	   && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
    {
      if (c < 0)
	{
	  /* The source is smaller: pad it up to the target size, then
	     recurse on the now size-matched operands.  */
	  expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
					  false, false, false, true),
			  expr);
	  expr = unchecked_convert (type, expr, notrunc_p);
	}
      else
	{
	  /* The source is larger: convert to a padded version of the target
	     and extract the object from the padding record.  */
	  tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
					  false, false, false, true);
	  expr = unchecked_convert (rec_type, expr, notrunc_p);
	  expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (rec_type),
				      false);
	}
    }

  /* We have a special case when we are converting between two unconstrained
     array types.  In that case, take the address, convert the fat pointer
     types, and dereference.  */
  else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
    expr = build_unary_op (INDIRECT_REF, NULL_TREE,
			   build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR, NULL_TREE,
						   expr)));

  /* Another special case is when we are converting to a vector type from its
     representative array type; this is a regular conversion.  */
  else if (code == VECTOR_TYPE
	   && ecode == ARRAY_TYPE
	   && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
				       etype))
    expr = convert (type, expr);

  /* And, if the array type is not the representative, we try to build an
     intermediate vector type of which the array type is the representative
     and to do the unchecked conversion between the vector types, in order
     to enable further simplifications in the middle-end.  */
  else if (code == VECTOR_TYPE
	   && ecode == ARRAY_TYPE
	   && (tem = build_vector_type_for_array (etype, NULL_TREE)))
    {
      expr = convert (tem, expr);
      return unchecked_convert (type, expr, notrunc_p);
    }

  /* If we are converting a CONSTRUCTOR to a more aligned RECORD_TYPE, bump
     the alignment of the CONSTRUCTOR to speed up the copy operation.  */
  else if (TREE_CODE (expr) == CONSTRUCTOR
	   && code == RECORD_TYPE
	   && TYPE_ALIGN (etype) < TYPE_ALIGN (type))
    {
      expr = convert (maybe_pad_type (etype, NULL_TREE, TYPE_ALIGN (type),
				      Empty, false, false, false, true),
		      expr);
      return unchecked_convert (type, expr, notrunc_p);
    }

  /* Otherwise, just build a VIEW_CONVERT_EXPR of the expression.  */
  else
    {
      expr = maybe_unconstrained_array (expr);
      etype = TREE_TYPE (expr);
      ecode = TREE_CODE (etype);
      if (can_fold_for_view_convert_p (expr))
	expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
      else
	expr = build1 (VIEW_CONVERT_EXPR, type, expr);
    }

  /* If the result is an integral type whose precision is not equal to its
     size, sign- or zero-extend the result.  We need not do this if the input
     is an integral type of the same precision and signedness or if the output
     is a biased type or if both the input and output are unsigned.  */
  if (!notrunc_p
      && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
      && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
      && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				GET_MODE_BITSIZE (TYPE_MODE (type)))
      && !(INTEGRAL_TYPE_P (etype)
	   && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
	   && operand_equal_p (TYPE_RM_SIZE (type),
			       (TYPE_RM_SIZE (etype) != 0
				? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
			       0))
      && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
    {
      /* Shift the value all the way up and back down again: the left shift
	 positions the RM-sized value bits at the top of the mode, and the
	 right shift (arithmetic or logical per BASE_TYPE's signedness)
	 performs the extension.  */
      tree base_type
	= gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
      tree shift_expr
	= convert (base_type,
		   size_binop (MINUS_EXPR,
			       bitsize_int
			       (GET_MODE_BITSIZE (TYPE_MODE (type))),
			       TYPE_RM_SIZE (type)));
      expr
	= convert (type,
		   build_binary_op (RSHIFT_EXPR, base_type,
				    build_binary_op (LSHIFT_EXPR, base_type,
						     convert (base_type, expr),
						     shift_expr),
				    shift_expr));
    }

  /* An unchecked conversion should never raise Constraint_Error.  The code
     below assumes that GCC's conversion routines overflow the same way that
     the underlying hardware does.  This is probably true.  In the rare case
     when it is false, we can rely on the fact that such conversions are
     erroneous anyway.  */
  if (TREE_CODE (expr) == INTEGER_CST)
    TREE_OVERFLOW (expr) = 0;

  /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
     show no longer constant.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
      && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
			   OEP_ONLY_CONST))
    TREE_CONSTANT (expr) = 0;

  return expr;
}
5599 \f
5600 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
5601 the latter being a record type as predicated by Is_Record_Type. */
5602
5603 enum tree_code
5604 tree_code_for_record_type (Entity_Id gnat_type)
5605 {
5606 Node_Id component_list, component;
5607
5608 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5609 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
5610 if (!Is_Unchecked_Union (gnat_type))
5611 return RECORD_TYPE;
5612
5613 gnat_type = Implementation_Base_Type (gnat_type);
5614 component_list
5615 = Component_List (Type_Definition (Declaration_Node (gnat_type)));
5616
5617 for (component = First_Non_Pragma (Component_Items (component_list));
5618 Present (component);
5619 component = Next_Non_Pragma (component))
5620 if (Ekind (Defining_Entity (component)) == E_Component)
5621 return RECORD_TYPE;
5622
5623 return UNION_TYPE;
5624 }
5625
5626 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5627 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5628 according to the presence of an alignment clause on the type or, if it
5629 is an array, on the component type. */
5630
5631 bool
5632 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5633 {
5634 gnat_type = Underlying_Type (gnat_type);
5635
5636 *align_clause = Present (Alignment_Clause (gnat_type));
5637
5638 if (Is_Array_Type (gnat_type))
5639 {
5640 gnat_type = Underlying_Type (Component_Type (gnat_type));
5641 if (Present (Alignment_Clause (gnat_type)))
5642 *align_clause = true;
5643 }
5644
5645 if (!Is_Floating_Point_Type (gnat_type))
5646 return false;
5647
5648 if (UI_To_Int (Esize (gnat_type)) != 64)
5649 return false;
5650
5651 return true;
5652 }
5653
5654 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5655 size is greater or equal to 64 bits, or an array of such a type. Set
5656 ALIGN_CLAUSE according to the presence of an alignment clause on the
5657 type or, if it is an array, on the component type. */
5658
5659 bool
5660 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5661 {
5662 gnat_type = Underlying_Type (gnat_type);
5663
5664 *align_clause = Present (Alignment_Clause (gnat_type));
5665
5666 if (Is_Array_Type (gnat_type))
5667 {
5668 gnat_type = Underlying_Type (Component_Type (gnat_type));
5669 if (Present (Alignment_Clause (gnat_type)))
5670 *align_clause = true;
5671 }
5672
5673 if (!Is_Scalar_Type (gnat_type))
5674 return false;
5675
5676 if (UI_To_Int (Esize (gnat_type)) < 64)
5677 return false;
5678
5679 return true;
5680 }
5681
5682 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
5683 component of an aggregate type. */
5684
5685 bool
5686 type_for_nonaliased_component_p (tree gnu_type)
5687 {
5688 /* If the type is passed by reference, we may have pointers to the
5689 component so it cannot be made non-aliased. */
5690 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5691 return false;
5692
5693 /* We used to say that any component of aggregate type is aliased
5694 because the front-end may take 'Reference of it. The front-end
5695 has been enhanced in the meantime so as to use a renaming instead
5696 in most cases, but the back-end can probably take the address of
5697 such a component too so we go for the conservative stance.
5698
5699 For instance, we might need the address of any array type, even
5700 if normally passed by copy, to construct a fat pointer if the
5701 component is used as an actual for an unconstrained formal.
5702
5703 Likewise for record types: even if a specific record subtype is
5704 passed by copy, the parent type might be passed by ref (e.g. if
5705 it's of variable size) and we might take the address of a child
5706 component to pass to a parent formal. We have no way to check
5707 for such conditions here. */
5708 if (AGGREGATE_TYPE_P (gnu_type))
5709 return false;
5710
5711 return true;
5712 }
5713
5714 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
5715
5716 bool
5717 smaller_form_type_p (tree type, tree orig_type)
5718 {
5719 tree size, osize;
5720
5721 /* We're not interested in variants here. */
5722 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5723 return false;
5724
5725 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5726 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5727 return false;
5728
5729 size = TYPE_SIZE (type);
5730 osize = TYPE_SIZE (orig_type);
5731
5732 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5733 return false;
5734
5735 return tree_int_cst_lt (size, osize) != 0;
5736 }
5737
/* Perform final processing on global variables.  */

/* Dummy static variable used by gnat_write_global_declarations below to
   register globally used types in the types_used_by_vars hash table; it is
   marked GTY so the garbage collector keeps it (and the types) alive.  */
static GTY (()) tree dummy_global;
5741
void
gnat_write_global_declarations (void)
{
  unsigned int i;
  tree iter;

  /* If we have declared types as used at the global level, insert them in
     the global hash table.  We use a dummy variable for this purpose.  */
  if (types_used_by_cur_var_decl && !types_used_by_cur_var_decl->is_empty ())
    {
      struct varpool_node *node;
      char *label;

      /* Build a uniquely named static variable; TREE_ASM_WRITTEN marks it
	 as already emitted so no assembly is actually generated for it,
	 while force_output keeps it in the varpool.  */
      ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
      dummy_global
	= build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
		      void_type_node);
      TREE_STATIC (dummy_global) = 1;
      TREE_ASM_WRITTEN (dummy_global) = 1;
      node = varpool_node_for_decl (dummy_global);
      node->force_output = 1;

      /* Drain the pending list, attributing every type to the dummy.  */
      while (!types_used_by_cur_var_decl->is_empty ())
	{
	  tree t = types_used_by_cur_var_decl->pop ();
	  types_used_by_var_decl_insert (t, dummy_global);
	}
    }

  /* Output debug information for all global type declarations first.  This
     ensures that global types whose compilation hasn't been finalized yet,
     for example pointers to Taft amendment types, have their compilation
     finalized in the right context.  */
  FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
    if (TREE_CODE (iter) == TYPE_DECL)
      debug_hooks->global_decl (iter);

  /* Proceed to optimize and emit assembly.  */
  finalize_compilation_unit ();

  /* After cgraph has had a chance to emit everything that's going to
     be emitted, output debug information for the rest of globals.  */
  if (!seen_error ())
    {
      timevar_push (TV_SYMOUT);
      FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
	if (TREE_CODE (iter) != TYPE_DECL)
	  debug_hooks->global_decl (iter);
      timevar_pop (TV_SYMOUT);
    }
}
5793
5794 /* ************************************************************************
5795 * * GCC builtins support *
5796 * ************************************************************************ */
5797
5798 /* The general scheme is fairly simple:
5799
5800 For each builtin function/type to be declared, gnat_install_builtins calls
5801 internal facilities which eventually get to gnat_push_decl, which in turn
5802 tracks the so declared builtin function decls in the 'builtin_decls' global
5803 datastructure. When an Intrinsic subprogram declaration is processed, we
5804 search this global datastructure to retrieve the associated BUILT_IN DECL
5805 node. */
5806
5807 /* Search the chain of currently available builtin declarations for a node
5808 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5809 found, if any, or NULL_TREE otherwise. */
5810 tree
5811 builtin_decl_for (tree name)
5812 {
5813 unsigned i;
5814 tree decl;
5815
5816 FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
5817 if (DECL_NAME (decl) == name)
5818 return decl;
5819
5820 return NULL_TREE;
5821 }
5822
5823 /* The code below eventually exposes gnat_install_builtins, which declares
5824 the builtin types and functions we might need, either internally or as
5825 user accessible facilities.
5826
5827 ??? This is a first implementation shot, still in rough shape. It is
5828 heavily inspired from the "C" family implementation, with chunks copied
5829 verbatim from there.
5830
5831 Two obvious TODO candidates are
5832 o Use a more efficient name/decl mapping scheme
5833 o Devise a middle-end infrastructure to avoid having to copy
5834 pieces between front-ends. */
5835
5836 /* ----------------------------------------------------------------------- *
5837 * BUILTIN ELEMENTARY TYPES *
5838 * ----------------------------------------------------------------------- */
5839
/* Standard data types to be used in builtin argument declarations.  */

enum c_tree_index
{
    CTI_SIGNED_SIZE_TYPE, /* For format checking only.  */
    CTI_STRING_TYPE,
    CTI_CONST_STRING_TYPE,

    CTI_MAX
};

/* Storage for the types above, indexed by c_tree_index.  */
static tree c_global_trees[CTI_MAX];

/* Accessors mirroring the names used by the C family front-ends.  */
#define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
#define string_type_node c_global_trees[CTI_STRING_TYPE]
#define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]

/* ??? In addition to some attribute handlers, we currently don't support a
   (small) number of builtin-types, which in turn inhibits support for a
   number of builtin functions.  Map them to void_type_node so that the
   corresponding entries degrade gracefully.  */
#define wint_type_node    void_type_node
#define intmax_type_node  void_type_node
#define uintmax_type_node void_type_node
5863
5864 /* Build the void_list_node (void_type_node having been created). */
5865
5866 static tree
5867 build_void_list_node (void)
5868 {
5869 tree t = build_tree_list (NULL_TREE, void_type_node);
5870 return t;
5871 }
5872
5873 /* Used to help initialize the builtin-types.def table. When a type of
5874 the correct size doesn't exist, use error_mark_node instead of NULL.
5875 The later results in segfaults even when a decl using the type doesn't
5876 get invoked. */
5877
5878 static tree
5879 builtin_type_for_size (int size, bool unsignedp)
5880 {
5881 tree type = gnat_type_for_size (size, unsignedp);
5882 return type ? type : error_mark_node;
5883 }
5884
/* Build/push the elementary type decls that builtin functions/types
   will need.  */

static void
install_builtin_elementary_types (void)
{
  signed_size_type_node = gnat_signed_type (size_type_node);
  pid_type_node = integer_type_node;
  void_list_node = build_void_list_node ();

  /* "char *" and "const char *", used by string builtins.  */
  string_type_node = build_pointer_type (char_type_node);
  const_string_type_node
    = build_pointer_type (build_qualified_type
			  (char_type_node, TYPE_QUAL_CONST));
}
5900
5901 /* ----------------------------------------------------------------------- *
5902 * BUILTIN FUNCTION TYPES *
5903 * ----------------------------------------------------------------------- */
5904
5905 /* Now, builtin function types per se. */
5906
/* One enumerator per builtin function type described in builtin-types.def;
   each DEF_* macro expands to the entry's NAME.  BT_LAST is a sentinel.  */
enum c_builtin_type
{
#define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
#define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
#define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
#define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
#define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
#define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
#define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
#define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
#define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
#define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7, ARG8) NAME,
#define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
#define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
#define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
#define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
#define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
#define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG6) \
  NAME,
#define DEF_POINTER_TYPE(NAME, TYPE) NAME,
#include "builtin-types.def"
#undef DEF_PRIMITIVE_TYPE
#undef DEF_FUNCTION_TYPE_0
#undef DEF_FUNCTION_TYPE_1
#undef DEF_FUNCTION_TYPE_2
#undef DEF_FUNCTION_TYPE_3
#undef DEF_FUNCTION_TYPE_4
#undef DEF_FUNCTION_TYPE_5
#undef DEF_FUNCTION_TYPE_6
#undef DEF_FUNCTION_TYPE_7
#undef DEF_FUNCTION_TYPE_8
#undef DEF_FUNCTION_TYPE_VAR_0
#undef DEF_FUNCTION_TYPE_VAR_1
#undef DEF_FUNCTION_TYPE_VAR_2
#undef DEF_FUNCTION_TYPE_VAR_3
#undef DEF_FUNCTION_TYPE_VAR_4
#undef DEF_FUNCTION_TYPE_VAR_5
#undef DEF_POINTER_TYPE
  BT_LAST
};

typedef enum c_builtin_type builtin_type;

/* A temporary array used in communication with def_fn_type.  Indexed by
   c_builtin_type; the extra slot holds a NULL_TREE terminator.  */
static GTY(()) tree builtin_types[(int) BT_LAST + 1];
5952
/* A helper function for install_builtin_function_types.  Build the function
   type for DEF with return type RET and N arguments.  If VAR is true, then
   the function should be variadic after those N arguments.  The variadic
   argument list carries the c_builtin_type of each of the N parameters.

   Takes special care not to ICE if any of the types involved are
   error_mark_node, which indicates that said type is not in fact available
   (see builtin_type_for_size).  In which case the function type as a whole
   should be error_mark_node.  */

static void
def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
{
  tree t;
  tree *args = XALLOCAVEC (tree, n);
  va_list list;
  int i;

  va_start (list, n);
  for (i = 0; i < n; ++i)
    {
      builtin_type a = (builtin_type) va_arg (list, int);
      t = builtin_types[a];
      /* Propagate an unavailable argument type to the whole entry.  */
      if (t == error_mark_node)
	goto egress;
      args[i] = t;
    }

  t = builtin_types[ret];
  if (t == error_mark_node)
    goto egress;
  if (var)
    t = build_varargs_function_type_array (t, n, args);
  else
    t = build_function_type_array (t, n, args);

 egress:
  /* T is either the built function type or error_mark_node here.  */
  builtin_types[def] = t;
  va_end (list);
}
5992
5993 /* Build the builtin function types and install them in the builtin_types
5994 array for later use in builtin function decls. */
5995
5996 static void
5997 install_builtin_function_types (void)
5998 {
5999 tree va_list_ref_type_node;
6000 tree va_list_arg_type_node;
6001
6002 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
6003 {
6004 va_list_arg_type_node = va_list_ref_type_node =
6005 build_pointer_type (TREE_TYPE (va_list_type_node));
6006 }
6007 else
6008 {
6009 va_list_arg_type_node = va_list_type_node;
6010 va_list_ref_type_node = build_reference_type (va_list_type_node);
6011 }
6012
6013 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
6014 builtin_types[ENUM] = VALUE;
6015 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
6016 def_fn_type (ENUM, RETURN, 0, 0);
6017 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
6018 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
6019 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
6020 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
6021 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
6022 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
6023 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
6024 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
6025 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
6026 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
6027 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6028 ARG6) \
6029 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
6030 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6031 ARG6, ARG7) \
6032 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
6033 #define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6034 ARG6, ARG7, ARG8) \
6035 def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
6036 ARG7, ARG8);
6037 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
6038 def_fn_type (ENUM, RETURN, 1, 0);
6039 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
6040 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
6041 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
6042 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
6043 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
6044 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
6045 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
6046 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
6047 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
6048 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
6049 #define DEF_POINTER_TYPE(ENUM, TYPE) \
6050 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
6051
6052 #include "builtin-types.def"
6053
6054 #undef DEF_PRIMITIVE_TYPE
6055 #undef DEF_FUNCTION_TYPE_1
6056 #undef DEF_FUNCTION_TYPE_2
6057 #undef DEF_FUNCTION_TYPE_3
6058 #undef DEF_FUNCTION_TYPE_4
6059 #undef DEF_FUNCTION_TYPE_5
6060 #undef DEF_FUNCTION_TYPE_6
6061 #undef DEF_FUNCTION_TYPE_VAR_0
6062 #undef DEF_FUNCTION_TYPE_VAR_1
6063 #undef DEF_FUNCTION_TYPE_VAR_2
6064 #undef DEF_FUNCTION_TYPE_VAR_3
6065 #undef DEF_FUNCTION_TYPE_VAR_4
6066 #undef DEF_FUNCTION_TYPE_VAR_5
6067 #undef DEF_POINTER_TYPE
6068 builtin_types[(int) BT_LAST] = NULL_TREE;
6069 }
6070
6071 /* ----------------------------------------------------------------------- *
6072 * BUILTIN ATTRIBUTES *
6073 * ----------------------------------------------------------------------- */
6074
/* One enumerator per attribute tree described in builtin-attrs.def;
   ATTR_LAST is a sentinel counting the entries.  */
enum built_in_attribute
{
#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
#define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_STRING
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
  ATTR_LAST
};

/* Trees for the entries above, filled in by install_builtin_attributes.  */
static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
6092
/* Build the attribute trees declared in builtin-attrs.def and store them
   in the built_in_attributes array.  TREE_LIST entries may refer to
   earlier entries of the array by index.  */
static void
install_builtin_attributes (void)
{
  /* Fill in the built_in_attributes array.  */
#define DEF_ATTR_NULL_TREE(ENUM)				\
  built_in_attributes[(int) ENUM] = NULL_TREE;
#define DEF_ATTR_INT(ENUM, VALUE)				\
  built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
#define DEF_ATTR_STRING(ENUM, VALUE)				\
  built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
#define DEF_ATTR_IDENT(ENUM, STRING)				\
  built_in_attributes[(int) ENUM] = get_identifier (STRING);
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN)	\
  built_in_attributes[(int) ENUM]			\
    = tree_cons (built_in_attributes[(int) PURPOSE],	\
		 built_in_attributes[(int) VALUE],	\
		 built_in_attributes[(int) CHAIN]);
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_STRING
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
}
6117
6118 /* Handle a "const" attribute; arguments as in
6119 struct attribute_spec.handler. */
6120
6121 static tree
6122 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
6123 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6124 bool *no_add_attrs)
6125 {
6126 if (TREE_CODE (*node) == FUNCTION_DECL)
6127 TREE_READONLY (*node) = 1;
6128 else
6129 *no_add_attrs = true;
6130
6131 return NULL_TREE;
6132 }
6133
6134 /* Handle a "nothrow" attribute; arguments as in
6135 struct attribute_spec.handler. */
6136
6137 static tree
6138 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
6139 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6140 bool *no_add_attrs)
6141 {
6142 if (TREE_CODE (*node) == FUNCTION_DECL)
6143 TREE_NOTHROW (*node) = 1;
6144 else
6145 *no_add_attrs = true;
6146
6147 return NULL_TREE;
6148 }
6149
6150 /* Handle a "pure" attribute; arguments as in
6151 struct attribute_spec.handler. */
6152
6153 static tree
6154 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6155 int ARG_UNUSED (flags), bool *no_add_attrs)
6156 {
6157 if (TREE_CODE (*node) == FUNCTION_DECL)
6158 DECL_PURE_P (*node) = 1;
6159 /* ??? TODO: Support types. */
6160 else
6161 {
6162 warning (OPT_Wattributes, "%qs attribute ignored",
6163 IDENTIFIER_POINTER (name));
6164 *no_add_attrs = true;
6165 }
6166
6167 return NULL_TREE;
6168 }
6169
/* Handle a "no vops" attribute; arguments as in
   struct attribute_spec.handler.  This internal attribute is only ever
   applied to function decls, hence the assertion.  */

static tree
handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
			 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
			 bool *ARG_UNUSED (no_add_attrs))
{
  gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
  DECL_IS_NOVOPS (*node) = 1;
  return NULL_TREE;
}
6182
6183 /* Helper for nonnull attribute handling; fetch the operand number
6184 from the attribute argument list. */
6185
6186 static bool
6187 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
6188 {
6189 /* Verify the arg number is a constant. */
6190 if (!tree_fits_uhwi_p (arg_num_expr))
6191 return false;
6192
6193 *valp = TREE_INT_CST_LOW (arg_num_expr);
6194 return true;
6195 }
6196
/* Handle the "nonnull" attribute; arguments as in
   struct attribute_spec.handler.  NODE points to the function type being
   annotated; ARGS is the list of 1-based parameter positions required to
   be non-null, or empty to cover every pointer parameter.  */
static tree
handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
			  tree args, int ARG_UNUSED (flags),
			  bool *no_add_attrs)
{
  tree type = *node;
  unsigned HOST_WIDE_INT attr_arg_num;

  /* If no arguments are specified, all pointer arguments should be
     non-null.  Verify a full prototype is given so that the arguments
     will have the correct types when we actually check them later.  */
  if (!args)
    {
      if (!prototype_p (type))
	{
	  error ("nonnull attribute without arguments on a non-prototype");
	  *no_add_attrs = true;
	}
      return NULL_TREE;
    }

  /* Argument list specified.  Verify that each argument number references
     a pointer argument.  */
  for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
    {
      unsigned HOST_WIDE_INT arg_num = 0, ck_num;

      if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
	{
	  error ("nonnull argument has invalid operand number (argument %lu)",
		 (unsigned long) attr_arg_num);
	  *no_add_attrs = true;
	  return NULL_TREE;
	}

      if (prototype_p (type))
	{
	  function_args_iterator iter;
	  tree argument;

	  /* Walk the parameter types up to position ARG_NUM.  */
	  function_args_iter_init (&iter, type);
	  for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
	    {
	      argument = function_args_iter_cond (&iter);
	      if (!argument || ck_num == arg_num)
		break;
	    }

	  /* Running off the end or landing on the terminating void means
	     the operand number exceeds the parameter count.  */
	  if (!argument
	      || TREE_CODE (argument) == VOID_TYPE)
	    {
	      error ("nonnull argument with out-of-range operand number "
		     "(argument %lu, operand %lu)",
		     (unsigned long) attr_arg_num, (unsigned long) arg_num);
	      *no_add_attrs = true;
	      return NULL_TREE;
	    }

	  if (TREE_CODE (argument) != POINTER_TYPE)
	    {
	      error ("nonnull argument references non-pointer operand "
		     "(argument %lu, operand %lu)",
		     (unsigned long) attr_arg_num, (unsigned long) arg_num);
	      *no_add_attrs = true;
	      return NULL_TREE;
	    }
	}
    }

  return NULL_TREE;
}
6269
6270 /* Handle a "sentinel" attribute. */
6271
6272 static tree
6273 handle_sentinel_attribute (tree *node, tree name, tree args,
6274 int ARG_UNUSED (flags), bool *no_add_attrs)
6275 {
6276 if (!prototype_p (*node))
6277 {
6278 warning (OPT_Wattributes,
6279 "%qs attribute requires prototypes with named arguments",
6280 IDENTIFIER_POINTER (name));
6281 *no_add_attrs = true;
6282 }
6283 else
6284 {
6285 if (!stdarg_p (*node))
6286 {
6287 warning (OPT_Wattributes,
6288 "%qs attribute only applies to variadic functions",
6289 IDENTIFIER_POINTER (name));
6290 *no_add_attrs = true;
6291 }
6292 }
6293
6294 if (args)
6295 {
6296 tree position = TREE_VALUE (args);
6297
6298 if (TREE_CODE (position) != INTEGER_CST)
6299 {
6300 warning (0, "requested position is not an integer constant");
6301 *no_add_attrs = true;
6302 }
6303 else
6304 {
6305 if (tree_int_cst_lt (position, integer_zero_node))
6306 {
6307 warning (0, "requested position is less than zero");
6308 *no_add_attrs = true;
6309 }
6310 }
6311 }
6312
6313 return NULL_TREE;
6314 }
6315
6316 /* Handle a "noreturn" attribute; arguments as in
6317 struct attribute_spec.handler. */
6318
6319 static tree
6320 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6321 int ARG_UNUSED (flags), bool *no_add_attrs)
6322 {
6323 tree type = TREE_TYPE (*node);
6324
6325 /* See FIXME comment in c_common_attribute_table. */
6326 if (TREE_CODE (*node) == FUNCTION_DECL)
6327 TREE_THIS_VOLATILE (*node) = 1;
6328 else if (TREE_CODE (type) == POINTER_TYPE
6329 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
6330 TREE_TYPE (*node)
6331 = build_pointer_type
6332 (build_type_variant (TREE_TYPE (type),
6333 TYPE_READONLY (TREE_TYPE (type)), 1));
6334 else
6335 {
6336 warning (OPT_Wattributes, "%qs attribute ignored",
6337 IDENTIFIER_POINTER (name));
6338 *no_add_attrs = true;
6339 }
6340
6341 return NULL_TREE;
6342 }
6343
6344 /* Handle a "leaf" attribute; arguments as in
6345 struct attribute_spec.handler. */
6346
6347 static tree
6348 handle_leaf_attribute (tree *node, tree name,
6349 tree ARG_UNUSED (args),
6350 int ARG_UNUSED (flags), bool *no_add_attrs)
6351 {
6352 if (TREE_CODE (*node) != FUNCTION_DECL)
6353 {
6354 warning (OPT_Wattributes, "%qE attribute ignored", name);
6355 *no_add_attrs = true;
6356 }
6357 if (!TREE_PUBLIC (*node))
6358 {
6359 warning (OPT_Wattributes, "%qE attribute has no effect", name);
6360 *no_add_attrs = true;
6361 }
6362
6363 return NULL_TREE;
6364 }
6365
6366 /* Handle a "malloc" attribute; arguments as in
6367 struct attribute_spec.handler. */
6368
6369 static tree
6370 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6371 int ARG_UNUSED (flags), bool *no_add_attrs)
6372 {
6373 if (TREE_CODE (*node) == FUNCTION_DECL
6374 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
6375 DECL_IS_MALLOC (*node) = 1;
6376 else
6377 {
6378 warning (OPT_Wattributes, "%qs attribute ignored",
6379 IDENTIFIER_POINTER (name));
6380 *no_add_attrs = true;
6381 }
6382
6383 return NULL_TREE;
6384 }
6385
6386 /* Fake handler for attributes we don't properly support. */
6387
6388 tree
6389 fake_attribute_handler (tree * ARG_UNUSED (node),
6390 tree ARG_UNUSED (name),
6391 tree ARG_UNUSED (args),
6392 int ARG_UNUSED (flags),
6393 bool * ARG_UNUSED (no_add_attrs))
6394 {
6395 return NULL_TREE;
6396 }
6397
6398 /* Handle a "type_generic" attribute. */
6399
6400 static tree
6401 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
6402 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6403 bool * ARG_UNUSED (no_add_attrs))
6404 {
6405 /* Ensure we have a function type. */
6406 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
6407
6408 /* Ensure we have a variadic function. */
6409 gcc_assert (!prototype_p (*node) || stdarg_p (*node));
6410
6411 return NULL_TREE;
6412 }
6413
6414 /* Handle a "vector_size" attribute; arguments as in
6415 struct attribute_spec.handler. */
6416
6417 static tree
6418 handle_vector_size_attribute (tree *node, tree name, tree args,
6419 int ARG_UNUSED (flags), bool *no_add_attrs)
6420 {
6421 tree type = *node;
6422 tree vector_type;
6423
6424 *no_add_attrs = true;
6425
6426 /* We need to provide for vector pointers, vector arrays, and
6427 functions returning vectors. For example:
6428
6429 __attribute__((vector_size(16))) short *foo;
6430
6431 In this case, the mode is SI, but the type being modified is
6432 HI, so we need to look further. */
6433 while (POINTER_TYPE_P (type)
6434 || TREE_CODE (type) == FUNCTION_TYPE
6435 || TREE_CODE (type) == ARRAY_TYPE)
6436 type = TREE_TYPE (type);
6437
6438 vector_type = build_vector_type_for_size (type, TREE_VALUE (args), name);
6439 if (!vector_type)
6440 return NULL_TREE;
6441
6442 /* Build back pointers if needed. */
6443 *node = reconstruct_complex_type (*node, vector_type);
6444
6445 return NULL_TREE;
6446 }
6447
6448 /* Handle a "vector_type" attribute; arguments as in
6449 struct attribute_spec.handler. */
6450
6451 static tree
6452 handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6453 int ARG_UNUSED (flags), bool *no_add_attrs)
6454 {
6455 tree type = *node;
6456 tree vector_type;
6457
6458 *no_add_attrs = true;
6459
6460 if (TREE_CODE (type) != ARRAY_TYPE)
6461 {
6462 error ("attribute %qs applies to array types only",
6463 IDENTIFIER_POINTER (name));
6464 return NULL_TREE;
6465 }
6466
6467 vector_type = build_vector_type_for_array (type, name);
6468 if (!vector_type)
6469 return NULL_TREE;
6470
6471 TYPE_REPRESENTATIVE_ARRAY (vector_type) = type;
6472 *node = vector_type;
6473
6474 return NULL_TREE;
6475 }
6476
6477 /* ----------------------------------------------------------------------- *
6478 * BUILTIN FUNCTIONS *
6479 * ----------------------------------------------------------------------- */
6480
/* Worker for DEF_BUILTIN.  Possibly define a builtin function with one or two
   names.  Does not declare a non-__builtin_ function if flag_no_builtin, or
   if nonansi_p and flag_no_nonansi_builtin.

   FNCODE/NAME/FNCLASS identify the builtin; FNTYPE is the type of the
   __builtin_ entry point and LIBTYPE that of the plain-named variant.
   BOTH_P requests the plain-named variant in addition; FALLBACK_P makes
   the __builtin_ form fall back to a library call of the plain name.
   FNATTRS are the attributes to attach; IMPLICIT_P is passed through to
   set_builtin_decl.  */

static void
def_builtin_1 (enum built_in_function fncode,
	       const char *name,
	       enum built_in_class fnclass,
	       tree fntype, tree libtype,
	       bool both_p, bool fallback_p,
	       bool nonansi_p ATTRIBUTE_UNUSED,
	       tree fnattrs, bool implicit_p)
{
  tree decl;
  const char *libname;

  /* Preserve an already installed decl.  It most likely was setup in advance
     (e.g. as part of the internal builtins) for specific reasons.  */
  if (builtin_decl_explicit (fncode) != NULL_TREE)
    return;

  /* Unless neither variant of the non-prefixed name is wanted, NAME must
     start with "__builtin_" so that stripping the prefix below is valid.  */
  gcc_assert ((!both_p && !fallback_p)
	      || !strncmp (name, "__builtin_",
			   strlen ("__builtin_")));

  /* LIBNAME is NAME with the "__builtin_" prefix stripped.  */
  libname = name + strlen ("__builtin_");
  decl = add_builtin_function (name, fntype, fncode, fnclass,
			       (fallback_p ? libname : NULL),
			       fnattrs);
  if (both_p)
    /* ??? This is normally further controlled by command-line options
       like -fno-builtin, but we don't have them for Ada.  */
    add_builtin_function (libname, libtype, fncode, fnclass,
			  NULL, fnattrs);

  /* Register the __builtin_ decl as the canonical one for FNCODE.  */
  set_builtin_decl (fncode, decl, implicit_p);
}
6518
/* C-dialect flags referenced by the COND expressions in builtins.def.
   Ada has no notion of a C standard level, so they are pinned to 0 and
   the corresponding dialect-specific builtins are never enabled.  */
static int flag_isoc94 = 0;
static int flag_isoc99 = 0;
static int flag_isoc11 = 0;
6522
/* Install what the common builtins.def offers.  Each DEF_BUILTIN entry
   whose NAME and COND are satisfied is registered through def_builtin_1,
   with its type and attribute indices resolved against the builtin_types
   and built_in_attributes tables built earlier.  */

static void
install_builtin_functions (void)
{
#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
		    NONANSI_P, ATTRS, IMPLICIT, COND)			\
  if (NAME && COND)							\
    def_builtin_1 (ENUM, NAME, CLASS,					\
		   builtin_types[(int) TYPE],				\
		   builtin_types[(int) LIBTYPE],			\
		   BOTH_P, FALLBACK_P, NONANSI_P,			\
		   built_in_attributes[(int) ATTRS], IMPLICIT);
#include "builtins.def"
#undef DEF_BUILTIN
}
6539
/* ----------------------------------------------------------------------- *
 *                       BUILTIN FUNCTIONS INSTALLATION                    *
 * ----------------------------------------------------------------------- */
6543
/* Install the builtin functions we might need.  This is the single entry
   point called by the rest of the Ada front end; the ordering of the
   calls below is significant.  */

void
gnat_install_builtins (void)
{
  /* Set up the elementary types, function types and attribute tables
     that the DEF_BUILTIN machinery indexes into.  */
  install_builtin_elementary_types ();
  install_builtin_function_types ();
  install_builtin_attributes ();

  /* Install builtins used by generic middle-end pieces first.  Some of these
     know about internal specificities and control attributes accordingly, for
     instance __builtin_alloca vs no-throw and -fstack-check.  We will ignore
     the generic definition from builtins.def.  */
  build_common_builtin_nodes ();

  /* Now, install the target specific builtins, such as the AltiVec family on
     ppc, and the common set as exposed by builtins.def.  */
  targetm.init_builtins ();
  install_builtin_functions ();
}
6564
6565 #include "gt-ada-utils.h"
6566 #include "gtype-ada.h"