/* Source: gcc/ada/gcc-interface/utils.c (GCC Ada front-end, "gigi").
   The lines below were extracted from a gitweb listing of this file.  */
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2011, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
25
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "tm.h"
30 #include "tree.h"
31 #include "flags.h"
32 #include "toplev.h"
33 #include "diagnostic-core.h"
34 #include "output.h"
35 #include "ggc.h"
36 #include "debug.h"
37 #include "convert.h"
38 #include "target.h"
39 #include "langhooks.h"
40 #include "cgraph.h"
41 #include "tree-dump.h"
42 #include "tree-inline.h"
43 #include "tree-iterator.h"
44
45 #include "ada.h"
46 #include "types.h"
47 #include "atree.h"
48 #include "elists.h"
49 #include "namet.h"
50 #include "nlists.h"
51 #include "stringt.h"
52 #include "uintp.h"
53 #include "fe.h"
54 #include "sinfo.h"
55 #include "einfo.h"
56 #include "ada-tree.h"
57 #include "gigi.h"
58
/* Provide a default for MAX_BITS_PER_WORD if the target does not define it:
   the number of bits in a machine word.  It bounds the precision of the
   integer types cached in signed_and_unsigned_types below.  */
#ifndef MAX_BITS_PER_WORD
#define MAX_BITS_PER_WORD BITS_PER_WORD
#endif

/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* The default alignment of "double" floating-point types, i.e. floating
   point types whose size is equal to 64 bits, or 0 if this alignment is
   not specifically capped.  */
int double_float_alignment;

/* The default alignment of "double" or larger scalar types, i.e. scalar
   types whose size is greater or equal to 64 bits, or 0 if this alignment
   is not specifically capped.  */
int double_scalar_alignment;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Likewise, but with extra info for each of the possible raise reasons.  */
tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];

/* Forward declarations for handlers of attributes; these implement the
   entries of gnat_internal_attribute_table below.  */
static tree handle_const_attribute (tree *, tree, tree, int, bool *);
static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);

/* Fake handler for attributes we don't properly support, typically because
   they'd require dragging a lot of the common-c front-end circuitry.  */
static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
102
/* Table of machine-independent internal attributes for Ada.  We support
   this minimal set of attributes to accommodate the needs of builtins.
   Each entry is terminated by its handler; the table itself is terminated
   by the all-NULL sentinel entry.  */
const struct attribute_spec gnat_internal_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "const",        0, 0,  true,  false, false, handle_const_attribute,
    false },
  { "nothrow",      0, 0,  true,  false, false, handle_nothrow_attribute,
    false },
  { "pure",         0, 0,  true,  false, false, handle_pure_attribute,
    false },
  { "no vops",      0, 0,  true,  false, false, handle_novops_attribute,
    false },
  { "nonnull",      0, -1, false, true,  true,  handle_nonnull_attribute,
    false },
  { "sentinel",     0, 1,  false, true,  true,  handle_sentinel_attribute,
    false },
  { "noreturn",     0, 0,  true,  false, false, handle_noreturn_attribute,
    false },
  { "leaf",         0, 0,  true,  false, false, handle_leaf_attribute,
    false },
  { "malloc",       0, 0,  true,  false, false, handle_malloc_attribute,
    false },
  { "type generic", 0, 0,  false, true,  true,  handle_type_generic_attribute,
    false },

  { "vector_size",  1, 1,  false, true,  false, handle_vector_size_attribute,
    false },
  { "vector_type",  0, 0,  false, true,  false, handle_vector_type_attribute,
    false },
  { "may_alias",    0, 0,  false, true,  false, NULL, false },

  /* ??? format and format_arg are heavy and not supported, which actually
     prevents support for stdio builtins, which we however declare as part
     of the common builtins.def contents.  */
  { "format",     3, 3,  false, true,  true,  fake_attribute_handler, false },
  { "format_arg", 1, 1,  false, true,  true,  fake_attribute_handler, false },

  { NULL,         0, 0, false, false, false, NULL, false }
};
144
/* Associates a GNAT tree node to a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See documentation
   of `save_gnu_tree' for more info.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

/* Accessors for the table above; entries are indexed by the GNAT node id
   relative to First_Node_Id.  */
#define GET_GNU_TREE(GNAT_ENTITY) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]

#define SET_GNU_TREE(GNAT_ENTITY,VAL) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_GNU_TREE(GNAT_ENTITY) \
  (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* Associates a GNAT entity to a GCC tree node used as a dummy, if any.  */
static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;

/* Accessors for the dummy node table, indexed like the table above.  */
#define GET_DUMMY_NODE(GNAT_ENTITY) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id]

#define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
  (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* This variable keeps a table for types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];

/* For each binding contour we allocate a binding_level structure to indicate
   the binding depth.  */

struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
  /* The binding level containing this one (the enclosing binding level).  */
  struct gnat_binding_level *chain;
  /* The BLOCK node for this level.  */
  tree block;
  /* If nonzero, the setjmp buffer that needs to be updated for any
     variable-sized definition within this context.  */
  tree jmpbuf_decl;
};

/* The binding level currently in effect.  */
static GTY(()) struct gnat_binding_level *current_binding_level;

/* A chain of gnat_binding_level structures awaiting reuse.  */
static GTY((deletable)) struct gnat_binding_level *free_binding_level;

/* An array of global declarations.  */
static GTY(()) VEC(tree,gc) *global_decls;

/* An array of builtin function declarations.  */
static GTY(()) VEC(tree,gc) *builtin_decls;

/* An array of global renaming pointers.  */
static GTY(()) VEC(tree,gc) *global_renaming_pointers;

/* A chain of unused BLOCK nodes.  */
static GTY((deletable)) tree free_block_chain;

/* Forward declarations for local helper functions defined later on.  */
static tree merge_sizes (tree, tree, tree, bool, bool);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static tree float_type_for_precision (int, enum machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static tree convert_to_thin_pointer (tree, tree);
static bool potential_alignment_gap (tree, tree, tree);
static void process_attributes (tree, struct attrib *);
221 \f
/* Initialize the association of GNAT nodes to GCC trees: allocate the
   zero-filled GC-managed table, one slot per GNAT node.  */

void
init_gnat_to_gnu (void)
{
  associate_gnat_to_gnu = ggc_alloc_cleared_vec_tree (max_gnat_nodes);
}
229
230 /* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
231 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
232 If NO_CHECK is true, the latter check is suppressed.
233
234 If GNU_DECL is zero, reset a previous association. */
235
236 void
237 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
238 {
239 /* Check that GNAT_ENTITY is not already defined and that it is being set
240 to something which is a decl. If that is not the case, this usually
241 means GNAT_ENTITY is defined twice, but occasionally is due to some
242 Gigi problem. */
243 gcc_assert (!(gnu_decl
244 && (PRESENT_GNU_TREE (gnat_entity)
245 || (!no_check && !DECL_P (gnu_decl)))));
246
247 SET_GNU_TREE (gnat_entity, gnu_decl);
248 }
249
/* GNAT_ENTITY is a GNAT tree node for an entity.  Return the GCC tree node
   that was associated with it.  If there is no such tree node, abort.

   In some cases, such as delayed elaboration or expressions that need to
   be elaborated only once, GNAT_ENTITY is really not an entity.  */

tree
get_gnu_tree (Entity_Id gnat_entity)
{
  /* Asking for an association that was never made is a Gigi bug.  */
  gcc_assert (PRESENT_GNU_TREE (gnat_entity));
  return GET_GNU_TREE (gnat_entity);
}
262
/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY.  */

bool
present_gnu_tree (Entity_Id gnat_entity)
{
  return PRESENT_GNU_TREE (gnat_entity);
}
270 \f
/* Initialize the association of GNAT nodes to GCC trees as dummies:
   allocate the zero-filled GC-managed dummy table, one slot per node.  */

void
init_dummy_type (void)
{
  dummy_node_table = ggc_alloc_cleared_vec_tree (max_gnat_nodes);
}
278
/* Make a dummy type corresponding to GNAT_TYPE, to stand in for it until
   the real type can be elaborated.  The dummy is cached per underlying
   type, so repeated calls return the same node.  */

tree
make_dummy_type (Entity_Id gnat_type)
{
  Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
  tree gnu_type;

  /* If there is an equivalent type, get its underlying type.  */
  if (Present (gnat_underlying))
    gnat_underlying = Underlying_Type (gnat_underlying);

  /* If there was no equivalent type (can only happen when just annotating
     types) or underlying type, go back to the original type.  */
  if (No (gnat_underlying))
    gnat_underlying = gnat_type;

  /* If there is already a dummy type, use that one.  Else make one.  */
  if (PRESENT_DUMMY_NODE (gnat_underlying))
    return GET_DUMMY_NODE (gnat_underlying);

  /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
     an ENUMERAL_TYPE.  */
  gnu_type = make_node (Is_Record_Type (gnat_underlying)
			? tree_code_for_record_type (gnat_underlying)
			: ENUMERAL_TYPE);
  TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
  TYPE_DUMMY_P (gnu_type) = 1;
  TYPE_STUB_DECL (gnu_type)
    = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
  /* Mark by-reference types as addressable.  */
  if (Is_By_Reference_Type (gnat_type))
    TREE_ADDRESSABLE (gnu_type) = 1;

  SET_DUMMY_NODE (gnat_underlying, gnu_type);

  return gnu_type;
}
316 \f
317 /* Return nonzero if we are currently in the global binding level. */
318
319 int
320 global_bindings_p (void)
321 {
322 return ((force_global || !current_function_decl) ? -1 : 0);
323 }
324
/* Enter a new binding level, reusing a previously freed level structure
   and BLOCK node when available to limit GC churn.  */

void
gnat_pushlevel (void)
{
  struct gnat_binding_level *newlevel = NULL;

  /* Reuse a struct for this binding level, if there is one.  */
  if (free_binding_level)
    {
      newlevel = free_binding_level;
      free_binding_level = free_binding_level->chain;
    }
  else
    newlevel = ggc_alloc_gnat_binding_level ();

  /* Use a free BLOCK, if any; otherwise, allocate one.  */
  if (free_block_chain)
    {
      newlevel->block = free_block_chain;
      free_block_chain = BLOCK_CHAIN (free_block_chain);
      BLOCK_CHAIN (newlevel->block) = NULL_TREE;
    }
  else
    newlevel->block = make_node (BLOCK);

  /* Point the BLOCK we just made to its parent.  */
  if (current_binding_level)
    BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;

  /* Clear out any state a recycled BLOCK may carry and mark it used.  */
  BLOCK_VARS (newlevel->block) = NULL_TREE;
  BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
  TREE_USED (newlevel->block) = 1;

  /* Add this level to the front of the chain (stack) of active levels.  */
  newlevel->chain = current_binding_level;
  newlevel->jmpbuf_decl = NULL_TREE;
  current_binding_level = newlevel;
}
364
/* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
   and point FNDECL to this BLOCK.  */

void
set_current_block_context (tree fndecl)
{
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  set_block_for_group (current_binding_level->block);
}
375
/* Set the jmpbuf_decl for the current binding level to DECL.  */

void
set_block_jmpbuf_decl (tree decl)
{
  current_binding_level->jmpbuf_decl = decl;
}
383
/* Get the jmpbuf_decl, if any, for the current binding level.  */

tree
get_block_jmpbuf_decl (void)
{
  return current_binding_level->jmpbuf_decl;
}
391
/* Exit a binding level.  Set any BLOCK into the current code group.  */

void
gnat_poplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  /* Declarations and subblocks were chained in reverse order of
     declaration; put them back in source order.  */
  BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
  BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));

  /* If this is a function-level BLOCK don't do anything.  Otherwise, if there
     are no variables free the block and merge its subblocks into those of its
     parent block.  Otherwise, add it to the list of its parent.  */
  if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
    ;
  else if (BLOCK_VARS (block) == NULL_TREE)
    {
      BLOCK_SUBBLOCKS (level->chain->block)
	= chainon (BLOCK_SUBBLOCKS (block),
		   BLOCK_SUBBLOCKS (level->chain->block));
      /* Recycle the now-empty BLOCK via the free chain.  */
      BLOCK_CHAIN (block) = free_block_chain;
      free_block_chain = block;
    }
  else
    {
      BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
      BLOCK_SUBBLOCKS (level->chain->block) = block;
      TREE_USED (block) = 1;
      set_block_for_group (block);
    }

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}
429
/* Exit a binding level and discard the associated BLOCK.  */

void
gnat_zaplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  /* Recycle the BLOCK rather than keeping it in the tree.  */
  BLOCK_CHAIN (block) = free_block_chain;
  free_block_chain = block;

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}
446 \f
/* Records a ..._DECL node DECL as belonging to the current lexical scope
   and uses GNAT_NODE for location information and propagating flags.  */

void
gnat_pushdecl (tree decl, Node_Id gnat_node)
{
  /* If this decl is public external or at toplevel, there is no context.  */
  if ((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || global_bindings_p ())
    DECL_CONTEXT (decl) = 0;
  else
    {
      DECL_CONTEXT (decl) = current_function_decl;

      /* Functions imported in another function are not really nested.
	 For really nested functions mark them initially as needing
	 a static chain for uses of that flag before unnesting;
	 lower_nested_functions will then recompute it.  */
      if (TREE_CODE (decl) == FUNCTION_DECL && !TREE_PUBLIC (decl))
	DECL_STATIC_CHAIN (decl) = 1;
    }

  /* Suppress warnings for this decl if the GNAT node is absent or has
     warnings turned off.  */
  TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));

  /* Set the location of DECL and emit a declaration for it.  */
  if (Present (gnat_node))
    Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
  add_decl_expr (decl, gnat_node);

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later.  Put global declarations in the
     globals list and local ones in the current block.  But skip TYPE_DECLs
     for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
     with the debugger and aren't needed anyway.  */
  if (!(TREE_CODE (decl) == TYPE_DECL
        && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
    {
      if (global_bindings_p ())
	{
	  VEC_safe_push (tree, gc, global_decls, decl);

	  /* Also track builtin functions separately for quick lookup.  */
	  if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
	    VEC_safe_push (tree, gc, builtin_decls, decl);
	}
      else if (!DECL_EXTERNAL (decl))
	{
	  DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
	  BLOCK_VARS (current_binding_level->block) = decl;
	}
    }

  /* For the declaration of a type, set its name if it either is not already
     set or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same POINTER_TYPE node.  Code in the
     equivalent function of c-decl.c makes a copy of the type node here, but
     that may cause us trouble with incomplete types.  We make an exception
     for fat pointer types because the compiler automatically builds them
     for unconstrained array types and the debugger uses them to represent
     both these and pointers to these.  */
  if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
    {
      tree t = TREE_TYPE (decl);

      if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
	;	/* No prior named TYPE_DECL: name all the variants below.  */
      else if (TYPE_IS_FAT_POINTER_P (t))
	{
	  /* Fat pointer: name a fresh variant copy instead, and record the
	     original type as DECL's original type for debug info.  */
	  tree tt = build_variant_type_copy (t);
	  TYPE_NAME (tt) = decl;
	  TREE_USED (tt) = TREE_USED (t);
	  TREE_TYPE (decl) = tt;
	  if (DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
	    DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
	  else
	    DECL_ORIGINAL_TYPE (decl) = t;
	  t = NULL_TREE;
	  DECL_ARTIFICIAL (decl) = 0;
	}
      else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
	;	/* Previous name was artificial, DECL's is real: rename.  */
      else
	t = NULL_TREE;	/* Keep the existing source-derived name.  */

      /* Propagate the name to all the variants.  This is needed for
	 the type qualifiers machinery to work properly.  */
      if (t)
	for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
	  TYPE_NAME (t) = decl;
    }
}
537 \f
/* Record TYPE as a builtin type for Ada.  NAME is the name of the type.
   A TYPE_DECL is created, pushed at the current (global) level, and
   announced to the debug back-end.  */

void
record_builtin_type (const char *name, tree type)
{
  tree type_decl = build_decl (input_location,
			       TYPE_DECL, get_identifier (name), type);

  gnat_pushdecl (type_decl, Empty);

  if (debug_hooks->type_decl)
    debug_hooks->type_decl (type_decl, false);
}
551 \f
/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
   finish constructing the record or union type.  If REP_LEVEL is zero, this
   record has no representation clause and so will be entirely laid out here.
   If REP_LEVEL is one, this record has a representation clause and has been
   laid out already; only set the sizes and alignment.  If REP_LEVEL is two,
   this record is derived from a parent record and thus inherits its layout;
   only make a pass on the fields to finalize them.  DEBUG_INFO_P is true if
   we need to write debug information about this type.  */

void
finish_record_type (tree record_type, tree field_list, int rep_level,
		    bool debug_info_p)
{
  enum tree_code code = TREE_CODE (record_type);
  tree name = TYPE_NAME (record_type);
  tree ada_size = bitsize_zero_node;
  tree size = bitsize_zero_node;
  /* Remember which layout quantities were already set on entry; they come
     from a rep clause and must not be overwritten below.  */
  bool had_size = TYPE_SIZE (record_type) != 0;
  bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
  bool had_align = TYPE_ALIGN (record_type) != 0;
  tree field;

  TYPE_FIELDS (record_type) = field_list;

  /* Always attach the TYPE_STUB_DECL for a record type.  It is required to
     generate debug info and have a parallel type.  */
  if (name && TREE_CODE (name) == TYPE_DECL)
    name = DECL_NAME (name);
  TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);

  /* Globally initialize the record first.  If this is a rep'ed record,
     that just means some initializations; otherwise, layout the record.  */
  if (rep_level > 0)
    {
      TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));

      if (!had_size_unit)
	TYPE_SIZE_UNIT (record_type) = size_zero_node;

      if (!had_size)
	TYPE_SIZE (record_type) = bitsize_zero_node;

      /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
	 out just like a UNION_TYPE, since the size will be fixed.  */
      else if (code == QUAL_UNION_TYPE)
	code = UNION_TYPE;
    }
  else
    {
      /* Ensure there isn't a size already set.  There can be in an error
	 case where there is a rep clause but all fields have errors and
	 no longer have a position.  */
      TYPE_SIZE (record_type) = 0;
      layout_type (record_type);
    }

  /* At this point, the position and size of each field is known.  It was
     either set before entry by a rep clause, or by laying out the type above.

     We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
     to compute the Ada size; the GCC size and alignment (for rep'ed records
     that are not padding types); and the mode (for rep'ed records).  We also
     clear the DECL_BIT_FIELD indication for the cases we know have not been
     handled yet, and adjust DECL_NONADDRESSABLE_P accordingly.  */

  if (code == QUAL_UNION_TYPE)
    field_list = nreverse (field_list);

  for (field = field_list; field; field = DECL_CHAIN (field))
    {
      tree type = TREE_TYPE (field);
      tree pos = bit_position (field);
      tree this_size = DECL_SIZE (field);
      tree this_ada_size;

      /* For an aggregate field, the Ada size is the type's Ada size, if it
	 has one, instead of the GCC size.  */
      if ((TREE_CODE (type) == RECORD_TYPE
	   || TREE_CODE (type) == UNION_TYPE
	   || TREE_CODE (type) == QUAL_UNION_TYPE)
	  && !TYPE_FAT_POINTER_P (type)
	  && !TYPE_CONTAINS_TEMPLATE_P (type)
	  && TYPE_ADA_SIZE (type))
	this_ada_size = TYPE_ADA_SIZE (type);
      else
	this_ada_size = this_size;

      /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle.  */
      if (DECL_BIT_FIELD (field)
	  && operand_equal_p (this_size, TYPE_SIZE (type), 0))
	{
	  unsigned int align = TYPE_ALIGN (type);

	  /* In the general case, type alignment is required.  */
	  if (value_factor_p (pos, align))
	    {
	      /* The enclosing record type must be sufficiently aligned.
		 Otherwise, if no alignment was specified for it and it
		 has been laid out already, bump its alignment to the
		 desired one if this is compatible with its size.  */
	      if (TYPE_ALIGN (record_type) >= align)
		{
		  DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
		  DECL_BIT_FIELD (field) = 0;
		}
	      else if (!had_align
		       && rep_level == 0
		       && value_factor_p (TYPE_SIZE (record_type), align))
		{
		  TYPE_ALIGN (record_type) = align;
		  DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
		  DECL_BIT_FIELD (field) = 0;
		}
	    }

	  /* In the non-strict alignment case, only byte alignment is
	     required.  */
	  if (!STRICT_ALIGNMENT
	      && DECL_BIT_FIELD (field)
	      && value_factor_p (pos, BITS_PER_UNIT))
	    DECL_BIT_FIELD (field) = 0;
	}

      /* If we still have DECL_BIT_FIELD set at this point, we know that the
	 field is technically not addressable.  Except that it can actually
	 be addressed if it is BLKmode and happens to be properly aligned.  */
      if (DECL_BIT_FIELD (field)
	  && !(DECL_MODE (field) == BLKmode
	       && value_factor_p (pos, BITS_PER_UNIT)))
	DECL_NONADDRESSABLE_P (field) = 1;

      /* A type must be as aligned as its most aligned field that is not
	 a bit-field.  But this is already enforced by layout_type.  */
      if (rep_level > 0 && !DECL_BIT_FIELD (field))
	TYPE_ALIGN (record_type)
	  = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));

      /* Accumulate the record sizes according to the kind of aggregate.  */
      switch (code)
	{
	case UNION_TYPE:
	  ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
	  size = size_binop (MAX_EXPR, size, this_size);
	  break;

	case QUAL_UNION_TYPE:
	  ada_size
	    = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			   this_ada_size, ada_size);
	  size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			      this_size, size);
	  break;

	case RECORD_TYPE:
	  /* Since we know here that all fields are sorted in order of
	     increasing bit position, the size of the record is one
	     higher than the ending bit of the last field processed
	     unless we have a rep clause, since in that case we might
	     have a field outside a QUAL_UNION_TYPE that has a higher ending
	     position.  So use a MAX in that case.  Also, if this field is a
	     QUAL_UNION_TYPE, we need to take into account the previous size in
	     the case of empty variants.  */
	  ada_size
	    = merge_sizes (ada_size, pos, this_ada_size,
			   TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
	  size
	    = merge_sizes (size, pos, this_size,
			   TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Restore the original field order that was reversed above.  */
  if (code == QUAL_UNION_TYPE)
    nreverse (field_list);

  if (rep_level < 2)
    {
      /* If this is a padding record, we never want to make the size smaller
	 than what was specified in it, if any.  */
      if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
	size = TYPE_SIZE (record_type);

      /* Now set any of the values we've just computed that apply.  */
      if (!TYPE_FAT_POINTER_P (record_type)
	  && !TYPE_CONTAINS_TEMPLATE_P (record_type))
	SET_TYPE_ADA_SIZE (record_type, ada_size);

      if (rep_level > 0)
	{
	  tree size_unit = had_size_unit
			   ? TYPE_SIZE_UNIT (record_type)
			   : convert (sizetype,
				      size_binop (CEIL_DIV_EXPR, size,
						  bitsize_unit_node));
	  unsigned int align = TYPE_ALIGN (record_type);

	  /* Round the sizes up to the alignment, in bits and in units.  */
	  TYPE_SIZE (record_type) = variable_size (round_up (size, align));
	  TYPE_SIZE_UNIT (record_type)
	    = variable_size (round_up (size_unit, align / BITS_PER_UNIT));

	  compute_record_mode (record_type);
	}
    }

  if (debug_info_p)
    rest_of_record_type_compilation (record_type);
}
758
759 /* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
760 associated with it. It need not be invoked directly in most cases since
761 finish_record_type takes care of doing so, but this can be necessary if
762 a parallel type is to be attached to the record type. */
763
764 void
765 rest_of_record_type_compilation (tree record_type)
766 {
767 tree field_list = TYPE_FIELDS (record_type);
768 tree field;
769 enum tree_code code = TREE_CODE (record_type);
770 bool var_size = false;
771
772 for (field = field_list; field; field = DECL_CHAIN (field))
773 {
774 /* We need to make an XVE/XVU record if any field has variable size,
775 whether or not the record does. For example, if we have a union,
776 it may be that all fields, rounded up to the alignment, have the
777 same size, in which case we'll use that size. But the debug
778 output routines (except Dwarf2) won't be able to output the fields,
779 so we need to make the special record. */
780 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
781 /* If a field has a non-constant qualifier, the record will have
782 variable size too. */
783 || (code == QUAL_UNION_TYPE
784 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
785 {
786 var_size = true;
787 break;
788 }
789 }
790
791 /* If this record is of variable size, rename it so that the
792 debugger knows it is and make a new, parallel, record
793 that tells the debugger how the record is laid out. See
794 exp_dbug.ads. But don't do this for records that are padding
795 since they confuse GDB. */
796 if (var_size && !TYPE_IS_PADDING_P (record_type))
797 {
798 tree new_record_type
799 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
800 ? UNION_TYPE : TREE_CODE (record_type));
801 tree orig_name = TYPE_NAME (record_type), new_name;
802 tree last_pos = bitsize_zero_node;
803 tree old_field, prev_old_field = NULL_TREE;
804
805 if (TREE_CODE (orig_name) == TYPE_DECL)
806 orig_name = DECL_NAME (orig_name);
807
808 new_name
809 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
810 ? "XVU" : "XVE");
811 TYPE_NAME (new_record_type) = new_name;
812 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
813 TYPE_STUB_DECL (new_record_type)
814 = create_type_stub_decl (new_name, new_record_type);
815 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
816 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
817 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
818 TYPE_SIZE_UNIT (new_record_type)
819 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
820
821 add_parallel_type (TYPE_STUB_DECL (record_type), new_record_type);
822
823 /* Now scan all the fields, replacing each field with a new
824 field corresponding to the new encoding. */
825 for (old_field = TYPE_FIELDS (record_type); old_field;
826 old_field = DECL_CHAIN (old_field))
827 {
828 tree field_type = TREE_TYPE (old_field);
829 tree field_name = DECL_NAME (old_field);
830 tree new_field;
831 tree curpos = bit_position (old_field);
832 bool var = false;
833 unsigned int align = 0;
834 tree pos;
835
836 /* See how the position was modified from the last position.
837
838 There are two basic cases we support: a value was added
839 to the last position or the last position was rounded to
840 a boundary and they something was added. Check for the
841 first case first. If not, see if there is any evidence
842 of rounding. If so, round the last position and try
843 again.
844
845 If this is a union, the position can be taken as zero. */
846
847 /* Some computations depend on the shape of the position expression,
848 so strip conversions to make sure it's exposed. */
849 curpos = remove_conversions (curpos, true);
850
851 if (TREE_CODE (new_record_type) == UNION_TYPE)
852 pos = bitsize_zero_node, align = 0;
853 else
854 pos = compute_related_constant (curpos, last_pos);
855
856 if (!pos && TREE_CODE (curpos) == MULT_EXPR
857 && host_integerp (TREE_OPERAND (curpos, 1), 1))
858 {
859 tree offset = TREE_OPERAND (curpos, 0);
860 align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
861
862 /* An offset which is a bitwise AND with a negative power of 2
863 means an alignment corresponding to this power of 2. Note
864 that, as sizetype is sign-extended but nonetheless unsigned,
865 we don't directly use tree_int_cst_sgn. */
866 offset = remove_conversions (offset, true);
867 if (TREE_CODE (offset) == BIT_AND_EXPR
868 && host_integerp (TREE_OPERAND (offset, 1), 0)
869 && TREE_INT_CST_HIGH (TREE_OPERAND (offset, 1)) < 0)
870 {
871 unsigned int pow
872 = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
873 if (exact_log2 (pow) > 0)
874 align *= pow;
875 }
876
877 pos = compute_related_constant (curpos,
878 round_up (last_pos, align));
879 }
880 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
881 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
882 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
883 && host_integerp (TREE_OPERAND
884 (TREE_OPERAND (curpos, 0), 1),
885 1))
886 {
887 align
888 = tree_low_cst
889 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
890 pos = compute_related_constant (curpos,
891 round_up (last_pos, align));
892 }
893 else if (potential_alignment_gap (prev_old_field, old_field,
894 pos))
895 {
896 align = TYPE_ALIGN (field_type);
897 pos = compute_related_constant (curpos,
898 round_up (last_pos, align));
899 }
900
901 /* If we can't compute a position, set it to zero.
902
903 ??? We really should abort here, but it's too much work
904 to get this correct for all cases. */
905
906 if (!pos)
907 pos = bitsize_zero_node;
908
909 /* See if this type is variable-sized and make a pointer type
910 and indicate the indirection if so. Beware that the debug
911 back-end may adjust the position computed above according
912 to the alignment of the field type, i.e. the pointer type
913 in this case, if we don't preventively counter that. */
914 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
915 {
916 field_type = build_pointer_type (field_type);
917 if (align != 0 && TYPE_ALIGN (field_type) > align)
918 {
919 field_type = copy_node (field_type);
920 TYPE_ALIGN (field_type) = align;
921 }
922 var = true;
923 }
924
925 /* Make a new field name, if necessary. */
926 if (var || align != 0)
927 {
928 char suffix[16];
929
930 if (align != 0)
931 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
932 align / BITS_PER_UNIT);
933 else
934 strcpy (suffix, "XVL");
935
936 field_name = concat_name (field_name, suffix);
937 }
938
939 new_field
940 = create_field_decl (field_name, field_type, new_record_type,
941 DECL_SIZE (old_field), pos, 0, 0);
942 DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
943 TYPE_FIELDS (new_record_type) = new_field;
944
945 /* If old_field is a QUAL_UNION_TYPE, take its size as being
946 zero. The only time it's not the last field of the record
947 is when there are other components at fixed positions after
948 it (meaning there was a rep clause for every field) and we
949 want to be able to encode them. */
950 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
951 (TREE_CODE (TREE_TYPE (old_field))
952 == QUAL_UNION_TYPE)
953 ? bitsize_zero_node
954 : DECL_SIZE (old_field));
955 prev_old_field = old_field;
956 }
957
958 TYPE_FIELDS (new_record_type)
959 = nreverse (TYPE_FIELDS (new_record_type));
960
961 rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type));
962 }
963
964 rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
965 }
966
967 /* Append PARALLEL_TYPE on the chain of parallel types for decl. */
968
969 void
970 add_parallel_type (tree decl, tree parallel_type)
971 {
972 tree d = decl;
973
974 while (DECL_PARALLEL_TYPE (d))
975 d = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (d));
976
977 SET_DECL_PARALLEL_TYPE (d, parallel_type);
978 }
979
980 /* Utility function of above to merge LAST_SIZE, the previous size of a record
981 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
982 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
983 replace a value of zero with the old size. If HAS_REP is true, we take the
984 MAX of the end position of this field with LAST_SIZE. In all other cases,
985 we use FIRST_BIT plus SIZE. Return an expression for the size. */
986
987 static tree
988 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
989 bool has_rep)
990 {
991 tree type = TREE_TYPE (last_size);
992 tree new_size;
993
994 if (!special || TREE_CODE (size) != COND_EXPR)
995 {
996 new_size = size_binop (PLUS_EXPR, first_bit, size);
997 if (has_rep)
998 new_size = size_binop (MAX_EXPR, last_size, new_size);
999 }
1000
1001 else
1002 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1003 integer_zerop (TREE_OPERAND (size, 1))
1004 ? last_size : merge_sizes (last_size, first_bit,
1005 TREE_OPERAND (size, 1),
1006 1, has_rep),
1007 integer_zerop (TREE_OPERAND (size, 2))
1008 ? last_size : merge_sizes (last_size, first_bit,
1009 TREE_OPERAND (size, 2),
1010 1, has_rep));
1011
1012 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
1013 when fed through substitute_in_expr) into thinking that a constant
1014 size is not constant. */
1015 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
1016 new_size = TREE_OPERAND (new_size, 0);
1017
1018 return new_size;
1019 }
1020
1021 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1022 related by the addition of a constant. Return that constant if so. */
1023
1024 static tree
1025 compute_related_constant (tree op0, tree op1)
1026 {
1027 tree op0_var, op1_var;
1028 tree op0_con = split_plus (op0, &op0_var);
1029 tree op1_con = split_plus (op1, &op1_var);
1030 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1031
1032 if (operand_equal_p (op0_var, op1_var, 0))
1033 return result;
1034 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1035 return result;
1036 else
1037 return 0;
1038 }
1039
/* Utility function of above to split a tree OP which may be a sum, into a
   constant part, which is returned, and a variable part, which is stored
   in *PVAR.  *PVAR may be bitsize_zero_node.  All operations must be of
   bitsizetype.  */

static tree
split_plus (tree in, tree *pvar)
{
  /* Strip NOPS in order to ease the tree traversal and maximize the
     potential for constant or plus/minus discovery.  We need to be careful
     to always return and set *pvar to bitsizetype trees, but it's worth
     the effort.  */
  STRIP_NOPS (in);

  /* Default: treat the whole expression as the variable part.  The early
     returns below rely on this having been set first, so keep this
     assignment ahead of the dispatch on TREE_CODE.  */
  *pvar = convert (bitsizetype, in);

  if (TREE_CODE (in) == INTEGER_CST)
    {
      /* A constant has no variable part.  */
      *pvar = bitsize_zero_node;
      return convert (bitsizetype, in);
    }
  else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
    {
      /* Split each operand recursively, then recombine the pieces with
	 the original PLUS/MINUS operation.  */
      tree lhs_var, rhs_var;
      tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
      tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);

      /* If both operands came back wholly variable (unchanged), there is
	 no constant part; keep the default *PVAR set above, which is the
	 whole expression.  */
      if (lhs_var == TREE_OPERAND (in, 0)
	  && rhs_var == TREE_OPERAND (in, 1))
	return bitsize_zero_node;

      *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
      return size_binop (TREE_CODE (in), lhs_con, rhs_con);
    }
  else
    /* Any other expression is pure variable part; *PVAR was set above.  */
    return bitsize_zero_node;
}
1077 \f
1078 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1079 subprogram. If it is VOID_TYPE, then we are dealing with a procedure,
1080 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1081 PARM_DECL nodes that are the subprogram parameters. CICO_LIST is the
1082 copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
1083 RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
1084 object. RETURN_BY_DIRECT_REF_P is true if the function returns by direct
1085 reference. RETURN_BY_INVISI_REF_P is true if the function returns by
1086 invisible reference. */
1087
1088 tree
1089 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1090 bool return_unconstrained_p, bool return_by_direct_ref_p,
1091 bool return_by_invisi_ref_p)
1092 {
1093 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1094 the subprogram formal parameters. This list is generated by traversing
1095 the input list of PARM_DECL nodes. */
1096 tree param_type_list = NULL_TREE;
1097 tree t, type;
1098
1099 for (t = param_decl_list; t; t = DECL_CHAIN (t))
1100 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (t), param_type_list);
1101
1102 /* The list of the function parameter types has to be terminated by the void
1103 type to signal to the back-end that we are not dealing with a variable
1104 parameter subprogram, but that it has a fixed number of parameters. */
1105 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1106
1107 /* The list of argument types has been created in reverse so reverse it. */
1108 param_type_list = nreverse (param_type_list);
1109
1110 type = build_function_type (return_type, param_type_list);
1111
1112 /* TYPE may have been shared since GCC hashes types. If it has a different
1113 CICO_LIST, make a copy. Likewise for the various flags. */
1114 if (!fntype_same_flags_p (type, cico_list, return_unconstrained_p,
1115 return_by_direct_ref_p, return_by_invisi_ref_p))
1116 {
1117 type = copy_type (type);
1118 TYPE_CI_CO_LIST (type) = cico_list;
1119 TYPE_RETURN_UNCONSTRAINED_P (type) = return_unconstrained_p;
1120 TYPE_RETURN_BY_DIRECT_REF_P (type) = return_by_direct_ref_p;
1121 TREE_ADDRESSABLE (type) = return_by_invisi_ref_p;
1122 }
1123
1124 return type;
1125 }
1126 \f
1127 /* Return a copy of TYPE but safe to modify in any way. */
1128
1129 tree
1130 copy_type (tree type)
1131 {
1132 tree new_type = copy_node (type);
1133
1134 /* Unshare the language-specific data. */
1135 if (TYPE_LANG_SPECIFIC (type))
1136 {
1137 TYPE_LANG_SPECIFIC (new_type) = NULL;
1138 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
1139 }
1140
1141 /* And the contents of the language-specific slot if needed. */
1142 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
1143 && TYPE_RM_VALUES (type))
1144 {
1145 TYPE_RM_VALUES (new_type) = NULL_TREE;
1146 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
1147 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
1148 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
1149 }
1150
1151 /* copy_node clears this field instead of copying it, because it is
1152 aliased with TREE_CHAIN. */
1153 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
1154
1155 TYPE_POINTER_TO (new_type) = 0;
1156 TYPE_REFERENCE_TO (new_type) = 0;
1157 TYPE_MAIN_VARIANT (new_type) = new_type;
1158 TYPE_NEXT_VARIANT (new_type) = 0;
1159
1160 return new_type;
1161 }
1162 \f
1163 /* Return a subtype of sizetype with range MIN to MAX and whose
1164 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
1165 of the associated TYPE_DECL. */
1166
1167 tree
1168 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
1169 {
1170 /* First build a type for the desired range. */
1171 tree type = build_nonshared_range_type (sizetype, min, max);
1172
1173 /* Then set the index type. */
1174 SET_TYPE_INDEX_TYPE (type, index);
1175 create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);
1176
1177 return type;
1178 }
1179
1180 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
1181 sizetype is used. */
1182
1183 tree
1184 create_range_type (tree type, tree min, tree max)
1185 {
1186 tree range_type;
1187
1188 if (type == NULL_TREE)
1189 type = sizetype;
1190
1191 /* First build a type with the base range. */
1192 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
1193 TYPE_MAX_VALUE (type));
1194
1195 /* Then set the actual range. */
1196 SET_TYPE_RM_MIN_VALUE (range_type, convert (type, min));
1197 SET_TYPE_RM_MAX_VALUE (range_type, convert (type, max));
1198
1199 return range_type;
1200 }
1201 \f
1202 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
1203 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
1204 its data type. */
1205
1206 tree
1207 create_type_stub_decl (tree type_name, tree type)
1208 {
1209 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
1210 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
1211 emitted in DWARF. */
1212 tree type_decl = build_decl (input_location,
1213 TYPE_DECL, type_name, type);
1214 DECL_ARTIFICIAL (type_decl) = 1;
1215 return type_decl;
1216 }
1217
/* Return a TYPE_DECL node.  TYPE_NAME gives the name of the type and TYPE
   is a ..._TYPE node giving its data type.  ARTIFICIAL_P is true if this
   is a declaration that was generated by the compiler.  DEBUG_INFO_P is
   true if we need to write debug information about this type.  GNAT_NODE
   is used for the position of the decl.  */

tree
create_type_decl (tree type_name, tree type, struct attrib *attr_list,
		  bool artificial_p, bool debug_info_p, Node_Id gnat_node)
{
  enum tree_code code = TREE_CODE (type);
  /* Whether TYPE already carries a proper TYPE_DECL as its name.  */
  bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
  tree type_decl;

  /* Only the builtin TYPE_STUB_DECL should be used for dummy types.  */
  gcc_assert (!TYPE_IS_DUMMY_P (type));

  /* If the type hasn't been named yet, we're naming it; preserve an existing
     TYPE_STUB_DECL that has been attached to it for some purpose.  */
  if (!named && TYPE_STUB_DECL (type))
    {
      type_decl = TYPE_STUB_DECL (type);
      DECL_NAME (type_decl) = type_name;
    }
  else
    type_decl = build_decl (input_location,
			    TYPE_DECL, type_name, type);

  DECL_ARTIFICIAL (type_decl) = artificial_p;

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (type_decl, gnat_node);

  process_attributes (type_decl, attr_list);

  /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
     This causes the name to be also viewed as a "tag" by the debug
     back-end, with the advantage that no DW_TAG_typedef is emitted
     for artificial "tagged" types in DWARF.  */
  if (!named)
    TYPE_STUB_DECL (type) = type_decl;

  /* Pass the type declaration to the debug back-end unless this is an
     UNCONSTRAINED_ARRAY_TYPE that the back-end does not support, or a
     type for which debugging information was not requested, or else an
     ENUMERAL_TYPE or RECORD_TYPE (except for fat pointers) which are
     handled separately.  And do not pass dummy types either.  */
  if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
    DECL_IGNORED_P (type_decl) = 1;
  else if (code != ENUMERAL_TYPE
	   && (code != RECORD_TYPE || TYPE_FAT_POINTER_P (type))
	   && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
		&& TYPE_IS_DUMMY_P (TREE_TYPE (type)))
	   && !(code == RECORD_TYPE
		&& TYPE_IS_DUMMY_P
		   (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))))))
    rest_of_type_decl_compilation (type_decl);

  return type_decl;
}
1278 \f
/* Return a VAR_DECL or CONST_DECL node.

   VAR_NAME gives the name of the variable.  ASM_NAME is its assembler name
   (if provided).  TYPE is its data type (a GCC ..._TYPE node).  VAR_INIT is
   the GCC tree for an optional initial expression; NULL_TREE if none.

   CONST_FLAG is true if this variable is constant, in which case we might
   return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.

   PUBLIC_FLAG is true if this is for a reference to a public entity or for a
   definition to be made visible outside of the current compilation unit, for
   instance variable definitions in a package specification.

   EXTERN_FLAG is true when processing an external variable declaration (as
   opposed to a definition: no storage is to be allocated for the variable).

   STATIC_FLAG is only relevant when not at top level.  In that case
   it indicates whether to always allocate storage to the variable.

   GNAT_NODE is used for the position of the decl.  */

tree
create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
		   bool const_flag, bool public_flag, bool extern_flag,
		   bool static_flag, bool const_decl_allowed_p,
		   struct attrib *attr_list, Node_Id gnat_node)
{
  /* Whether the initializer is a valid constant initializer: at the global
     level or for static objects it must pass the static-initializer test,
     while locally TREE_CONSTANT is enough.  */
  bool init_const
    = (var_init != 0
       && gnat_types_compatible_p (type, TREE_TYPE (var_init))
       && (global_bindings_p () || static_flag
	   ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
	   : TREE_CONSTANT (var_init)));

  /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
     case the initializer may be used in-lieu of the DECL node (as done in
     Identifier_to_gnu).  This is useful to prevent the need of elaboration
     code when an identifier for which such a decl is made is in turn used as
     an initializer.  We used to rely on CONST vs VAR_DECL for this purpose,
     but extra constraints apply to this choice (see below) and are not
     relevant to the distinction we wish to make. */
  bool constant_p = const_flag && init_const;

  /* The actual DECL node.  CONST_DECL was initially intended for enumerals
     and may be used for scalars in general but not for aggregates.  */
  tree var_decl
    = build_decl (input_location,
		  (constant_p && const_decl_allowed_p
		   && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
		  var_name, type);

  /* If this is external, throw away any initializations (they will be done
     elsewhere) unless this is a constant for which we would like to remain
     able to get the initializer.  If we are defining a global here, leave a
     constant initialization and save any variable elaborations for the
     elaboration routine.  If we are just annotating types, throw away the
     initialization if it isn't a constant.  */
  if ((extern_flag && !constant_p)
      || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
    var_init = NULL_TREE;

  /* At the global level, an initializer requiring code to be generated
     produces elaboration statements.  Check that such statements are allowed,
     that is, not violating a No_Elaboration_Code restriction.  */
  if (global_bindings_p () && var_init != 0 && !init_const)
    Check_Elaboration_Code_Allowed (gnat_node);

  /* Set the basic attributes computed above on the DECL.  Volatility of the
     type is mirrored onto both TREE_THIS_VOLATILE and TREE_SIDE_EFFECTS.  */
  DECL_INITIAL (var_decl) = var_init;
  TREE_READONLY (var_decl) = const_flag;
  DECL_EXTERNAL (var_decl) = extern_flag;
  TREE_PUBLIC (var_decl) = public_flag || extern_flag;
  TREE_CONSTANT (var_decl) = constant_p;
  TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
    = TYPE_VOLATILE (type);

  /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
     try to fiddle with DECL_COMMON.  However, on platforms that don't
     support global BSS sections, uninitialized global variables would
     go in DATA instead, thus increasing the size of the executable.  */
  if (!flag_no_common
      && TREE_CODE (var_decl) == VAR_DECL
      && TREE_PUBLIC (var_decl)
      && !have_global_bss_p ())
    DECL_COMMON (var_decl) = 1;

  /* At the global binding level, we need to allocate static storage for the
     variable if it isn't external.  Otherwise, we allocate automatic storage
     unless requested not to.  */
  TREE_STATIC (var_decl)
    = !extern_flag && (static_flag || global_bindings_p ());

  /* For an external constant whose initializer is not absolute, do not emit
     debug info.  In DWARF this would mean a global relocation in a read-only
     section which runs afoul of the PE-COFF run-time relocation mechanism.  */
  if (extern_flag
      && constant_p
      && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
	   != null_pointer_node)
    DECL_IGNORED_P (var_decl) = 1;

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (var_decl, gnat_node);

  /* TREE_SIDE_EFFECTS was set from TYPE_VOLATILE above; such objects are
     also flagged addressable.  */
  if (TREE_SIDE_EFFECTS (var_decl))
    TREE_ADDRESSABLE (var_decl) = 1;

  /* Only VAR_DECLs get an assembler name, attributes and, at the global
     level, the rest of the compilation machinery; a local CONST_DECL is
     merely expanded.  */
  if (TREE_CODE (var_decl) == VAR_DECL)
    {
      if (asm_name)
	SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
      process_attributes (var_decl, attr_list);
      if (global_bindings_p ())
	rest_of_decl_compilation (var_decl, true, 0);
    }
  else
    expand_decl (var_decl);

  return var_decl;
}
1398 \f
1399 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
1400
1401 static bool
1402 aggregate_type_contains_array_p (tree type)
1403 {
1404 switch (TREE_CODE (type))
1405 {
1406 case RECORD_TYPE:
1407 case UNION_TYPE:
1408 case QUAL_UNION_TYPE:
1409 {
1410 tree field;
1411 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
1412 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
1413 && aggregate_type_contains_array_p (TREE_TYPE (field)))
1414 return true;
1415 return false;
1416 }
1417
1418 case ARRAY_TYPE:
1419 return true;
1420
1421 default:
1422 gcc_unreachable ();
1423 }
1424 }
1425
/* Return a FIELD_DECL node.  FIELD_NAME is the field's name, FIELD_TYPE is
   its type and RECORD_TYPE is the type of the enclosing record.  If SIZE is
   nonzero, it is the specified size of the field.  If POS is nonzero, it is
   the bit position.  PACKED is 1 if the enclosing record is packed, -1 if it
   has Component_Alignment of Storage_Unit.  If ADDRESSABLE is nonzero, it
   means we are allowed to take the address of the field; if it is negative,
   we should not make a bitfield, which is used by make_aligning_type.  */

tree
create_field_decl (tree field_name, tree field_type, tree record_type,
		   tree size, tree pos, int packed, int addressable)
{
  tree field_decl = build_decl (input_location,
				FIELD_DECL, field_name, field_type);

  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TYPE_READONLY (field_type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
     Likewise for an aggregate without specified position that contains an
     array, because in this case slices of variable length of this array
     must be handled by GCC and variable-sized objects need to be aligned
     to at least a byte boundary.  */
  if (packed && (TYPE_MODE (field_type) == BLKmode
		 || (!pos
		     && AGGREGATE_TYPE_P (field_type)
		     && aggregate_type_contains_array_p (field_type))))
    DECL_ALIGN (field_decl) = BITS_PER_UNIT;

  /* If a size is specified, use it.  Otherwise, if the record type is packed
     compute a size to use, which may differ from the object's natural size.
     We always set a size in this case to trigger the checks for bitfield
     creation below, which is typically required when no position has been
     specified.  */
  if (size)
    size = convert (bitsizetype, size);
  else if (packed == 1)
    {
      size = rm_size (field_type);
      if (TYPE_MODE (field_type) == BLKmode)
	size = round_up (size, BITS_PER_UNIT);
    }

  /* If we may, according to ADDRESSABLE, make a bitfield if a size is
     specified for two reasons: first if the size differs from the natural
     size.  Second, if the alignment is insufficient.  There are a number of
     ways the latter can be true.

     We never make a bitfield if the type of the field has a nonconstant size,
     because no such entity requiring bitfield operations should reach here.

     We do *preventively* make a bitfield when there might be the need for it
     but we don't have all the necessary information to decide, as is the case
     of a field with no specified position in a packed record.

     We also don't look at STRICT_ALIGNMENT here, and rely on later processing
     in layout_decl or finish_record_type to clear the bit_field indication if
     it is in fact not needed.  */
  if (addressable >= 0
      && size
      && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
      && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
	  || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
	  || packed
	  || (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      if (!packed && !pos)
	{
	  /* No packing and no explicit position: cap the alignment at
	     that of the record when the record is less aligned than the
	     field type, otherwise use the field type's alignment.  */
	  if (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
	    DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
	  else
	    DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
	}
    }

  DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;

  /* Bump the alignment if need be, either for bitfield/packing purposes or
     to satisfy the type requirements if no such consideration applies.  When
     we get the alignment from the type, indicate if this is from an explicit
     user request, which prevents stor-layout from lowering it later on.  */
  {
    unsigned int bit_align
      = (DECL_BIT_FIELD (field_decl) ? 1
	 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);

    if (bit_align > DECL_ALIGN (field_decl))
      DECL_ALIGN (field_decl) = bit_align;
    else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
      {
	DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
	DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
      }
  }

  if (pos)
    {
      /* We need to pass in the alignment the DECL is known to have.
	 This is the lowest-order bit set in POS, but no more than
	 the alignment of the record, if one is specified.  Note
	 that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      if (host_integerp (pos, 1))
	known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
      else
	known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
	  && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
	known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
			     host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
			     : BITS_PER_UNIT);
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
		    &DECL_FIELD_BIT_OFFSET (field_decl),
		    DECL_OFFSET_ALIGN (field_decl), pos);
    }

  /* In addition to what our caller says, claim the field is addressable if we
     know that its type is not suitable.

     The field may also be "technically" nonaddressable, meaning that even if
     we attempt to take the field's address we will actually get the address
     of a copy.  This is the case for true bitfields, but the DECL_BIT_FIELD
     value we have at this point is not accurate enough, so we don't account
     for this here and let finish_record_type decide.  */
  if (!addressable && !type_for_nonaliased_component_p (field_type))
    addressable = 1;

  DECL_NONADDRESSABLE_P (field_decl) = !addressable;

  return field_decl;
}
1568 \f
1569 /* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
1570 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
1571 (either an In parameter or an address of a pass-by-ref parameter). */
1572
1573 tree
1574 create_param_decl (tree param_name, tree param_type, bool readonly)
1575 {
1576 tree param_decl = build_decl (input_location,
1577 PARM_DECL, param_name, param_type);
1578
1579 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
1580 can lead to various ABI violations. */
1581 if (targetm.calls.promote_prototypes (NULL_TREE)
1582 && INTEGRAL_TYPE_P (param_type)
1583 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
1584 {
1585 /* We have to be careful about biased types here. Make a subtype
1586 of integer_type_node with the proper biasing. */
1587 if (TREE_CODE (param_type) == INTEGER_TYPE
1588 && TYPE_BIASED_REPRESENTATION_P (param_type))
1589 {
1590 tree subtype
1591 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
1592 TREE_TYPE (subtype) = integer_type_node;
1593 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
1594 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (param_type));
1595 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (param_type));
1596 param_type = subtype;
1597 }
1598 else
1599 param_type = integer_type_node;
1600 }
1601
1602 DECL_ARG_TYPE (param_decl) = param_type;
1603 TREE_READONLY (param_decl) = readonly;
1604 return param_decl;
1605 }
1606 \f
/* Given a DECL and ATTR_LIST, process the listed attributes.  */

static void
process_attributes (tree decl, struct attrib *attr_list)
{
  for (; attr_list; attr_list = attr_list->next)
    switch (attr_list->type)
      {
      case ATTR_MACHINE_ATTRIBUTE:
	/* Hand the attribute over to the generic/target machinery, with
	   the input location pointed at DECL for diagnostics.  */
	input_location = DECL_SOURCE_LOCATION (decl);
	decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
					   NULL_TREE),
			 ATTR_FLAG_TYPE_IN_PLACE);
	break;

      case ATTR_LINK_ALIAS:
	/* Only emit the alias when DECL is defined here, i.e. is not
	   external.  */
	if (! DECL_EXTERNAL (decl))
	  {
	    TREE_STATIC (decl) = 1;
	    assemble_alias (decl, attr_list->name);
	  }
	break;

      case ATTR_WEAK_EXTERNAL:
	if (SUPPORTS_WEAK)
	  declare_weak (decl);
	else
	  post_error ("?weak declarations not supported on this target",
		      attr_list->error_point);
	break;

      case ATTR_LINK_SECTION:
	if (targetm.have_named_sections)
	  {
	    /* Put the entity in the requested section and drop any COMMON
	       placement.  */
	    DECL_SECTION_NAME (decl)
	      = build_string (IDENTIFIER_LENGTH (attr_list->name),
			      IDENTIFIER_POINTER (attr_list->name));
	    DECL_COMMON (decl) = 0;
	  }
	else
	  post_error ("?section attributes are not supported for this target",
		      attr_list->error_point);
	break;

      case ATTR_LINK_CONSTRUCTOR:
	/* Mark the decl used so it is not discarded as unreferenced.  */
	DECL_STATIC_CONSTRUCTOR (decl) = 1;
	TREE_USED (decl) = 1;
	break;

      case ATTR_LINK_DESTRUCTOR:
	/* Likewise for destructors.  */
	DECL_STATIC_DESTRUCTOR (decl) = 1;
	TREE_USED (decl) = 1;
	break;

      case ATTR_THREAD_LOCAL_STORAGE:
	/* Set the default TLS model and drop any COMMON placement.  */
	DECL_TLS_MODEL (decl) = decl_default_tls_model (decl);
	DECL_COMMON (decl) = 0;
	break;
      }
}
1667 \f
/* Record DECL as a global renaming pointer.  */

void
record_global_renaming_pointer (tree decl)
{
  /* Only decls that actually rename another object belong in the
     global_renaming_pointers vector.  */
  gcc_assert (DECL_RENAMED_OBJECT (decl));
  VEC_safe_push (tree, gc, global_renaming_pointers, decl);
}
1676
/* Invalidate the global renaming pointers.  */

void
invalidate_global_renaming_pointers (void)
{
  unsigned int i;
  tree iter;

  /* Clear the renamed-object link of every recorded pointer.  */
  FOR_EACH_VEC_ELT (tree, global_renaming_pointers, i, iter)
    SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);

  /* And release the vector itself.  */
  VEC_free (tree, gc, global_renaming_pointers);
}
1690
1691 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
1692 a power of 2. */
1693
1694 bool
1695 value_factor_p (tree value, HOST_WIDE_INT factor)
1696 {
1697 if (host_integerp (value, 1))
1698 return tree_low_cst (value, 1) % factor == 0;
1699
1700 if (TREE_CODE (value) == MULT_EXPR)
1701 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1702 || value_factor_p (TREE_OPERAND (value, 1), factor));
1703
1704 return false;
1705 }
1706
/* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
   unless we can prove these 2 fields are laid out in such a way that no gap
   exists between the end of PREV_FIELD and the beginning of CURR_FIELD.
   OFFSET is the distance in bits between the end of PREV_FIELD and the
   starting position of CURR_FIELD.  It is ignored if null.

   Note the conservative direction: returning true only means we could not
   prove the absence of a gap.  */

static bool
potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
{
  /* If this is the first field of the record, there cannot be any gap */
  if (!prev_field)
    return false;

  /* If the previous field is a union type, then return False: The only
     time when such a field is not the last field of the record is when
     there are other components at fixed positions after it (meaning there
     was a rep clause for every field), in which case we don't want the
     alignment constraint to override them.  */
  if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
    return false;

  /* If the distance between the end of prev_field and the beginning of
     curr_field is constant, then there is a gap if the value of this
     constant is not null.  */
  if (offset && host_integerp (offset, 1))
    return !integer_zerop (offset);

  /* If the size and position of the previous field are constant,
     then check the sum of this size and position.  There will be a gap
     iff it is not multiple of the current field alignment.  */
  if (host_integerp (DECL_SIZE (prev_field), 1)
      && host_integerp (bit_position (prev_field), 1))
    return ((tree_low_cst (bit_position (prev_field), 1)
	     + tree_low_cst (DECL_SIZE (prev_field), 1))
	    % DECL_ALIGN (curr_field) != 0);

  /* If both the position and size of the previous field are multiples
     of the current field alignment, there cannot be any gap.  */
  if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
      && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
    return false;

  /* Fallback, return that there may be a potential gap */
  return true;
}
1752
1753 /* Returns a LABEL_DECL node for LABEL_NAME. */
1754
1755 tree
1756 create_label_decl (tree label_name)
1757 {
1758 tree label_decl = build_decl (input_location,
1759 LABEL_DECL, label_name, void_type_node);
1760
1761 DECL_CONTEXT (label_decl) = current_function_decl;
1762 DECL_MODE (label_decl) = VOIDmode;
1763 DECL_SOURCE_LOCATION (label_decl) = input_location;
1764
1765 return label_decl;
1766 }
1767 \f
/* Returns a FUNCTION_DECL node.  SUBPROG_NAME is the name of the subprogram,
   ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
   node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
   PARM_DECL nodes chained through the TREE_CHAIN field).

   INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
   appropriate fields in the FUNCTION_DECL.  GNAT_NODE gives the location.

   The new decl is pushed on the current binding level and handed to
   rest_of_decl_compilation, so this has global side effects beyond
   building the node.  */

tree
create_subprog_decl (tree subprog_name, tree asm_name,
		     tree subprog_type, tree param_decl_list, bool inline_flag,
		     bool public_flag, bool extern_flag,
		     struct attrib *attr_list, Node_Id gnat_node)
{
  tree subprog_decl = build_decl (input_location, FUNCTION_DECL, subprog_name,
				  subprog_type);
  /* Every function gets a RESULT_DECL up front, typed from the return
     type of SUBPROG_TYPE.  */
  tree result_decl = build_decl (input_location, RESULT_DECL, NULL_TREE,
				 TREE_TYPE (subprog_type));

  /* If this is a non-inline function nested inside an inlined external
     function, we cannot honor both requests without cloning the nested
     function in the current unit since it is private to the other unit.
     We could inline the nested function as well but it's probably better
     to err on the side of too little inlining.  */
  if (!inline_flag
      && current_function_decl
      && DECL_DECLARED_INLINE_P (current_function_decl)
      && DECL_EXTERNAL (current_function_decl))
    DECL_DECLARED_INLINE_P (current_function_decl) = 0;

  DECL_EXTERNAL (subprog_decl) = extern_flag;
  TREE_PUBLIC (subprog_decl) = public_flag;
  /* Readonly/volatile on the function type translate to const/volatile
     function semantics on the decl; a volatile function also has side
     effects.  */
  TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
  TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
  TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
  DECL_DECLARED_INLINE_P (subprog_decl) = inline_flag;
  DECL_ARGUMENTS (subprog_decl) = param_decl_list;

  DECL_ARTIFICIAL (result_decl) = 1;
  DECL_IGNORED_P (result_decl) = 1;
  /* TREE_ADDRESSABLE on a FUNCTION_TYPE encodes return-by-invisible-ref
     in this front-end (see fntype_same_flags_p below).  */
  DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (subprog_type);
  DECL_RESULT (subprog_decl) = result_decl;

  if (asm_name)
    {
      SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);

      /* The expand_main_function circuitry expects "main_identifier_node" to
	 designate the DECL_NAME of the 'main' entry point, in turn expected
	 to be declared as the "main" function literally by default.  Ada
	 program entry points are typically declared with a different name
	 within the binder generated file, exported as 'main' to satisfy the
	 system expectations.  Force main_identifier_node in this case.  */
      if (asm_name == main_identifier_node)
	DECL_NAME (subprog_decl) = main_identifier_node;
    }

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (subprog_decl, gnat_node);

  process_attributes (subprog_decl, attr_list);

  /* Output the assembler code and/or RTL for the declaration.  */
  rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);

  return subprog_decl;
}
1835 \f
/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
   body.  This routine needs to be invoked before processing the declarations
   appearing in the subprogram.  Pairs with end_subprog_body below; it
   mutates current_function_decl and the binding-level stack.  */

void
begin_subprog_body (tree subprog_decl)
{
  tree param_decl;

  announce_function (subprog_decl);

  /* This function is being defined.  */
  TREE_STATIC (subprog_decl) = 1;

  current_function_decl = subprog_decl;

  /* Enter a new binding level and show that all the parameters belong to
     this function.  */
  gnat_pushlevel ();

  for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
       param_decl = DECL_CHAIN (param_decl))
    DECL_CONTEXT (param_decl) = subprog_decl;

  make_decl_rtl (subprog_decl);

  /* We handle pending sizes via the elaboration of types, so we don't need to
     save them.  This causes them to be marked as part of the outer function
     and then discarded.  */
  get_pending_sizes ();
}
1867
/* Finish the definition of the current subprogram BODY and finalize it.
   Pops the binding level entered by begin_subprog_body, attaches BODY to
   the function, restores current_function_decl to the enclosing context
   and hands the function to cgraph (unless only annotating types).  */

void
end_subprog_body (tree body)
{
  tree fndecl = current_function_decl;

  /* Attach the BLOCK for this level to the function and pop the level.  */
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  gnat_poplevel ();

  /* We handle pending sizes via the elaboration of types, so we don't
     need to save them.  */
  get_pending_sizes ();

  /* Mark the RESULT_DECL as being in this subprogram. */
  DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;

  /* The body should be a BIND_EXPR whose BLOCK is the top-level one.  */
  if (TREE_CODE (body) == BIND_EXPR)
    {
      /* Prefer the BIND_EXPR's own block over the one installed above.  */
      BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
      DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
    }

  DECL_SAVED_TREE (fndecl) = body;

  current_function_decl = DECL_CONTEXT (fndecl);

  /* We cannot track the location of errors past this point.  */
  error_gnat_node = Empty;

  /* If we're only annotating types, don't actually compile this function.  */
  if (type_annotate_only)
    return;

  /* Dump functions before gimplification.  */
  dump_function (TDI_original, fndecl);

  /* ??? This special handling of nested functions is probably obsolete.  */
  if (!DECL_CONTEXT (fndecl))
    cgraph_finalize_function (fndecl, false);
  else
    /* Register this function with cgraph just far enough to get it
       added to our parent's nested function list.  */
    (void) cgraph_node (fndecl);
}
1916
/* Record DECL, a builtin FUNCTION_DECL, at the current binding level
   (with no associated Ada node) and return it unchanged.  */

tree
gnat_builtin_function (tree decl)
{
  gnat_pushdecl (decl, Empty);
  return decl;
}
1923
1924 /* Return an integer type with the number of bits of precision given by
1925 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
1926 it is a signed type. */
1927
1928 tree
1929 gnat_type_for_size (unsigned precision, int unsignedp)
1930 {
1931 tree t;
1932 char type_name[20];
1933
1934 if (precision <= 2 * MAX_BITS_PER_WORD
1935 && signed_and_unsigned_types[precision][unsignedp])
1936 return signed_and_unsigned_types[precision][unsignedp];
1937
1938 if (unsignedp)
1939 t = make_unsigned_type (precision);
1940 else
1941 t = make_signed_type (precision);
1942
1943 if (precision <= 2 * MAX_BITS_PER_WORD)
1944 signed_and_unsigned_types[precision][unsignedp] = t;
1945
1946 if (!TYPE_NAME (t))
1947 {
1948 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
1949 TYPE_NAME (t) = get_identifier (type_name);
1950 }
1951
1952 return t;
1953 }
1954
/* Likewise for floating-point types: return a REAL_TYPE with precision
   PRECISION and machine mode MODE.  One type is cached per mode; the
   PRECISION of a cached type is assumed to match (asserted via the
   TYPE_MODE check after layout).  */

static tree
float_type_for_precision (int precision, enum machine_mode mode)
{
  tree t;
  char type_name[20];

  /* Serve the request from the per-mode cache when possible.  */
  if (float_types[(int) mode])
    return float_types[(int) mode];

  float_types[(int) mode] = t = make_node (REAL_TYPE);
  TYPE_PRECISION (t) = precision;
  layout_type (t);

  /* Layout must have picked exactly the requested mode.  */
  gcc_assert (TYPE_MODE (t) == mode);
  if (!TYPE_NAME (t))
    {
      sprintf (type_name, "FLOAT_%d", precision);
      TYPE_NAME (t) = get_identifier (type_name);
    }

  return t;
}
1979
1980 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
1981 an unsigned type; otherwise a signed type is returned. */
1982
1983 tree
1984 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
1985 {
1986 if (mode == BLKmode)
1987 return NULL_TREE;
1988
1989 if (mode == VOIDmode)
1990 return void_type_node;
1991
1992 if (COMPLEX_MODE_P (mode))
1993 return NULL_TREE;
1994
1995 if (SCALAR_FLOAT_MODE_P (mode))
1996 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
1997
1998 if (SCALAR_INT_MODE_P (mode))
1999 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2000
2001 if (VECTOR_MODE_P (mode))
2002 {
2003 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2004 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
2005 if (inner_type)
2006 return build_vector_type_for_mode (inner_type, mode);
2007 }
2008
2009 return NULL_TREE;
2010 }
2011
2012 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2013
2014 tree
2015 gnat_unsigned_type (tree type_node)
2016 {
2017 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2018
2019 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2020 {
2021 type = copy_node (type);
2022 TREE_TYPE (type) = type_node;
2023 }
2024 else if (TREE_TYPE (type_node)
2025 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2026 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2027 {
2028 type = copy_node (type);
2029 TREE_TYPE (type) = TREE_TYPE (type_node);
2030 }
2031
2032 return type;
2033 }
2034
2035 /* Return the signed version of a TYPE_NODE, a scalar type. */
2036
2037 tree
2038 gnat_signed_type (tree type_node)
2039 {
2040 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2041
2042 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2043 {
2044 type = copy_node (type);
2045 TREE_TYPE (type) = type_node;
2046 }
2047 else if (TREE_TYPE (type_node)
2048 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2049 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2050 {
2051 type = copy_node (type);
2052 TREE_TYPE (type) = TREE_TYPE (type_node);
2053 }
2054
2055 return type;
2056 }
2057
/* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
   transparently converted to each other.  This is a structural check
   layered on top of the main-variant identity test.  */

int
gnat_types_compatible_p (tree t1, tree t2)
{
  enum tree_code code;

  /* This is the default criterion.  */
  if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
    return 1;

  /* We only check structural equivalence here.  */
  if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
    return 0;

  /* Vector types are also compatible if they have the same number of subparts
     and the same form of (scalar) element type.  */
  if (code == VECTOR_TYPE
      && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
      && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
      && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
    return 1;

  /* Array types are also compatible if they are constrained and have the same
     domain(s) and the same component type.  Nested array component types are
     compared recursively.  */
  if (code == ARRAY_TYPE
      && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
	  || (TYPE_DOMAIN (t1)
	      && TYPE_DOMAIN (t2)
	      && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
				     TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
	      && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
				     TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
      && (TREE_TYPE (t1) == TREE_TYPE (t2)
	  || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
	      && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))))
    return 1;

  /* Padding record types are also compatible if they pad the same
     type and have the same constant size.  */
  if (code == RECORD_TYPE
      && TYPE_PADDING_P (t1) && TYPE_PADDING_P (t2)
      && TREE_TYPE (TYPE_FIELDS (t1)) == TREE_TYPE (TYPE_FIELDS (t2))
      && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
    return 1;

  return 0;
}
2107
2108 /* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
2109
2110 bool
2111 fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
2112 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
2113 {
2114 return TYPE_CI_CO_LIST (t) == cico_list
2115 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
2116 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
2117 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
2118 }
2119 \f
/* EXP is an expression for the size of an object.  If this size contains
   discriminant references, replace them with the maximum (if MAX_P) or
   minimum (if !MAX_P) possible value of the discriminant.

   The recursion flips MAX_P under negation and on the subtrahend of a
   MINUS_EXPR, since maximizing a difference means minimizing what is
   subtracted.  */

tree
max_size (tree exp, bool max_p)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
    case tcc_constant:
      return exp;

    case tcc_vl_exp:
      if (code == CALL_EXPR)
	{
	  tree t, *argarray;
	  int n, i;

	  /* Try inlining the call first; if that works, recurse on the
	     resulting expression.  */
	  t = maybe_inline_call_in_expr (exp);
	  if (t)
	    return max_size (t, max_p);

	  /* Otherwise rebuild the call with each argument maximized or
	     minimized.  */
	  n = call_expr_nargs (exp);
	  gcc_assert (n > 0);
	  argarray = XALLOCAVEC (tree, n);
	  for (i = 0; i < n; i++)
	    argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
	  return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
	}
      break;

    case tcc_reference:
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
	 modify.  Otherwise, we treat it like a variable.  */
      if (!CONTAINS_PLACEHOLDER_P (exp))
	return exp;

      /* Presumably a discriminant reference (COMPONENT_REF): operand 1 is
	 the field, whose type bounds give the extreme values.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      return
	max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);

    case tcc_comparison:
      /* A comparison yields 0 or 1; pick the requested extreme.  */
      return max_p ? size_one_node : size_zero_node;

    case tcc_unary:
    case tcc_binary:
    case tcc_expression:
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  if (code == NON_LVALUE_EXPR)
	    return max_size (TREE_OPERAND (exp, 0), max_p);
	  else
	    /* Negation inverts the direction of the extremum.  */
	    return
	      fold_build1 (code, type,
			   max_size (TREE_OPERAND (exp, 0),
				     code == NEGATE_EXPR ? !max_p : max_p));

	case 2:
	  if (code == COMPOUND_EXPR)
	    return max_size (TREE_OPERAND (exp, 1), max_p);

	  {
	    tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
	    tree rhs = max_size (TREE_OPERAND (exp, 1),
				 code == MINUS_EXPR ? !max_p : max_p);

	    /* Special-case wanting the maximum value of a MIN_EXPR.
	       In that case, if one side overflows, return the other.
	       sizetype is signed, but we know sizes are non-negative.
	       Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
	       overflowing and the RHS a variable.  */
	    if (max_p
		&& code == MIN_EXPR
		&& TREE_CODE (rhs) == INTEGER_CST
		&& TREE_OVERFLOW (rhs))
	      return lhs;
	    else if (max_p
		     && code == MIN_EXPR
		     && TREE_CODE (lhs) == INTEGER_CST
		     && TREE_OVERFLOW (lhs))
	      return rhs;
	    else if ((code == MINUS_EXPR || code == PLUS_EXPR)
		     && TREE_CODE (lhs) == INTEGER_CST
		     && TREE_OVERFLOW (lhs)
		     && !TREE_CONSTANT (rhs))
	      return lhs;
	    else
	      return fold_build2 (code, type, lhs, rhs);
	  }

	case 3:
	  if (code == SAVE_EXPR)
	    return exp;
	  else if (code == COND_EXPR)
	    /* Take the extremum over both arms of the conditional.  */
	    return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
				max_size (TREE_OPERAND (exp, 1), max_p),
				max_size (TREE_OPERAND (exp, 2), max_p));
	}

      /* Other tree classes cannot happen.  */
    default:
      break;
    }

  gcc_unreachable ();
}
2231 \f
/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
   EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
   Return a constructor for the template.

   The template's fields come in (min, max) pairs, one pair per dimension;
   hence the double DECL_CHAIN steps in the loop below.  */

tree
build_template (tree template_type, tree array_type, tree expr)
{
  VEC(constructor_elt,gc) *template_elts = NULL;
  tree bound_list = NULL_TREE;
  tree field;

  /* Strip padding and justified-modular wrappers to reach the actual
     array type.  */
  while (TREE_CODE (array_type) == RECORD_TYPE
	 && (TYPE_PADDING_P (array_type)
	     || TYPE_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
	  && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.  Go down the
     field list of the template instead of the type chain because this
     array might be an Ada array of arrays and we can't tell where the
     nested arrays stop being the underlying object.  */

  for (field = TYPE_FIELDS (template_type); field;
       (bound_list
	? (bound_list = TREE_CHAIN (bound_list))
	: (array_type = TREE_TYPE (array_type))),
       field = DECL_CHAIN (DECL_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
	 for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
	 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
	 This will give us a maximum range.  */
      if (bound_list)
	bounds = TREE_VALUE (bound_list);
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
	bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
      else if (expr && TREE_CODE (expr) == PARM_DECL
	       && DECL_BY_COMPONENT_PTR_P (expr))
	bounds = TREE_TYPE (field);
      else
	gcc_unreachable ();

      /* MIN goes into FIELD, MAX into the chained (second) field.  */
      min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
	 substitute it from OBJECT.  */
      min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
      max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);

      CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
      CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
    }

  return gnat_build_constructor (template_type, template_elts);
}
2294 \f
/* Helper routine to make a descriptor field.  FIELD_LIST is the list of decls
   being built; the new decl is chained on to the front of the list.  The
   field's DECL_INITIAL holds INITIAL, the expression to use when a
   constructor is made for the descriptor type (see build_vms_descriptor*).
   Returns the new head of the list.  */

static tree
make_descriptor_field (const char *name, tree type, tree rec_type,
		       tree initial, tree field_list)
{
  tree field
    = create_field_decl (get_identifier (name), type, rec_type, NULL_TREE,
			 NULL_TREE, 0, 0);

  DECL_INITIAL (field) = initial;
  DECL_CHAIN (field) = field_list;
  return field;
}
2310
/* Build a 32-bit VMS descriptor from a Mechanism_Type, which must specify a
   descriptor type, and the GCC type of an object.  Each FIELD_DECL in the
   type contains in its DECL_INITIAL the expression to use when a constructor
   is made for the type.  GNAT_ENTITY is an entity used to print out an error
   message if the mechanism cannot be applied to an object of that type and
   also for the name.

   Fields are accumulated on FIELD_LIST in reverse order, hence the
   nreverse call at the end.  The numeric DTYPE/CLASS values written below
   are presumably the OpenVMS DSC$ descriptor codes — verify against the
   OpenVMS calling standard before changing any of them.  */

tree
build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
{
  tree record_type = make_node (RECORD_TYPE);
  tree pointer32_type, pointer64_type;
  tree field_list = NULL_TREE;
  int klass, ndim, i, dtype = 0;
  tree inner_type, tem;
  tree *idx_arr;

  /* If TYPE is an unconstrained array, use the underlying array type.  */
  if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));

  /* If this is an array, compute the number of dimensions in the array,
     get the index types, and point to the inner type.  */
  if (TREE_CODE (type) != ARRAY_TYPE)
    ndim = 0;
  else
    for (ndim = 1, inner_type = type;
	 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
	 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
	 ndim++, inner_type = TREE_TYPE (inner_type))
      ;

  idx_arr = XALLOCAVEC (tree, ndim);

  /* Collect the per-dimension index domains; Fortran-convention arrays
     (except NCA descriptors) store them in reverse order.  */
  if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
      && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
    for (i = ndim - 1, inner_type = type;
	 i >= 0;
	 i--, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);
  else
    for (i = 0, inner_type = type;
	 i < ndim;
	 i++, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);

  /* Now get the DTYPE value.  */
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      /* VAX floating-point types are keyed off their digits value.  */
      if (TYPE_VAX_FLOATING_POINT_P (type))
	switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
	  {
	  case 6:
	    dtype = 10;
	    break;
	  case 9:
	    dtype = 11;
	    break;
	  case 15:
	    dtype = 27;
	    break;
	  }
      else
	switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
	  {
	  case 8:
	    dtype = TYPE_UNSIGNED (type) ? 2 : 6;
	    break;
	  case 16:
	    dtype = TYPE_UNSIGNED (type) ? 3 : 7;
	    break;
	  case 32:
	    dtype = TYPE_UNSIGNED (type) ? 4 : 8;
	    break;
	  case 64:
	    dtype = TYPE_UNSIGNED (type) ? 5 : 9;
	    break;
	  case 128:
	    dtype = TYPE_UNSIGNED (type) ? 25 : 26;
	    break;
	  }
      break;

    case REAL_TYPE:
      dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
      break;

    case COMPLEX_TYPE:
      if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
	  && TYPE_VAX_FLOATING_POINT_P (type))
	switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
	  {
	  case 6:
	    dtype = 12;
	    break;
	  case 9:
	    dtype = 13;
	    break;
	  case 15:
	    dtype = 29;
	  }
      else
	dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
      break;

    case ARRAY_TYPE:
      dtype = 14;
      break;

    default:
      break;
    }

  /* Get the CLASS value.  */
  switch (mech)
    {
    case By_Descriptor_A:
    case By_Short_Descriptor_A:
      klass = 4;
      break;
    case By_Descriptor_NCA:
    case By_Short_Descriptor_NCA:
      klass = 10;
      break;
    case By_Descriptor_SB:
    case By_Short_Descriptor_SB:
      klass = 15;
      break;
    case By_Descriptor:
    case By_Short_Descriptor:
    case By_Descriptor_S:
    case By_Short_Descriptor_S:
    default:
      klass = 1;
      break;
    }

  /* Make the type for a descriptor for VMS.  The first four fields are the
     same for all types.  */
  field_list
    = make_descriptor_field ("LENGTH", gnat_type_for_size (16, 1), record_type,
			     size_in_bytes ((mech == By_Descriptor_A
					     || mech == By_Short_Descriptor_A)
					    ? inner_type : type),
			     field_list);
  field_list
    = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1), record_type,
			     size_int (dtype), field_list);
  field_list
    = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1), record_type,
			     size_int (klass), field_list);

  pointer32_type = build_pointer_type_for_mode (type, SImode, false);
  pointer64_type = build_pointer_type_for_mode (type, DImode, false);

  /* Ensure that only 32-bit pointers are passed in 32-bit descriptors.  Note
     that we cannot build a template call to the CE routine as it would get a
     wrong source location; instead we use a second placeholder for it.  */
  tem = build_unary_op (ADDR_EXPR, pointer64_type,
			build0 (PLACEHOLDER_EXPR, type));
  tem = build3 (COND_EXPR, pointer32_type,
		build_binary_op (GE_EXPR, boolean_type_node, tem,
				 build_int_cstu (pointer64_type, 0x80000000)),
		build0 (PLACEHOLDER_EXPR, void_type_node),
		convert (pointer32_type, tem));

  field_list
    = make_descriptor_field ("POINTER", pointer32_type, record_type, tem,
			     field_list);

  switch (mech)
    {
    case By_Descriptor:
    case By_Short_Descriptor:
    case By_Descriptor_S:
    case By_Short_Descriptor_S:
      break;

    case By_Descriptor_SB:
    case By_Short_Descriptor_SB:
      /* Single bound descriptors: lower and upper bound of the first
	 (only) dimension.  */
      field_list
	= make_descriptor_field ("SB_L1", gnat_type_for_size (32, 1),
				 record_type,
				 (TREE_CODE (type) == ARRAY_TYPE
				  ? TYPE_MIN_VALUE (TYPE_DOMAIN (type))
				  : size_zero_node),
				 field_list);
      field_list
	= make_descriptor_field ("SB_U1", gnat_type_for_size (32, 1),
				 record_type,
				 (TREE_CODE (type) == ARRAY_TYPE
				  ? TYPE_MAX_VALUE (TYPE_DOMAIN (type))
				  : size_zero_node),
				 field_list);
      break;

    case By_Descriptor_A:
    case By_Short_Descriptor_A:
    case By_Descriptor_NCA:
    case By_Short_Descriptor_NCA:
      field_list
	= make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
				 record_type, size_zero_node, field_list);

      field_list
	= make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
				 record_type, size_zero_node, field_list);

      field_list
	= make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
				 record_type,
				 size_int ((mech == By_Descriptor_NCA
					    || mech == By_Short_Descriptor_NCA)
					   ? 0
					   /* Set FL_COLUMN, FL_COEFF, and
					      FL_BOUNDS.  */
					   : (TREE_CODE (type) == ARRAY_TYPE
					      && TYPE_CONVENTION_FORTRAN_P
						 (type)
					      ? 224 : 192)),
				 field_list);

      field_list
	= make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
				 record_type, size_int (ndim), field_list);

      field_list
	= make_descriptor_field ("ARSIZE", gnat_type_for_size (32, 1),
				 record_type, size_in_bytes (type),
				 field_list);

      /* Now build a pointer to the 0,0,0... element.  */
      tem = build0 (PLACEHOLDER_EXPR, type);
      for (i = 0, inner_type = type; i < ndim;
	   i++, inner_type = TREE_TYPE (inner_type))
	tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
		      convert (TYPE_DOMAIN (inner_type), size_zero_node),
		      NULL_TREE, NULL_TREE);

      field_list
	= make_descriptor_field ("A0", pointer32_type, record_type,
				 build1 (ADDR_EXPR, pointer32_type, tem),
				 field_list);

      /* Next come the addressing coefficients.  */
      tem = size_one_node;
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];
	  /* Length of dimension I: max - min + 1, scaled by the running
	     product of the preceding dimensions (NCA only).  */
	  tree idx_length
	    = size_binop (MULT_EXPR, tem,
			  size_binop (PLUS_EXPR,
				      size_binop (MINUS_EXPR,
						  TYPE_MAX_VALUE (idx_arr[i]),
						  TYPE_MIN_VALUE (idx_arr[i])),
				      size_int (1)));

	  fname[0] = ((mech == By_Descriptor_NCA ||
		       mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
	  fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = make_descriptor_field (fname, gnat_type_for_size (32, 1),
				     record_type, idx_length, field_list);

	  if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
	    tem = idx_length;
	}

      /* Finally here are the bounds.  */
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];

	  fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = make_descriptor_field (fname, gnat_type_for_size (32, 1),
				     record_type, TYPE_MIN_VALUE (idx_arr[i]),
				     field_list);

	  fname[0] = 'U';
	  field_list
	    = make_descriptor_field (fname, gnat_type_for_size (32, 1),
				     record_type, TYPE_MAX_VALUE (idx_arr[i]),
				     field_list);
	}
      break;

    default:
      post_error ("unsupported descriptor type for &", gnat_entity);
    }

  TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC");
  finish_record_type (record_type, nreverse (field_list), 0, false);
  return record_type;
}
2609
2610 /* Build a 64-bit VMS descriptor from a Mechanism_Type, which must specify a
2611 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
2612 type contains in its DECL_INITIAL the expression to use when a constructor
2613 is made for the type. GNAT_ENTITY is an entity used to print out an error
2614 message if the mechanism cannot be applied to an object of that type and
2615 also for the name. */
2616
2617 tree
2618 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2619 {
2620 tree record_type = make_node (RECORD_TYPE);
2621 tree pointer64_type;
2622 tree field_list = NULL_TREE;
2623 int klass, ndim, i, dtype = 0;
2624 tree inner_type, tem;
2625 tree *idx_arr;
2626
2627 /* If TYPE is an unconstrained array, use the underlying array type. */
2628 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2629 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2630
2631 /* If this is an array, compute the number of dimensions in the array,
2632 get the index types, and point to the inner type. */
2633 if (TREE_CODE (type) != ARRAY_TYPE)
2634 ndim = 0;
2635 else
2636 for (ndim = 1, inner_type = type;
2637 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2638 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2639 ndim++, inner_type = TREE_TYPE (inner_type))
2640 ;
2641
2642 idx_arr = XALLOCAVEC (tree, ndim);
2643
2644 if (mech != By_Descriptor_NCA
2645 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2646 for (i = ndim - 1, inner_type = type;
2647 i >= 0;
2648 i--, inner_type = TREE_TYPE (inner_type))
2649 idx_arr[i] = TYPE_DOMAIN (inner_type);
2650 else
2651 for (i = 0, inner_type = type;
2652 i < ndim;
2653 i++, inner_type = TREE_TYPE (inner_type))
2654 idx_arr[i] = TYPE_DOMAIN (inner_type);
2655
2656 /* Now get the DTYPE value. */
2657 switch (TREE_CODE (type))
2658 {
2659 case INTEGER_TYPE:
2660 case ENUMERAL_TYPE:
2661 case BOOLEAN_TYPE:
2662 if (TYPE_VAX_FLOATING_POINT_P (type))
2663 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2664 {
2665 case 6:
2666 dtype = 10;
2667 break;
2668 case 9:
2669 dtype = 11;
2670 break;
2671 case 15:
2672 dtype = 27;
2673 break;
2674 }
2675 else
2676 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2677 {
2678 case 8:
2679 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2680 break;
2681 case 16:
2682 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2683 break;
2684 case 32:
2685 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2686 break;
2687 case 64:
2688 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2689 break;
2690 case 128:
2691 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2692 break;
2693 }
2694 break;
2695
2696 case REAL_TYPE:
2697 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2698 break;
2699
2700 case COMPLEX_TYPE:
2701 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2702 && TYPE_VAX_FLOATING_POINT_P (type))
2703 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2704 {
2705 case 6:
2706 dtype = 12;
2707 break;
2708 case 9:
2709 dtype = 13;
2710 break;
2711 case 15:
2712 dtype = 29;
2713 }
2714 else
2715 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2716 break;
2717
2718 case ARRAY_TYPE:
2719 dtype = 14;
2720 break;
2721
2722 default:
2723 break;
2724 }
2725
2726 /* Get the CLASS value. */
2727 switch (mech)
2728 {
2729 case By_Descriptor_A:
2730 klass = 4;
2731 break;
2732 case By_Descriptor_NCA:
2733 klass = 10;
2734 break;
2735 case By_Descriptor_SB:
2736 klass = 15;
2737 break;
2738 case By_Descriptor:
2739 case By_Descriptor_S:
2740 default:
2741 klass = 1;
2742 break;
2743 }
2744
2745 /* Make the type for a 64-bit descriptor for VMS. The first six fields
2746 are the same for all types. */
2747 field_list
2748 = make_descriptor_field ("MBO", gnat_type_for_size (16, 1),
2749 record_type, size_int (1), field_list);
2750 field_list
2751 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1),
2752 record_type, size_int (dtype), field_list);
2753 field_list
2754 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1),
2755 record_type, size_int (klass), field_list);
2756 field_list
2757 = make_descriptor_field ("MBMO", gnat_type_for_size (32, 1),
2758 record_type, ssize_int (-1), field_list);
2759 field_list
2760 = make_descriptor_field ("LENGTH", gnat_type_for_size (64, 1),
2761 record_type,
2762 size_in_bytes (mech == By_Descriptor_A
2763 ? inner_type : type),
2764 field_list);
2765
2766 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
2767
2768 field_list
2769 = make_descriptor_field ("POINTER", pointer64_type, record_type,
2770 build_unary_op (ADDR_EXPR, pointer64_type,
2771 build0 (PLACEHOLDER_EXPR, type)),
2772 field_list);
2773
2774 switch (mech)
2775 {
2776 case By_Descriptor:
2777 case By_Descriptor_S:
2778 break;
2779
2780 case By_Descriptor_SB:
2781 field_list
2782 = make_descriptor_field ("SB_L1", gnat_type_for_size (64, 1),
2783 record_type,
2784 (TREE_CODE (type) == ARRAY_TYPE
2785 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type))
2786 : size_zero_node),
2787 field_list);
2788 field_list
2789 = make_descriptor_field ("SB_U1", gnat_type_for_size (64, 1),
2790 record_type,
2791 (TREE_CODE (type) == ARRAY_TYPE
2792 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type))
2793 : size_zero_node),
2794 field_list);
2795 break;
2796
2797 case By_Descriptor_A:
2798 case By_Descriptor_NCA:
2799 field_list
2800 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
2801 record_type, size_zero_node, field_list);
2802
2803 field_list
2804 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
2805 record_type, size_zero_node, field_list);
2806
2807 dtype = (mech == By_Descriptor_NCA
2808 ? 0
2809 /* Set FL_COLUMN, FL_COEFF, and
2810 FL_BOUNDS. */
2811 : (TREE_CODE (type) == ARRAY_TYPE
2812 && TYPE_CONVENTION_FORTRAN_P (type)
2813 ? 224 : 192));
2814 field_list
2815 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
2816 record_type, size_int (dtype),
2817 field_list);
2818
2819 field_list
2820 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
2821 record_type, size_int (ndim), field_list);
2822
2823 field_list
2824 = make_descriptor_field ("MBZ", gnat_type_for_size (32, 1),
2825 record_type, size_int (0), field_list);
2826 field_list
2827 = make_descriptor_field ("ARSIZE", gnat_type_for_size (64, 1),
2828 record_type, size_in_bytes (type),
2829 field_list);
2830
2831 /* Now build a pointer to the 0,0,0... element. */
2832 tem = build0 (PLACEHOLDER_EXPR, type);
2833 for (i = 0, inner_type = type; i < ndim;
2834 i++, inner_type = TREE_TYPE (inner_type))
2835 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2836 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2837 NULL_TREE, NULL_TREE);
2838
2839 field_list
2840 = make_descriptor_field ("A0", pointer64_type, record_type,
2841 build1 (ADDR_EXPR, pointer64_type, tem),
2842 field_list);
2843
2844 /* Next come the addressing coefficients. */
2845 tem = size_one_node;
2846 for (i = 0; i < ndim; i++)
2847 {
2848 char fname[3];
2849 tree idx_length
2850 = size_binop (MULT_EXPR, tem,
2851 size_binop (PLUS_EXPR,
2852 size_binop (MINUS_EXPR,
2853 TYPE_MAX_VALUE (idx_arr[i]),
2854 TYPE_MIN_VALUE (idx_arr[i])),
2855 size_int (1)));
2856
2857 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
2858 fname[1] = '0' + i, fname[2] = 0;
2859 field_list
2860 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
2861 record_type, idx_length, field_list);
2862
2863 if (mech == By_Descriptor_NCA)
2864 tem = idx_length;
2865 }
2866
2867 /* Finally here are the bounds. */
2868 for (i = 0; i < ndim; i++)
2869 {
2870 char fname[3];
2871
2872 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2873 field_list
2874 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
2875 record_type,
2876 TYPE_MIN_VALUE (idx_arr[i]), field_list);
2877
2878 fname[0] = 'U';
2879 field_list
2880 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
2881 record_type,
2882 TYPE_MAX_VALUE (idx_arr[i]), field_list);
2883 }
2884 break;
2885
2886 default:
2887 post_error ("unsupported descriptor type for &", gnat_entity);
2888 }
2889
2890 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC64");
2891 finish_record_type (record_type, nreverse (field_list), 0, false);
2892 return record_type;
2893 }
2894
2895 /* Fill in a VMS descriptor of GNU_TYPE for GNU_EXPR and return the result.
2896 GNAT_ACTUAL is the actual parameter for which the descriptor is built. */
2897
2898 tree
2899 fill_vms_descriptor (tree gnu_type, tree gnu_expr, Node_Id gnat_actual)
2900 {
2901 VEC(constructor_elt,gc) *v = NULL;
2902 tree field;
2903
2904 gnu_expr = maybe_unconstrained_array (gnu_expr);
2905 gnu_expr = gnat_protect_expr (gnu_expr);
2906 gnat_mark_addressable (gnu_expr);
2907
2908 /* We may need to substitute both GNU_EXPR and a CALL_EXPR to the raise CE
2909 routine in case we have a 32-bit descriptor. */
2910 gnu_expr = build2 (COMPOUND_EXPR, void_type_node,
2911 build_call_raise (CE_Range_Check_Failed, gnat_actual,
2912 N_Raise_Constraint_Error),
2913 gnu_expr);
2914
2915 for (field = TYPE_FIELDS (gnu_type); field; field = DECL_CHAIN (field))
2916 {
2917 tree value
2918 = convert (TREE_TYPE (field),
2919 SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_INITIAL (field),
2920 gnu_expr));
2921 CONSTRUCTOR_APPEND_ELT (v, field, value);
2922 }
2923
2924 return gnat_build_constructor (gnu_type, v);
2925 }
2926
/* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
   regular pointer or fat pointer type.  GNAT_SUBPROG is the subprogram to
   which the VMS descriptor is passed.  The layout assumed here is the one
   produced by the 64-bit descriptor builder above: the first six fields are
   MBO, DTYPE, CLASS, MBMO, LENGTH and POINTER.  */

static tree
convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
{
  tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
  tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
  /* The CLASS field is the 3rd field in the descriptor.  */
  tree klass = DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type)));
  /* The POINTER field is the 6th field in the descriptor.  */
  tree pointer = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (klass)));

  /* Retrieve the value of the POINTER field.  */
  tree gnu_expr64
    = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);

  /* For a plain pointer target, the POINTER field is all we need.  */
  if (POINTER_TYPE_P (gnu_type))
    return convert (gnu_type, gnu_expr64);

  else if (TYPE_IS_FAT_POINTER_P (gnu_type))
    {
      tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
      tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
      tree template_type = TREE_TYPE (p_bounds_type);
      tree min_field = TYPE_FIELDS (template_type);
      tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
      tree template_tree, template_addr, aflags, dimct, t, u;
      /* See the head comment of build_vms_descriptor.  The CLASS codes
	 handled below (1=S, 4=A, 10=NCA, 15=SB) match the values stored
	 in the descriptor's CLASS field by the builder.  */
      int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
      tree lfield, ufield;
      VEC(constructor_elt,gc) *v;

      /* Convert POINTER to the pointer-to-array type.  */
      gnu_expr64 = convert (p_array_type, gnu_expr64);

      switch (iklass)
	{
	case 1:  /* Class S  */
	case 15: /* Class SB */
	  /* Build {1, LENGTH} template; LENGTH64 is the 5th field.  */
	  v = VEC_alloc (constructor_elt, gc, 2);
	  t = DECL_CHAIN (DECL_CHAIN (klass));
	  t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  CONSTRUCTOR_APPEND_ELT (v, min_field,
				  convert (TREE_TYPE (min_field),
					   integer_one_node));
	  CONSTRUCTOR_APPEND_ELT (v, max_field,
				  convert (TREE_TYPE (max_field), t));
	  template_tree = gnat_build_constructor (template_type, v);
	  template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);

	  /* For class S, we are done.  */
	  if (iklass == 1)
	    break;

	  /* Test that we really have a SB descriptor, like DEC Ada.  */
	  t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
	  u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
	  u = build_binary_op (EQ_EXPR, boolean_type_node, t, u);
	  /* If so, there is already a template in the descriptor and
	     it is located right after the POINTER field.  The fields are
	     64bits so they must be repacked.  */
	  t = TREE_CHAIN (pointer);
	  lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);

	  t = TREE_CHAIN (t);
	  ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  ufield = convert
	    (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type))), ufield);

	  /* Build the template in the form of a constructor.  */
	  v = VEC_alloc (constructor_elt, gc, 2);
	  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (template_type), lfield);
	  CONSTRUCTOR_APPEND_ELT (v, TREE_CHAIN (TYPE_FIELDS (template_type)),
				  ufield);
	  template_tree = gnat_build_constructor (template_type, v);

	  /* Otherwise use the {1, LENGTH} template we build above.  */
	  template_addr = build3 (COND_EXPR, p_bounds_type, u,
				  build_unary_op (ADDR_EXPR, p_bounds_type,
						  template_tree),
				  template_addr);
	  break;

	case 4:  /* Class A */
	  /* The AFLAGS field is the 3rd field after the pointer in the
	     descriptor.  */
	  t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer)));
	  aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  /* The DIMCT field is the next field in the descriptor after
	     aflags.  */
	  t = TREE_CHAIN (t);
	  dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  /* Raise CONSTRAINT_ERROR if either more than 1 dimension
	     or FL_COEFF or FL_BOUNDS not set.  192 is the mask with both
	     flag bits set (see the AFLAGS initialization in the builder).  */
	  u = build_int_cst (TREE_TYPE (aflags), 192);
	  u = build_binary_op (TRUTH_OR_EXPR, boolean_type_node,
			       build_binary_op (NE_EXPR, boolean_type_node,
						dimct,
						convert (TREE_TYPE (dimct),
							 size_one_node)),
			       build_binary_op (NE_EXPR, boolean_type_node,
						build2 (BIT_AND_EXPR,
							TREE_TYPE (aflags),
							aflags, u),
						u));
	  /* There is already a template in the descriptor and it is located
	     in block 3.  The fields are 64bits so they must be repacked.  */
	  t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN
	      (t)))));
	  lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);

	  t = TREE_CHAIN (t);
	  ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  ufield = convert
	    (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type))), ufield);

	  /* Build the template in the form of a constructor.  */
	  v = VEC_alloc (constructor_elt, gc, 2);
	  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (template_type), lfield);
	  CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (template_type)),
				  ufield);
	  template_tree = gnat_build_constructor (template_type, v);
	  /* Guard the template with the run-time check built above.  */
	  template_tree = build3 (COND_EXPR, template_type, u,
			    build_call_raise (CE_Length_Check_Failed, Empty,
					      N_Raise_Constraint_Error),
			    template_tree);
	  template_addr
	    = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
	  break;

	case 10: /* Class NCA */
	default:
	  post_error ("unsupported descriptor type for &", gnat_subprog);
	  template_addr = integer_zero_node;
	  break;
	}

      /* Build the fat pointer in the form of a constructor.  */
      v = VEC_alloc (constructor_elt, gc, 2);
      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (gnu_type), gnu_expr64);
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (gnu_type)),
			      template_addr);
      return gnat_build_constructor (gnu_type, v);
    }

  else
    gcc_unreachable ();
}
3080
/* Convert GNU_EXPR, a pointer to a 32bit VMS descriptor, to GNU_TYPE, a
   regular pointer or fat pointer type.  GNAT_SUBPROG is the subprogram to
   which the VMS descriptor is passed.  The field positions used below are
   those of the 32-bit descriptor layout; presumably they mirror the 32-bit
   builder (not visible here) — verify against build_vms_descriptor.  */

static tree
convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
{
  tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
  tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
  /* The CLASS field is the 3rd field in the descriptor.  */
  tree klass = DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type)));
  /* The POINTER field is the 4th field in the descriptor.  */
  tree pointer = DECL_CHAIN (klass);

  /* Retrieve the value of the POINTER field.  */
  tree gnu_expr32
    = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);

  /* For a plain pointer target, the POINTER field is all we need.  */
  if (POINTER_TYPE_P (gnu_type))
    return convert (gnu_type, gnu_expr32);

  else if (TYPE_IS_FAT_POINTER_P (gnu_type))
    {
      tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
      tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
      tree template_type = TREE_TYPE (p_bounds_type);
      tree min_field = TYPE_FIELDS (template_type);
      tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
      tree template_tree, template_addr, aflags, dimct, t, u;
      /* See the head comment of build_vms_descriptor.  */
      int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
      VEC(constructor_elt,gc) *v;

      /* Convert POINTER to the pointer-to-array type.  */
      gnu_expr32 = convert (p_array_type, gnu_expr32);

      switch (iklass)
	{
	case 1:  /* Class S  */
	case 15: /* Class SB */
	  /* Build {1, LENGTH} template; LENGTH is the 1st field.  */
	  v = VEC_alloc (constructor_elt, gc, 2);
	  t = TYPE_FIELDS (desc_type);
	  t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  CONSTRUCTOR_APPEND_ELT (v, min_field,
				  convert (TREE_TYPE (min_field),
					   integer_one_node));
	  CONSTRUCTOR_APPEND_ELT (v, max_field,
				  convert (TREE_TYPE (max_field), t));
	  template_tree = gnat_build_constructor (template_type, v);
	  template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);

	  /* For class S, we are done.  */
	  if (iklass == 1)
	    break;

	  /* Test that we really have a SB descriptor, like DEC Ada.  */
	  t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
	  u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
	  u = build_binary_op (EQ_EXPR, boolean_type_node, t, u);
	  /* If so, there is already a template in the descriptor and
	     it is located right after the POINTER field.  */
	  t = TREE_CHAIN (pointer);
	  template_tree
	    = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  /* Otherwise use the {1, LENGTH} template we build above.  */
	  template_addr = build3 (COND_EXPR, p_bounds_type, u,
				  build_unary_op (ADDR_EXPR, p_bounds_type,
						  template_tree),
				  template_addr);
	  break;

	case 4:  /* Class A */
	  /* The AFLAGS field is the 7th field in the descriptor.  */
	  t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer)));
	  aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  /* The DIMCT field is the 8th field in the descriptor.  */
	  t = TREE_CHAIN (t);
	  dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  /* Raise CONSTRAINT_ERROR if either more than 1 dimension
	     or FL_COEFF or FL_BOUNDS not set.  */
	  u = build_int_cst (TREE_TYPE (aflags), 192);
	  u = build_binary_op (TRUTH_OR_EXPR, boolean_type_node,
			       build_binary_op (NE_EXPR, boolean_type_node,
						dimct,
						convert (TREE_TYPE (dimct),
							 size_one_node)),
			       build_binary_op (NE_EXPR, boolean_type_node,
						build2 (BIT_AND_EXPR,
							TREE_TYPE (aflags),
							aflags, u),
						u));
	  /* There is already a template in the descriptor and it is
	     located at the start of block 3 (12th field).  */
	  t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (t))));
	  template_tree
	    = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  /* Guard the template with the run-time check built above.  */
	  template_tree = build3 (COND_EXPR, TREE_TYPE (t), u,
			    build_call_raise (CE_Length_Check_Failed, Empty,
					      N_Raise_Constraint_Error),
			    template_tree);
	  template_addr
	    = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
	  break;

	case 10: /* Class NCA */
	default:
	  post_error ("unsupported descriptor type for &", gnat_subprog);
	  template_addr = integer_zero_node;
	  break;
	}

      /* Build the fat pointer in the form of a constructor.  */
      v = VEC_alloc (constructor_elt, gc, 2);
      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (gnu_type), gnu_expr32);
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (gnu_type)),
			      template_addr);

      return gnat_build_constructor (gnu_type, v);
    }

  else
    gcc_unreachable ();
}
3205
3206 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
3207 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
3208 pointer type of GNU_EXPR. BY_REF is true if the result is to be used by
3209 reference. GNAT_SUBPROG is the subprogram to which the VMS descriptor is
3210 passed. */
3211
3212 static tree
3213 convert_vms_descriptor (tree gnu_type, tree gnu_expr, tree gnu_expr_alt_type,
3214 bool by_ref, Entity_Id gnat_subprog)
3215 {
3216 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3217 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3218 tree mbo = TYPE_FIELDS (desc_type);
3219 const char *mbostr = IDENTIFIER_POINTER (DECL_NAME (mbo));
3220 tree mbmo = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (mbo)));
3221 tree real_type, is64bit, gnu_expr32, gnu_expr64;
3222
3223 if (by_ref)
3224 real_type = TREE_TYPE (gnu_type);
3225 else
3226 real_type = gnu_type;
3227
3228 /* If the field name is not MBO, it must be 32-bit and no alternate.
3229 Otherwise primary must be 64-bit and alternate 32-bit. */
3230 if (strcmp (mbostr, "MBO") != 0)
3231 {
3232 tree ret = convert_vms_descriptor32 (real_type, gnu_expr, gnat_subprog);
3233 if (by_ref)
3234 ret = build_unary_op (ADDR_EXPR, gnu_type, ret);
3235 return ret;
3236 }
3237
3238 /* Build the test for 64-bit descriptor. */
3239 mbo = build3 (COMPONENT_REF, TREE_TYPE (mbo), desc, mbo, NULL_TREE);
3240 mbmo = build3 (COMPONENT_REF, TREE_TYPE (mbmo), desc, mbmo, NULL_TREE);
3241 is64bit
3242 = build_binary_op (TRUTH_ANDIF_EXPR, boolean_type_node,
3243 build_binary_op (EQ_EXPR, boolean_type_node,
3244 convert (integer_type_node, mbo),
3245 integer_one_node),
3246 build_binary_op (EQ_EXPR, boolean_type_node,
3247 convert (integer_type_node, mbmo),
3248 integer_minus_one_node));
3249
3250 /* Build the 2 possible end results. */
3251 gnu_expr64 = convert_vms_descriptor64 (real_type, gnu_expr, gnat_subprog);
3252 if (by_ref)
3253 gnu_expr64 = build_unary_op (ADDR_EXPR, gnu_type, gnu_expr64);
3254 gnu_expr = fold_convert (gnu_expr_alt_type, gnu_expr);
3255 gnu_expr32 = convert_vms_descriptor32 (real_type, gnu_expr, gnat_subprog);
3256 if (by_ref)
3257 gnu_expr32 = build_unary_op (ADDR_EXPR, gnu_type, gnu_expr32);
3258
3259 return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
3260 }
3261
/* Build a stub for the subprogram specified by the GCC tree GNU_SUBPROG
   and the GNAT node GNAT_SUBPROG.  The stub shares the parameter list of
   GNU_SUBPROG but converts any parameter passed by VMS descriptor into a
   by-reference one before forwarding the call.  */

void
build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
{
  tree gnu_subprog_type, gnu_subprog_addr, gnu_subprog_call;
  tree gnu_subprog_param, gnu_stub_param, gnu_param;
  tree gnu_stub_decl = DECL_FUNCTION_STUB (gnu_subprog);
  VEC(tree,gc) *gnu_param_vec = NULL;

  gnu_subprog_type = TREE_TYPE (gnu_subprog);

  /* Initialize the information structure for the function.  */
  allocate_struct_function (gnu_stub_decl, false);
  set_cfun (NULL);

  begin_subprog_body (gnu_stub_decl);

  start_stmt_group ();
  gnat_pushlevel ();

  /* Loop over the parameters of the stub and translate any of them
     passed by descriptor into a by reference one.  The stub and the
     target subprogram have parallel parameter lists, walked in step.  */
  for (gnu_stub_param = DECL_ARGUMENTS (gnu_stub_decl),
       gnu_subprog_param = DECL_ARGUMENTS (gnu_subprog);
       gnu_stub_param;
       gnu_stub_param = TREE_CHAIN (gnu_stub_param),
       gnu_subprog_param = TREE_CHAIN (gnu_subprog_param))
    {
      if (DECL_BY_DESCRIPTOR_P (gnu_stub_param))
	{
	  /* A descriptor parameter must correspond to a by-reference
	     parameter of the target subprogram.  */
	  gcc_assert (DECL_BY_REF_P (gnu_subprog_param));
	  gnu_param
	    = convert_vms_descriptor (TREE_TYPE (gnu_subprog_param),
				      gnu_stub_param,
				      DECL_PARM_ALT_TYPE (gnu_stub_param),
				      DECL_BY_DOUBLE_REF_P (gnu_subprog_param),
				      gnat_subprog);
	}
      else
	gnu_param = gnu_stub_param;

      VEC_safe_push (tree, gc, gnu_param_vec, gnu_param);
    }

  /* Invoke the internal subprogram.  */
  gnu_subprog_addr = build1 (ADDR_EXPR, build_pointer_type (gnu_subprog_type),
			     gnu_subprog);
  gnu_subprog_call = build_call_vec (TREE_TYPE (gnu_subprog_type),
				     gnu_subprog_addr, gnu_param_vec);

  /* Propagate the return value, if any.  */
  if (VOID_TYPE_P (TREE_TYPE (gnu_subprog_type)))
    add_stmt (gnu_subprog_call);
  else
    add_stmt (build_return_expr (DECL_RESULT (gnu_stub_decl),
				 gnu_subprog_call));

  gnat_poplevel ();
  end_subprog_body (end_stmt_group ());
}
3324 \f
3325 /* Build a type to be used to represent an aliased object whose nominal type
3326 is an unconstrained array. This consists of a RECORD_TYPE containing a
3327 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
3328 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
3329 an arbitrary unconstrained object. Use NAME as the name of the record.
3330 DEBUG_INFO_P is true if we need to write debug information for the type. */
3331
3332 tree
3333 build_unc_object_type (tree template_type, tree object_type, tree name,
3334 bool debug_info_p)
3335 {
3336 tree type = make_node (RECORD_TYPE);
3337 tree template_field
3338 = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
3339 NULL_TREE, NULL_TREE, 0, 1);
3340 tree array_field
3341 = create_field_decl (get_identifier ("ARRAY"), object_type, type,
3342 NULL_TREE, NULL_TREE, 0, 1);
3343
3344 TYPE_NAME (type) = name;
3345 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3346 DECL_CHAIN (template_field) = array_field;
3347 finish_record_type (type, template_field, 0, true);
3348
3349 /* Declare it now since it will never be declared otherwise. This is
3350 necessary to ensure that its subtrees are properly marked. */
3351 create_type_decl (name, type, NULL, true, debug_info_p, Empty);
3352
3353 return type;
3354 }
3355
3356 /* Same, taking a thin or fat pointer type instead of a template type. */
3357
3358 tree
3359 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3360 tree name, bool debug_info_p)
3361 {
3362 tree template_type;
3363
3364 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3365
3366 template_type
3367 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
3368 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3369 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3370
3371 return
3372 build_unc_object_type (template_type, object_type, name, debug_info_p);
3373 }
3374
3375 /* Shift the component offsets within an unconstrained object TYPE to make it
3376 suitable for use as a designated type for thin pointers. */
3377
3378 void
3379 shift_unc_components_for_thin_pointers (tree type)
3380 {
3381 /* Thin pointer values designate the ARRAY data of an unconstrained object,
3382 allocated past the BOUNDS template. The designated type is adjusted to
3383 have ARRAY at position zero and the template at a negative offset, so
3384 that COMPONENT_REFs on (*thin_ptr) designate the proper location. */
3385
3386 tree bounds_field = TYPE_FIELDS (type);
3387 tree array_field = DECL_CHAIN (TYPE_FIELDS (type));
3388
3389 DECL_FIELD_OFFSET (bounds_field)
3390 = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));
3391
3392 DECL_FIELD_OFFSET (array_field) = size_zero_node;
3393 DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
3394 }
3395 \f
/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
   In the normal case this is just two adjustments, but we have more to
   do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE.  */

void
update_pointer_to (tree old_type, tree new_type)
{
  tree ptr = TYPE_POINTER_TO (old_type);
  tree ref = TYPE_REFERENCE_TO (old_type);
  tree t;

  /* If this is the main variant, process all the other variants first.  */
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
    for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
      update_pointer_to (t, new_type);

  /* If no pointers and no references, we are done.  */
  if (!ptr && !ref)
    return;

  /* Merge the old type qualifiers in the new type.

     Each old variant has qualifiers for specific reasons, and the new
     designated type as well.  Each set of qualifiers represents useful
     information grabbed at some point, and merging the two simply unifies
     these inputs into the final type description.

     Consider for instance a volatile type frozen after an access to constant
     type designating it; after the designated type's freeze, we get here with
     a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
     when the access type was processed.  We will make a volatile and readonly
     designated type, because that's what it really is.

     We might also get here for a non-dummy OLD_TYPE variant with different
     qualifiers than those of NEW_TYPE, for instance in some cases of pointers
     to private record type elaboration (see the comments around the call to
     this routine in gnat_to_gnu_entity <E_Access_Type>).  We have to merge
     the qualifiers in those cases too, to avoid accidentally discarding the
     initial set, and will often end up with OLD_TYPE == NEW_TYPE then.  */
  new_type
    = build_qualified_type (new_type,
			    TYPE_QUALS (old_type) | TYPE_QUALS (new_type));

  /* If old type and new type are identical, there is nothing to do.  */
  if (old_type == new_type)
    return;

  /* Otherwise, first handle the simple case.  */
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
    {
      tree new_ptr, new_ref;

      /* If pointer or reference already points to new type, nothing to do.
	 This can happen as update_pointer_to can be invoked multiple times
	 on the same couple of types because of the type variants.  */
      if ((ptr && TREE_TYPE (ptr) == new_type)
	  || (ref && TREE_TYPE (ref) == new_type))
	return;

      /* Chain PTR and its variants at the end.  */
      new_ptr = TYPE_POINTER_TO (new_type);
      if (new_ptr)
	{
	  while (TYPE_NEXT_PTR_TO (new_ptr))
	    new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
	  TYPE_NEXT_PTR_TO (new_ptr) = ptr;
	}
      else
	TYPE_POINTER_TO (new_type) = ptr;

      /* Now adjust them: every variant of every chained pointer type gets
	 NEW_TYPE as its designated type.  */
      for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
	for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
	  TREE_TYPE (t) = new_type;
      TYPE_POINTER_TO (old_type) = NULL_TREE;

      /* Chain REF and its variants at the end.  */
      new_ref = TYPE_REFERENCE_TO (new_type);
      if (new_ref)
	{
	  while (TYPE_NEXT_REF_TO (new_ref))
	    new_ref = TYPE_NEXT_REF_TO (new_ref);
	  TYPE_NEXT_REF_TO (new_ref) = ref;
	}
      else
	TYPE_REFERENCE_TO (new_type) = ref;

      /* Now adjust them, as for the pointers above.  */
      for (; ref; ref = TYPE_NEXT_REF_TO (ref))
	for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
	  TREE_TYPE (t) = new_type;
      TYPE_REFERENCE_TO (old_type) = NULL_TREE;
    }

  /* Now deal with the unconstrained array case.  In this case the pointer
     is actually a record where both fields are pointers to dummy nodes.
     Turn them into pointers to the correct types using update_pointer_to.  */
  else
    {
      tree new_ptr = TYPE_MAIN_VARIANT (TYPE_POINTER_TO (new_type));
      tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
      tree array_field, bounds_field, new_ref, last = NULL_TREE;

      /* Pointers to an unconstrained array are fat pointer records.  */
      gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));

      /* If PTR already points to new type, nothing to do.  This can happen
	 since update_pointer_to can be invoked multiple times on the same
	 couple of types because of the type variants.  */
      if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
	return;

      array_field = TYPE_FIELDS (ptr);
      bounds_field = DECL_CHAIN (array_field);

      /* Make pointers to the dummy template point to the real template.  */
      update_pointer_to
	(TREE_TYPE (TREE_TYPE (bounds_field)),
	 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));

      /* The references to the template bounds present in the array type use
	 the bounds field of NEW_PTR through a PLACEHOLDER_EXPR.  Since we
	 are going to merge PTR in NEW_PTR, we must rework these references
	 to use the bounds field of PTR instead.  */
      new_ref = build3 (COMPONENT_REF, TREE_TYPE (bounds_field),
			build0 (PLACEHOLDER_EXPR, new_ptr),
			bounds_field, NULL_TREE);

      /* Create the new array for the new PLACEHOLDER_EXPR and make pointers
	 to the dummy array point to it.  */
      update_pointer_to
	(TREE_TYPE (TREE_TYPE (array_field)),
	 substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))),
			     DECL_CHAIN (TYPE_FIELDS (new_ptr)), new_ref));

      /* Merge PTR in NEW_PTR: move PTR's fields into NEW_PTR and share
	 them across all of NEW_PTR's variants.  */
      DECL_FIELD_CONTEXT (array_field) = new_ptr;
      DECL_FIELD_CONTEXT (bounds_field) = new_ptr;
      for (t = new_ptr; t; last = t, t = TYPE_NEXT_VARIANT (t))
	TYPE_FIELDS (t) = TYPE_FIELDS (ptr);
      TYPE_ALIAS_SET (new_ptr) = TYPE_ALIAS_SET (ptr);

      /* Chain PTR and its variants at the end.  */
      TYPE_NEXT_VARIANT (last) = TYPE_MAIN_VARIANT (ptr);

      /* Now adjust them.  */
      for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
	{
	  TYPE_MAIN_VARIANT (t) = new_ptr;
	  SET_TYPE_UNCONSTRAINED_ARRAY (t, new_type);

	  /* And show the original pointer NEW_PTR to the debugger.  This is
	     the counterpart of the special processing for fat pointer types
	     in gnat_pushdecl, but when the unconstrained array type is only
	     frozen after access types to it.  */
	  if (TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL)
	    {
	      DECL_ORIGINAL_TYPE (TYPE_NAME (t)) = new_ptr;
	      DECL_ARTIFICIAL (TYPE_NAME (t)) = 0;
	    }
	}

      /* Now handle updating the allocation record, what the thin pointer
	 points to.  Update all pointers from the old record into the new
	 one, update the type of the array field, and recompute the size.  */
      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
      TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TREE_TYPE (TREE_TYPE (array_field));

      /* The size recomputation needs to account for alignment constraints, so
	 we let layout_type work it out.  This will reset the field offsets to
	 what they would be in a regular record, so we shift them back to what
	 we want them to be for a thin pointer designated type afterwards.  */
      DECL_SIZE (TYPE_FIELDS (new_obj_rec)) = NULL_TREE;
      DECL_SIZE (DECL_CHAIN (TYPE_FIELDS (new_obj_rec))) = NULL_TREE;
      TYPE_SIZE (new_obj_rec) = NULL_TREE;
      layout_type (new_obj_rec);
      shift_unc_components_for_thin_pointers (new_obj_rec);

      /* We are done, at last.  */
      rest_of_record_type_compilation (ptr);
    }
}
3578 \f
3579 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
3580 unconstrained one. This involves making or finding a template. */
3581
3582 static tree
3583 convert_to_fat_pointer (tree type, tree expr)
3584 {
3585 tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
3586 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
3587 tree etype = TREE_TYPE (expr);
3588 tree template_tree;
3589 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 2);
3590
3591 /* If EXPR is null, make a fat pointer that contains null pointers to the
3592 template and array. */
3593 if (integer_zerop (expr))
3594 {
3595 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
3596 convert (p_array_type, expr));
3597 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
3598 convert (build_pointer_type (template_type),
3599 expr));
3600 return gnat_build_constructor (type, v);
3601 }
3602
3603 /* If EXPR is a thin pointer, make template and data from the record.. */
3604 else if (TYPE_IS_THIN_POINTER_P (etype))
3605 {
3606 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
3607
3608 expr = gnat_protect_expr (expr);
3609 if (TREE_CODE (expr) == ADDR_EXPR)
3610 expr = TREE_OPERAND (expr, 0);
3611 else
3612 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
3613
3614 template_tree = build_component_ref (expr, NULL_TREE, fields, false);
3615 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3616 build_component_ref (expr, NULL_TREE,
3617 DECL_CHAIN (fields), false));
3618 }
3619
3620 /* Otherwise, build the constructor for the template. */
3621 else
3622 template_tree = build_template (template_type, TREE_TYPE (etype), expr);
3623
3624 /* The final result is a constructor for the fat pointer.
3625
3626 If EXPR is an argument of a foreign convention subprogram, the type it
3627 points to is directly the component type. In this case, the expression
3628 type may not match the corresponding FIELD_DECL type at this point, so we
3629 call "convert" here to fix that up if necessary. This type consistency is
3630 required, for instance because it ensures that possible later folding of
3631 COMPONENT_REFs against this constructor always yields something of the
3632 same type as the initial reference.
3633
3634 Note that the call to "build_template" above is still fine because it
3635 will only refer to the provided TEMPLATE_TYPE in this case. */
3636 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
3637 convert (p_array_type, expr));
3638 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
3639 build_unary_op (ADDR_EXPR, NULL_TREE,
3640 template_tree));
3641 return gnat_build_constructor (type, v);
3642 }
3643 \f
3644 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
3645 is something that is a fat pointer, so convert to it first if it EXPR
3646 is not already a fat pointer. */
3647
3648 static tree
3649 convert_to_thin_pointer (tree type, tree expr)
3650 {
3651 if (!TYPE_IS_FAT_POINTER_P (TREE_TYPE (expr)))
3652 expr
3653 = convert_to_fat_pointer
3654 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
3655
3656 /* We get the pointer to the data and use a NOP_EXPR to make it the
3657 proper GCC type. */
3658 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
3659 false);
3660 expr = build1 (NOP_EXPR, type, expr);
3661
3662 return expr;
3663 }
3664 \f
3665 /* Create an expression whose value is that of EXPR,
3666 converted to type TYPE. The TREE_TYPE of the value
3667 is always TYPE. This function implements all reasonable
3668 conversions; callers should filter out those that are
3669 not permitted by the language being compiled. */
3670
3671 tree
3672 convert (tree type, tree expr)
3673 {
3674 tree etype = TREE_TYPE (expr);
3675 enum tree_code ecode = TREE_CODE (etype);
3676 enum tree_code code = TREE_CODE (type);
3677
3678 /* If the expression is already of the right type, we are done. */
3679 if (etype == type)
3680 return expr;
3681
3682 /* If both input and output have padding and are of variable size, do this
3683 as an unchecked conversion. Likewise if one is a mere variant of the
3684 other, so we avoid a pointless unpad/repad sequence. */
3685 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
3686 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
3687 && (!TREE_CONSTANT (TYPE_SIZE (type))
3688 || !TREE_CONSTANT (TYPE_SIZE (etype))
3689 || gnat_types_compatible_p (type, etype)
3690 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
3691 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
3692 ;
3693
3694 /* If the output type has padding, convert to the inner type and make a
3695 constructor to build the record, unless a variable size is involved. */
3696 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
3697 {
3698 VEC(constructor_elt,gc) *v;
3699
3700 /* If we previously converted from another type and our type is
3701 of variable size, remove the conversion to avoid the need for
3702 variable-sized temporaries. Likewise for a conversion between
3703 original and packable version. */
3704 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3705 && (!TREE_CONSTANT (TYPE_SIZE (type))
3706 || (ecode == RECORD_TYPE
3707 && TYPE_NAME (etype)
3708 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
3709 expr = TREE_OPERAND (expr, 0);
3710
3711 /* If we are just removing the padding from expr, convert the original
3712 object if we have variable size in order to avoid the need for some
3713 variable-sized temporaries. Likewise if the padding is a variant
3714 of the other, so we avoid a pointless unpad/repad sequence. */
3715 if (TREE_CODE (expr) == COMPONENT_REF
3716 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
3717 && (!TREE_CONSTANT (TYPE_SIZE (type))
3718 || gnat_types_compatible_p (type,
3719 TREE_TYPE (TREE_OPERAND (expr, 0)))
3720 || (ecode == RECORD_TYPE
3721 && TYPE_NAME (etype)
3722 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
3723 return convert (type, TREE_OPERAND (expr, 0));
3724
3725 /* If the inner type is of self-referential size and the expression type
3726 is a record, do this as an unchecked conversion. But first pad the
3727 expression if possible to have the same size on both sides. */
3728 if (ecode == RECORD_TYPE
3729 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
3730 {
3731 if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
3732 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
3733 false, false, false, true),
3734 expr);
3735 return unchecked_convert (type, expr, false);
3736 }
3737
3738 /* If we are converting between array types with variable size, do the
3739 final conversion as an unchecked conversion, again to avoid the need
3740 for some variable-sized temporaries. If valid, this conversion is
3741 very likely purely technical and without real effects. */
3742 if (ecode == ARRAY_TYPE
3743 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
3744 && !TREE_CONSTANT (TYPE_SIZE (etype))
3745 && !TREE_CONSTANT (TYPE_SIZE (type)))
3746 return unchecked_convert (type,
3747 convert (TREE_TYPE (TYPE_FIELDS (type)),
3748 expr),
3749 false);
3750
3751 v = VEC_alloc (constructor_elt, gc, 1);
3752 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
3753 convert (TREE_TYPE (TYPE_FIELDS (type)), expr));
3754 return gnat_build_constructor (type, v);
3755 }
3756
3757 /* If the input type has padding, remove it and convert to the output type.
3758 The conditions ordering is arranged to ensure that the output type is not
3759 a padding type here, as it is not clear whether the conversion would
3760 always be correct if this was to happen. */
3761 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
3762 {
3763 tree unpadded;
3764
3765 /* If we have just converted to this padded type, just get the
3766 inner expression. */
3767 if (TREE_CODE (expr) == CONSTRUCTOR
3768 && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
3769 && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
3770 == TYPE_FIELDS (etype))
3771 unpadded
3772 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;
3773
3774 /* Otherwise, build an explicit component reference. */
3775 else
3776 unpadded
3777 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
3778
3779 return convert (type, unpadded);
3780 }
3781
3782 /* If the input is a biased type, adjust first. */
3783 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
3784 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
3785 fold_convert (TREE_TYPE (etype),
3786 expr),
3787 TYPE_MIN_VALUE (etype)));
3788
3789 /* If the input is a justified modular type, we need to extract the actual
3790 object before converting it to any other type with the exceptions of an
3791 unconstrained array or of a mere type variant. It is useful to avoid the
3792 extraction and conversion in the type variant case because it could end
3793 up replacing a VAR_DECL expr by a constructor and we might be about the
3794 take the address of the result. */
3795 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
3796 && code != UNCONSTRAINED_ARRAY_TYPE
3797 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
3798 return convert (type, build_component_ref (expr, NULL_TREE,
3799 TYPE_FIELDS (etype), false));
3800
3801 /* If converting to a type that contains a template, convert to the data
3802 type and then build the template. */
3803 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
3804 {
3805 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
3806 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 2);
3807
3808 /* If the source already has a template, get a reference to the
3809 associated array only, as we are going to rebuild a template
3810 for the target type anyway. */
3811 expr = maybe_unconstrained_array (expr);
3812
3813 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
3814 build_template (TREE_TYPE (TYPE_FIELDS (type)),
3815 obj_type, NULL_TREE));
3816 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
3817 convert (obj_type, expr));
3818 return gnat_build_constructor (type, v);
3819 }
3820
3821 /* There are some special cases of expressions that we process
3822 specially. */
3823 switch (TREE_CODE (expr))
3824 {
3825 case ERROR_MARK:
3826 return expr;
3827
3828 case NULL_EXPR:
3829 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
3830 conversion in gnat_expand_expr. NULL_EXPR does not represent
3831 and actual value, so no conversion is needed. */
3832 expr = copy_node (expr);
3833 TREE_TYPE (expr) = type;
3834 return expr;
3835
3836 case STRING_CST:
3837 /* If we are converting a STRING_CST to another constrained array type,
3838 just make a new one in the proper type. */
3839 if (code == ecode && AGGREGATE_TYPE_P (etype)
3840 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
3841 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
3842 {
3843 expr = copy_node (expr);
3844 TREE_TYPE (expr) = type;
3845 return expr;
3846 }
3847 break;
3848
3849 case VECTOR_CST:
3850 /* If we are converting a VECTOR_CST to a mere variant type, just make
3851 a new one in the proper type. */
3852 if (code == ecode && gnat_types_compatible_p (type, etype))
3853 {
3854 expr = copy_node (expr);
3855 TREE_TYPE (expr) = type;
3856 return expr;
3857 }
3858
3859 case CONSTRUCTOR:
3860 /* If we are converting a CONSTRUCTOR to a mere variant type, just make
3861 a new one in the proper type. */
3862 if (code == ecode && gnat_types_compatible_p (type, etype))
3863 {
3864 expr = copy_node (expr);
3865 TREE_TYPE (expr) = type;
3866 return expr;
3867 }
3868
3869 /* Likewise for a conversion between original and packable version, or
3870 conversion between types of the same size and with the same list of
3871 fields, but we have to work harder to preserve type consistency. */
3872 if (code == ecode
3873 && code == RECORD_TYPE
3874 && (TYPE_NAME (type) == TYPE_NAME (etype)
3875 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
3876
3877 {
3878 VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
3879 unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
3880 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, len);
3881 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
3882 unsigned HOST_WIDE_INT idx;
3883 tree index, value;
3884
3885 /* Whether we need to clear TREE_CONSTANT et al. on the output
3886 constructor when we convert in place. */
3887 bool clear_constant = false;
3888
3889 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
3890 {
3891 constructor_elt *elt;
3892 /* We expect only simple constructors. */
3893 if (!SAME_FIELD_P (index, efield))
3894 break;
3895 /* The field must be the same. */
3896 if (!SAME_FIELD_P (efield, field))
3897 break;
3898 elt = VEC_quick_push (constructor_elt, v, NULL);
3899 elt->index = field;
3900 elt->value = convert (TREE_TYPE (field), value);
3901
3902 /* If packing has made this field a bitfield and the input
3903 value couldn't be emitted statically any more, we need to
3904 clear TREE_CONSTANT on our output. */
3905 if (!clear_constant
3906 && TREE_CONSTANT (expr)
3907 && !CONSTRUCTOR_BITFIELD_P (efield)
3908 && CONSTRUCTOR_BITFIELD_P (field)
3909 && !initializer_constant_valid_for_bitfield_p (value))
3910 clear_constant = true;
3911
3912 efield = DECL_CHAIN (efield);
3913 field = DECL_CHAIN (field);
3914 }
3915
3916 /* If we have been able to match and convert all the input fields
3917 to their output type, convert in place now. We'll fallback to a
3918 view conversion downstream otherwise. */
3919 if (idx == len)
3920 {
3921 expr = copy_node (expr);
3922 TREE_TYPE (expr) = type;
3923 CONSTRUCTOR_ELTS (expr) = v;
3924 if (clear_constant)
3925 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
3926 return expr;
3927 }
3928 }
3929
3930 /* Likewise for a conversion between array type and vector type with a
3931 compatible representative array. */
3932 else if (code == VECTOR_TYPE
3933 && ecode == ARRAY_TYPE
3934 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
3935 etype))
3936 {
3937 VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
3938 unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
3939 VEC(constructor_elt,gc) *v;
3940 unsigned HOST_WIDE_INT ix;
3941 tree value;
3942
3943 /* Build a VECTOR_CST from a *constant* array constructor. */
3944 if (TREE_CONSTANT (expr))
3945 {
3946 bool constant_p = true;
3947
3948 /* Iterate through elements and check if all constructor
3949 elements are *_CSTs. */
3950 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
3951 if (!CONSTANT_CLASS_P (value))
3952 {
3953 constant_p = false;
3954 break;
3955 }
3956
3957 if (constant_p)
3958 return build_vector_from_ctor (type,
3959 CONSTRUCTOR_ELTS (expr));
3960 }
3961
3962 /* Otherwise, build a regular vector constructor. */
3963 v = VEC_alloc (constructor_elt, gc, len);
3964 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
3965 {
3966 constructor_elt *elt = VEC_quick_push (constructor_elt, v, NULL);
3967 elt->index = NULL_TREE;
3968 elt->value = value;
3969 }
3970 expr = copy_node (expr);
3971 TREE_TYPE (expr) = type;
3972 CONSTRUCTOR_ELTS (expr) = v;
3973 return expr;
3974 }
3975 break;
3976
3977 case UNCONSTRAINED_ARRAY_REF:
3978 /* Convert this to the type of the inner array by getting the address of
3979 the array from the template. */
3980 expr = TREE_OPERAND (expr, 0);
3981 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3982 build_component_ref (expr, NULL_TREE,
3983 TYPE_FIELDS
3984 (TREE_TYPE (expr)),
3985 false));
3986 etype = TREE_TYPE (expr);
3987 ecode = TREE_CODE (etype);
3988 break;
3989
3990 case VIEW_CONVERT_EXPR:
3991 {
3992 /* GCC 4.x is very sensitive to type consistency overall, and view
3993 conversions thus are very frequent. Even though just "convert"ing
3994 the inner operand to the output type is fine in most cases, it
3995 might expose unexpected input/output type mismatches in special
3996 circumstances so we avoid such recursive calls when we can. */
3997 tree op0 = TREE_OPERAND (expr, 0);
3998
3999 /* If we are converting back to the original type, we can just
4000 lift the input conversion. This is a common occurrence with
4001 switches back-and-forth amongst type variants. */
4002 if (type == TREE_TYPE (op0))
4003 return op0;
4004
4005 /* Otherwise, if we're converting between two aggregate or vector
4006 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4007 target type in place or to just convert the inner expression. */
4008 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4009 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
4010 {
4011 /* If we are converting between mere variants, we can just
4012 substitute the VIEW_CONVERT_EXPR in place. */
4013 if (gnat_types_compatible_p (type, etype))
4014 return build1 (VIEW_CONVERT_EXPR, type, op0);
4015
4016 /* Otherwise, we may just bypass the input view conversion unless
4017 one of the types is a fat pointer, which is handled by
4018 specialized code below which relies on exact type matching. */
4019 else if (!TYPE_IS_FAT_POINTER_P (type)
4020 && !TYPE_IS_FAT_POINTER_P (etype))
4021 return convert (type, op0);
4022 }
4023 }
4024 break;
4025
4026 default:
4027 break;
4028 }
4029
4030 /* Check for converting to a pointer to an unconstrained array. */
4031 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
4032 return convert_to_fat_pointer (type, expr);
4033
4034 /* If we are converting between two aggregate or vector types that are mere
4035 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4036 to a vector type from its representative array type. */
4037 else if ((code == ecode
4038 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4039 && gnat_types_compatible_p (type, etype))
4040 || (code == VECTOR_TYPE
4041 && ecode == ARRAY_TYPE
4042 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4043 etype)))
4044 return build1 (VIEW_CONVERT_EXPR, type, expr);
4045
4046 /* If we are converting between tagged types, try to upcast properly. */
4047 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4048 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
4049 {
4050 tree child_etype = etype;
4051 do {
4052 tree field = TYPE_FIELDS (child_etype);
4053 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
4054 return build_component_ref (expr, NULL_TREE, field, false);
4055 child_etype = TREE_TYPE (field);
4056 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4057 }
4058
4059 /* If we are converting from a smaller form of record type back to it, just
4060 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4061 size on both sides. */
4062 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4063 && smaller_form_type_p (etype, type))
4064 {
4065 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4066 false, false, false, true),
4067 expr);
4068 return build1 (VIEW_CONVERT_EXPR, type, expr);
4069 }
4070
4071 /* In all other cases of related types, make a NOP_EXPR. */
4072 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
4073 return fold_convert (type, expr);
4074
4075 switch (code)
4076 {
4077 case VOID_TYPE:
4078 return fold_build1 (CONVERT_EXPR, type, expr);
4079
4080 case INTEGER_TYPE:
4081 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4082 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4083 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4084 return unchecked_convert (type, expr, false);
4085 else if (TYPE_BIASED_REPRESENTATION_P (type))
4086 return fold_convert (type,
4087 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4088 convert (TREE_TYPE (type), expr),
4089 TYPE_MIN_VALUE (type)));
4090
4091 /* ... fall through ... */
4092
4093 case ENUMERAL_TYPE:
4094 case BOOLEAN_TYPE:
4095 /* If we are converting an additive expression to an integer type
4096 with lower precision, be wary of the optimization that can be
4097 applied by convert_to_integer. There are 2 problematic cases:
4098 - if the first operand was originally of a biased type,
4099 because we could be recursively called to convert it
4100 to an intermediate type and thus rematerialize the
4101 additive operator endlessly,
4102 - if the expression contains a placeholder, because an
4103 intermediate conversion that changes the sign could
4104 be inserted and thus introduce an artificial overflow
4105 at compile time when the placeholder is substituted. */
4106 if (code == INTEGER_TYPE
4107 && ecode == INTEGER_TYPE
4108 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4109 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4110 {
4111 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4112
4113 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4114 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4115 || CONTAINS_PLACEHOLDER_P (expr))
4116 return build1 (NOP_EXPR, type, expr);
4117 }
4118
4119 return fold (convert_to_integer (type, expr));
4120
4121 case POINTER_TYPE:
4122 case REFERENCE_TYPE:
4123 /* If converting between two pointers to records denoting
4124 both a template and type, adjust if needed to account
4125 for any differing offsets, since one might be negative. */
4126 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
4127 {
4128 tree bit_diff
4129 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
4130 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
4131 tree byte_diff
4132 = size_binop (CEIL_DIV_EXPR, bit_diff, sbitsize_unit_node);
4133 expr = build1 (NOP_EXPR, type, expr);
4134 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
4135 if (integer_zerop (byte_diff))
4136 return expr;
4137
4138 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4139 fold (convert (sizetype, byte_diff)));
4140 }
4141
4142 /* If converting to a thin pointer, handle specially. */
4143 if (TYPE_IS_THIN_POINTER_P (type)
4144 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
4145 return convert_to_thin_pointer (type, expr);
4146
4147 /* If converting fat pointer to normal pointer, get the pointer to the
4148 array and then convert it. */
4149 else if (TYPE_IS_FAT_POINTER_P (etype))
4150 expr
4151 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
4152
4153 return fold (convert_to_pointer (type, expr));
4154
4155 case REAL_TYPE:
4156 return fold (convert_to_real (type, expr));
4157
4158 case RECORD_TYPE:
4159 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4160 {
4161 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 1);
4162
4163 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4164 convert (TREE_TYPE (TYPE_FIELDS (type)),
4165 expr));
4166 return gnat_build_constructor (type, v);
4167 }
4168
4169 /* ... fall through ... */
4170
4171 case ARRAY_TYPE:
4172 /* In these cases, assume the front-end has validated the conversion.
4173 If the conversion is valid, it will be a bit-wise conversion, so
4174 it can be viewed as an unchecked conversion. */
4175 return unchecked_convert (type, expr, false);
4176
4177 case UNION_TYPE:
4178 /* This is a either a conversion between a tagged type and some
4179 subtype, which we have to mark as a UNION_TYPE because of
4180 overlapping fields or a conversion of an Unchecked_Union. */
4181 return unchecked_convert (type, expr, false);
4182
4183 case UNCONSTRAINED_ARRAY_TYPE:
4184 /* If the input is a VECTOR_TYPE, convert to the representative
4185 array type first. */
4186 if (ecode == VECTOR_TYPE)
4187 {
4188 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4189 etype = TREE_TYPE (expr);
4190 ecode = TREE_CODE (etype);
4191 }
4192
4193 /* If EXPR is a constrained array, take its address, convert it to a
4194 fat pointer, and then dereference it. Likewise if EXPR is a
4195 record containing both a template and a constrained array.
4196 Note that a record representing a justified modular type
4197 always represents a packed constrained array. */
4198 if (ecode == ARRAY_TYPE
4199 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4200 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4201 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4202 return
4203 build_unary_op
4204 (INDIRECT_REF, NULL_TREE,
4205 convert_to_fat_pointer (TREE_TYPE (type),
4206 build_unary_op (ADDR_EXPR,
4207 NULL_TREE, expr)));
4208
4209 /* Do something very similar for converting one unconstrained
4210 array to another. */
4211 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4212 return
4213 build_unary_op (INDIRECT_REF, NULL_TREE,
4214 convert (TREE_TYPE (type),
4215 build_unary_op (ADDR_EXPR,
4216 NULL_TREE, expr)));
4217 else
4218 gcc_unreachable ();
4219
4220 case COMPLEX_TYPE:
4221 return fold (convert_to_complex (type, expr));
4222
4223 default:
4224 gcc_unreachable ();
4225 }
4226 }
4227 \f
4228 /* Remove all conversions that are done in EXP. This includes converting
4229 from a padded type or to a justified modular type. If TRUE_ADDRESS
4230 is true, always return the address of the containing object even if
4231 the address is not bit-aligned. */
4232
4233 tree
4234 remove_conversions (tree exp, bool true_address)
4235 {
4236 switch (TREE_CODE (exp))
4237 {
4238 case CONSTRUCTOR:
4239 if (true_address
4240 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4241 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4242 return
4243 remove_conversions (VEC_index (constructor_elt,
4244 CONSTRUCTOR_ELTS (exp), 0)->value,
4245 true);
4246 break;
4247
4248 case COMPONENT_REF:
4249 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4250 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4251 break;
4252
4253 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
4254 CASE_CONVERT:
4255 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4256
4257 default:
4258 break;
4259 }
4260
4261 return exp;
4262 }
4263 \f
/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
   refers to the underlying array.  If it has TYPE_CONTAINS_TEMPLATE_P,
   likewise return an expression pointing to the underlying array.
   Otherwise, return EXP unchanged.  */

tree
maybe_unconstrained_array (tree exp)
{
  enum tree_code code = TREE_CODE (exp);
  tree new_exp;

  switch (TREE_CODE (TREE_TYPE (exp)))
    {
    case UNCONSTRAINED_ARRAY_TYPE:
      /* For a reference through a fat pointer, dereference the pointer to
	 the array found in the first field of the fat pointer record.  */
      if (code == UNCONSTRAINED_ARRAY_REF)
	{
	  new_exp = TREE_OPERAND (exp, 0);
	  new_exp
	    = build_unary_op (INDIRECT_REF, NULL_TREE,
			      build_component_ref (new_exp, NULL_TREE,
						   TYPE_FIELDS
						   (TREE_TYPE (new_exp)),
						   false));
	  /* Preserve the read-only status of the original reference.  */
	  TREE_READONLY (new_exp) = TREE_READONLY (exp);
	  return new_exp;
	}

      /* For a NULL_EXPR, rebuild it with the type of the underlying array,
	 i.e. the designated type of the array field of the fat pointer.  */
      else if (code == NULL_EXPR)
	return build1 (NULL_EXPR,
		       TREE_TYPE (TREE_TYPE (TYPE_FIELDS
					     (TREE_TYPE (TREE_TYPE (exp))))),
		       TREE_OPERAND (exp, 0));

      /* NOTE(review): no break here, so any other expression of
	 unconstrained array type falls through into the RECORD_TYPE
	 handling below — presumably harmless since the padding/template
	 predicates there should not match, but confirm this fallthrough
	 is intentional.  */
    case RECORD_TYPE:
      /* If this is a padded type, convert to the unpadded type and see if
	 it contains a template.  */
      if (TYPE_PADDING_P (TREE_TYPE (exp)))
	{
	  new_exp = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
	  if (TREE_CODE (TREE_TYPE (new_exp)) == RECORD_TYPE
	      && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new_exp)))
	    /* The array is the second field of the template record.  */
	    return
	      build_component_ref (new_exp, NULL_TREE,
				   DECL_CHAIN
				   (TYPE_FIELDS (TREE_TYPE (new_exp))),
				   false);
	}
      else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
	/* Likewise when the record directly contains a template.  */
	return
	  build_component_ref (exp, NULL_TREE,
			       DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))),
			       false);
      break;

    default:
      break;
    }

  return exp;
}
4323
4324 /* If EXP's type is a VECTOR_TYPE, return EXP converted to the associated
4325 TYPE_REPRESENTATIVE_ARRAY. */
4326
4327 tree
4328 maybe_vector_array (tree exp)
4329 {
4330 tree etype = TREE_TYPE (exp);
4331
4332 if (VECTOR_TYPE_P (etype))
4333 exp = convert (TYPE_REPRESENTATIVE_ARRAY (etype), exp);
4334
4335 return exp;
4336 }
4337 \f
4338 /* Return true if EXPR is an expression that can be folded as an operand
4339 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
4340
4341 static bool
4342 can_fold_for_view_convert_p (tree expr)
4343 {
4344 tree t1, t2;
4345
4346 /* The folder will fold NOP_EXPRs between integral types with the same
4347 precision (in the middle-end's sense). We cannot allow it if the
4348 types don't have the same precision in the Ada sense as well. */
4349 if (TREE_CODE (expr) != NOP_EXPR)
4350 return true;
4351
4352 t1 = TREE_TYPE (expr);
4353 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4354
4355 /* Defer to the folder for non-integral conversions. */
4356 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4357 return true;
4358
4359 /* Only fold conversions that preserve both precisions. */
4360 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4361 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4362 return true;
4363
4364 return false;
4365 }
4366
4367 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
4368 If NOTRUNC_P is true, truncation operations should be suppressed.
4369
4370 Special care is required with (source or target) integral types whose
4371 precision is not equal to their size, to make sure we fetch or assign
4372 the value bits whose location might depend on the endianness, e.g.
4373
4374 Rmsize : constant := 8;
4375 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4376
4377 type Bit_Array is array (1 .. Rmsize) of Boolean;
4378 pragma Pack (Bit_Array);
4379
4380 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4381
4382 Value : Int := 2#1000_0001#;
4383 Vbits : Bit_Array := To_Bit_Array (Value);
4384
4385 we expect the 8 bits at Vbits'Address to always contain Value, while
4386 their original location depends on the endianness, at Value'Address
4387 on a little-endian architecture but not on a big-endian one. */
4388
4389 tree
4390 unchecked_convert (tree type, tree expr, bool notrunc_p)
4391 {
4392 tree etype = TREE_TYPE (expr);
4393 enum tree_code ecode = TREE_CODE (etype);
4394 enum tree_code code = TREE_CODE (type);
4395 int c;
4396
4397 /* If the expression is already of the right type, we are done. */
4398 if (etype == type)
4399 return expr;
4400
4401 /* If both types types are integral just do a normal conversion.
4402 Likewise for a conversion to an unconstrained array. */
4403 if ((((INTEGRAL_TYPE_P (type)
4404 && !(code == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (type)))
4405 || (POINTER_TYPE_P (type) && ! TYPE_IS_THIN_POINTER_P (type))
4406 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
4407 && ((INTEGRAL_TYPE_P (etype)
4408 && !(ecode == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (etype)))
4409 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
4410 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
4411 || code == UNCONSTRAINED_ARRAY_TYPE)
4412 {
4413 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4414 {
4415 tree ntype = copy_type (etype);
4416 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
4417 TYPE_MAIN_VARIANT (ntype) = ntype;
4418 expr = build1 (NOP_EXPR, ntype, expr);
4419 }
4420
4421 if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
4422 {
4423 tree rtype = copy_type (type);
4424 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
4425 TYPE_MAIN_VARIANT (rtype) = rtype;
4426 expr = convert (rtype, expr);
4427 expr = build1 (NOP_EXPR, type, expr);
4428 }
4429 else
4430 expr = convert (type, expr);
4431 }
4432
4433 /* If we are converting to an integral type whose precision is not equal
4434 to its size, first unchecked convert to a record that contains an
4435 object of the output type. Then extract the field. */
4436 else if (INTEGRAL_TYPE_P (type)
4437 && TYPE_RM_SIZE (type)
4438 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4439 GET_MODE_BITSIZE (TYPE_MODE (type))))
4440 {
4441 tree rec_type = make_node (RECORD_TYPE);
4442 tree field = create_field_decl (get_identifier ("OBJ"), type, rec_type,
4443 NULL_TREE, NULL_TREE, 1, 0);
4444
4445 TYPE_FIELDS (rec_type) = field;
4446 layout_type (rec_type);
4447
4448 expr = unchecked_convert (rec_type, expr, notrunc_p);
4449 expr = build_component_ref (expr, NULL_TREE, field, false);
4450 }
4451
4452 /* Similarly if we are converting from an integral type whose precision
4453 is not equal to its size. */
4454 else if (INTEGRAL_TYPE_P (etype)
4455 && TYPE_RM_SIZE (etype)
4456 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
4457 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4458 {
4459 tree rec_type = make_node (RECORD_TYPE);
4460 tree field = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
4461 NULL_TREE, NULL_TREE, 1, 0);
4462 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 1);
4463
4464 TYPE_FIELDS (rec_type) = field;
4465 layout_type (rec_type);
4466
4467 CONSTRUCTOR_APPEND_ELT (v, field, expr);
4468 expr = gnat_build_constructor (rec_type, v);
4469 expr = unchecked_convert (type, expr, notrunc_p);
4470 }
4471
4472 /* If we are converting from a scalar type to a type with a different size,
4473 we need to pad to have the same size on both sides.
4474
4475 ??? We cannot do it unconditionally because unchecked conversions are
4476 used liberally by the front-end to implement polymorphism, e.g. in:
4477
4478 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
4479 return p___size__4 (p__object!(S191s.all));
4480
4481 so we skip all expressions that are references. */
4482 else if (!REFERENCE_CLASS_P (expr)
4483 && !AGGREGATE_TYPE_P (etype)
4484 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
4485 && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
4486 {
4487 if (c < 0)
4488 {
4489 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4490 false, false, false, true),
4491 expr);
4492 expr = unchecked_convert (type, expr, notrunc_p);
4493 }
4494 else
4495 {
4496 tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
4497 false, false, false, true);
4498 expr = unchecked_convert (rec_type, expr, notrunc_p);
4499 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (rec_type),
4500 false);
4501 }
4502 }
4503
4504 /* We have a special case when we are converting between two unconstrained
4505 array types. In that case, take the address, convert the fat pointer
4506 types, and dereference. */
4507 else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
4508 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4509 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
4510 build_unary_op (ADDR_EXPR, NULL_TREE,
4511 expr)));
4512
4513 /* Another special case is when we are converting to a vector type from its
4514 representative array type; this a regular conversion. */
4515 else if (code == VECTOR_TYPE
4516 && ecode == ARRAY_TYPE
4517 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4518 etype))
4519 expr = convert (type, expr);
4520
4521 else
4522 {
4523 expr = maybe_unconstrained_array (expr);
4524 etype = TREE_TYPE (expr);
4525 ecode = TREE_CODE (etype);
4526 if (can_fold_for_view_convert_p (expr))
4527 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
4528 else
4529 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
4530 }
4531
4532 /* If the result is an integral type whose precision is not equal to its
4533 size, sign- or zero-extend the result. We need not do this if the input
4534 is an integral type of the same precision and signedness or if the output
4535 is a biased type or if both the input and output are unsigned. */
4536 if (!notrunc_p
4537 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4538 && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
4539 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4540 GET_MODE_BITSIZE (TYPE_MODE (type)))
4541 && !(INTEGRAL_TYPE_P (etype)
4542 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
4543 && operand_equal_p (TYPE_RM_SIZE (type),
4544 (TYPE_RM_SIZE (etype) != 0
4545 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
4546 0))
4547 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
4548 {
4549 tree base_type
4550 = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
4551 tree shift_expr
4552 = convert (base_type,
4553 size_binop (MINUS_EXPR,
4554 bitsize_int
4555 (GET_MODE_BITSIZE (TYPE_MODE (type))),
4556 TYPE_RM_SIZE (type)));
4557 expr
4558 = convert (type,
4559 build_binary_op (RSHIFT_EXPR, base_type,
4560 build_binary_op (LSHIFT_EXPR, base_type,
4561 convert (base_type, expr),
4562 shift_expr),
4563 shift_expr));
4564 }
4565
4566 /* An unchecked conversion should never raise Constraint_Error. The code
4567 below assumes that GCC's conversion routines overflow the same way that
4568 the underlying hardware does. This is probably true. In the rare case
4569 when it is false, we can rely on the fact that such conversions are
4570 erroneous anyway. */
4571 if (TREE_CODE (expr) == INTEGER_CST)
4572 TREE_OVERFLOW (expr) = 0;
4573
  /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
     show no longer constant.  */
4576 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4577 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
4578 OEP_ONLY_CONST))
4579 TREE_CONSTANT (expr) = 0;
4580
4581 return expr;
4582 }
4583 \f
4584 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
4585 the latter being a record type as predicated by Is_Record_Type. */
4586
4587 enum tree_code
4588 tree_code_for_record_type (Entity_Id gnat_type)
4589 {
4590 Node_Id component_list
4591 = Component_List (Type_Definition
4592 (Declaration_Node
4593 (Implementation_Base_Type (gnat_type))));
4594 Node_Id component;
4595
4596 /* Make this a UNION_TYPE unless it's either not an Unchecked_Union or
4597 we have a non-discriminant field outside a variant. In either case,
4598 it's a RECORD_TYPE. */
4599
4600 if (!Is_Unchecked_Union (gnat_type))
4601 return RECORD_TYPE;
4602
4603 for (component = First_Non_Pragma (Component_Items (component_list));
4604 Present (component);
4605 component = Next_Non_Pragma (component))
4606 if (Ekind (Defining_Entity (component)) == E_Component)
4607 return RECORD_TYPE;
4608
4609 return UNION_TYPE;
4610 }
4611
4612 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
4613 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
4614 according to the presence of an alignment clause on the type or, if it
4615 is an array, on the component type. */
4616
4617 bool
4618 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
4619 {
4620 gnat_type = Underlying_Type (gnat_type);
4621
4622 *align_clause = Present (Alignment_Clause (gnat_type));
4623
4624 if (Is_Array_Type (gnat_type))
4625 {
4626 gnat_type = Underlying_Type (Component_Type (gnat_type));
4627 if (Present (Alignment_Clause (gnat_type)))
4628 *align_clause = true;
4629 }
4630
4631 if (!Is_Floating_Point_Type (gnat_type))
4632 return false;
4633
4634 if (UI_To_Int (Esize (gnat_type)) != 64)
4635 return false;
4636
4637 return true;
4638 }
4639
4640 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
4641 size is greater or equal to 64 bits, or an array of such a type. Set
4642 ALIGN_CLAUSE according to the presence of an alignment clause on the
4643 type or, if it is an array, on the component type. */
4644
4645 bool
4646 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
4647 {
4648 gnat_type = Underlying_Type (gnat_type);
4649
4650 *align_clause = Present (Alignment_Clause (gnat_type));
4651
4652 if (Is_Array_Type (gnat_type))
4653 {
4654 gnat_type = Underlying_Type (Component_Type (gnat_type));
4655 if (Present (Alignment_Clause (gnat_type)))
4656 *align_clause = true;
4657 }
4658
4659 if (!Is_Scalar_Type (gnat_type))
4660 return false;
4661
4662 if (UI_To_Int (Esize (gnat_type)) < 64)
4663 return false;
4664
4665 return true;
4666 }
4667
4668 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
4669 component of an aggregate type. */
4670
4671 bool
4672 type_for_nonaliased_component_p (tree gnu_type)
4673 {
4674 /* If the type is passed by reference, we may have pointers to the
4675 component so it cannot be made non-aliased. */
4676 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
4677 return false;
4678
4679 /* We used to say that any component of aggregate type is aliased
4680 because the front-end may take 'Reference of it. The front-end
4681 has been enhanced in the meantime so as to use a renaming instead
4682 in most cases, but the back-end can probably take the address of
4683 such a component too so we go for the conservative stance.
4684
4685 For instance, we might need the address of any array type, even
4686 if normally passed by copy, to construct a fat pointer if the
4687 component is used as an actual for an unconstrained formal.
4688
4689 Likewise for record types: even if a specific record subtype is
4690 passed by copy, the parent type might be passed by ref (e.g. if
4691 it's of variable size) and we might take the address of a child
4692 component to pass to a parent formal. We have no way to check
4693 for such conditions here. */
4694 if (AGGREGATE_TYPE_P (gnu_type))
4695 return false;
4696
4697 return true;
4698 }
4699
4700 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
4701
4702 bool
4703 smaller_form_type_p (tree type, tree orig_type)
4704 {
4705 tree size, osize;
4706
4707 /* We're not interested in variants here. */
4708 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
4709 return false;
4710
4711 /* Like a variant, a packable version keeps the original TYPE_NAME. */
4712 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
4713 return false;
4714
4715 size = TYPE_SIZE (type);
4716 osize = TYPE_SIZE (orig_type);
4717
4718 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
4719 return false;
4720
4721 return tree_int_cst_lt (size, osize) != 0;
4722 }
4723
/* Perform final processing on global declarations: trigger the rest of
   the compilation pipeline for the unit and emit debug information for
   all global declarations collected in global_decls.  */

void
gnat_write_global_declarations (void)
{
  /* Proceed to optimize and emit assembly.
     FIXME: shouldn't be the front end's responsibility to call this.  */
  cgraph_finalize_compilation_unit ();

  /* Emit debug info for all global declarations.  */
  emit_debug_global_declarations (VEC_address (tree, global_decls),
				  VEC_length (tree, global_decls));
}
4737
4738 /* ************************************************************************
4739 * * GCC builtins support *
4740 * ************************************************************************ */
4741
4742 /* The general scheme is fairly simple:
4743
4744 For each builtin function/type to be declared, gnat_install_builtins calls
4745 internal facilities which eventually get to gnat_push_decl, which in turn
4746 tracks the so declared builtin function decls in the 'builtin_decls' global
4747 datastructure. When an Intrinsic subprogram declaration is processed, we
4748 search this global datastructure to retrieve the associated BUILT_IN DECL
4749 node. */
4750
4751 /* Search the chain of currently available builtin declarations for a node
4752 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
4753 found, if any, or NULL_TREE otherwise. */
4754 tree
4755 builtin_decl_for (tree name)
4756 {
4757 unsigned i;
4758 tree decl;
4759
4760 FOR_EACH_VEC_ELT (tree, builtin_decls, i, decl)
4761 if (DECL_NAME (decl) == name)
4762 return decl;
4763
4764 return NULL_TREE;
4765 }
4766
4767 /* The code below eventually exposes gnat_install_builtins, which declares
4768 the builtin types and functions we might need, either internally or as
4769 user accessible facilities.
4770
4771 ??? This is a first implementation shot, still in rough shape. It is
4772 heavily inspired from the "C" family implementation, with chunks copied
4773 verbatim from there.
4774
4775 Two obvious TODO candidates are
4776 o Use a more efficient name/decl mapping scheme
4777 o Devise a middle-end infrastructure to avoid having to copy
4778 pieces between front-ends. */
4779
4780 /* ----------------------------------------------------------------------- *
4781 * BUILTIN ELEMENTARY TYPES *
4782 * ----------------------------------------------------------------------- */
4783
/* Standard data types to be used in builtin argument declarations.  */

enum c_tree_index
{
  CTI_SIGNED_SIZE_TYPE, /* For format checking only.  */
  CTI_STRING_TYPE,	/* "char *", see install_builtin_elementary_types.  */
  CTI_CONST_STRING_TYPE, /* "const char *", likewise.  */

  CTI_MAX
};

/* Trees for the types above, filled in by install_builtin_elementary_types
   and accessed through the macros below.  */
static tree c_global_trees[CTI_MAX];

#define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
#define string_type_node c_global_trees[CTI_STRING_TYPE]
#define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
4800
/* ??? In addition to some attribute handlers, we currently don't support a
   (small) number of builtin-types, which in turn inhibits support for a
   number of builtin functions.  */
4804 #define wint_type_node void_type_node
4805 #define intmax_type_node void_type_node
4806 #define uintmax_type_node void_type_node
4807
4808 /* Build the void_list_node (void_type_node having been created). */
4809
4810 static tree
4811 build_void_list_node (void)
4812 {
4813 tree t = build_tree_list (NULL_TREE, void_type_node);
4814 return t;
4815 }
4816
4817 /* Used to help initialize the builtin-types.def table. When a type of
4818 the correct size doesn't exist, use error_mark_node instead of NULL.
4819 The later results in segfaults even when a decl using the type doesn't
4820 get invoked. */
4821
4822 static tree
4823 builtin_type_for_size (int size, bool unsignedp)
4824 {
4825 tree type = gnat_type_for_size (size, unsignedp);
4826 return type ? type : error_mark_node;
4827 }
4828
/* Build/push the elementary type decls that builtin functions/types
   will need.  */

static void
install_builtin_elementary_types (void)
{
  /* Signed counterpart of size_t, for format checking only (see
     c_tree_index above).  */
  signed_size_type_node = gnat_signed_type (size_type_node);
  /* NOTE(review): pid_type_node is simply mapped to "int" here.  */
  pid_type_node = integer_type_node;
  void_list_node = build_void_list_node ();

  /* "char *" and "const char *" for string-taking builtins.  */
  string_type_node = build_pointer_type (char_type_node);
  const_string_type_node
    = build_pointer_type (build_qualified_type
			  (char_type_node, TYPE_QUAL_CONST));
}
4844
4845 /* ----------------------------------------------------------------------- *
4846 * BUILTIN FUNCTION TYPES *
4847 * ----------------------------------------------------------------------- */
4848
/* Now, builtin function types per se.  */

/* One enumerator per entry of builtin-types.def; each DEF_* macro below
   expands an entry to its NAME.  The corresponding trees are built by
   install_builtin_function_types into the builtin_types array.  */
enum c_builtin_type
{
#define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
#define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
#define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
#define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
#define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
#define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
#define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
#define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
#define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
#define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
#define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
#define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
#define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
#define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
#define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG6) \
  NAME,
#define DEF_POINTER_TYPE(NAME, TYPE) NAME,
#include "builtin-types.def"
#undef DEF_PRIMITIVE_TYPE
#undef DEF_FUNCTION_TYPE_0
#undef DEF_FUNCTION_TYPE_1
#undef DEF_FUNCTION_TYPE_2
#undef DEF_FUNCTION_TYPE_3
#undef DEF_FUNCTION_TYPE_4
#undef DEF_FUNCTION_TYPE_5
#undef DEF_FUNCTION_TYPE_6
#undef DEF_FUNCTION_TYPE_7
#undef DEF_FUNCTION_TYPE_VAR_0
#undef DEF_FUNCTION_TYPE_VAR_1
#undef DEF_FUNCTION_TYPE_VAR_2
#undef DEF_FUNCTION_TYPE_VAR_3
#undef DEF_FUNCTION_TYPE_VAR_4
#undef DEF_FUNCTION_TYPE_VAR_5
#undef DEF_POINTER_TYPE
  BT_LAST
};

typedef enum c_builtin_type builtin_type;

/* A temporary array used in communication with def_fn_type.  */
static GTY(()) tree builtin_types[(int) BT_LAST + 1];
4894
4895 /* A helper function for install_builtin_types. Build function type
4896 for DEF with return type RET and N arguments. If VAR is true, then the
4897 function should be variadic after those N arguments.
4898
4899 Takes special care not to ICE if any of the types involved are
4900 error_mark_node, which indicates that said type is not in fact available
4901 (see builtin_type_for_size). In which case the function type as a whole
4902 should be error_mark_node. */
4903
4904 static void
4905 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
4906 {
4907 tree args = NULL, t;
4908 va_list list;
4909 int i;
4910
4911 va_start (list, n);
4912 for (i = 0; i < n; ++i)
4913 {
4914 builtin_type a = (builtin_type) va_arg (list, int);
4915 t = builtin_types[a];
4916 if (t == error_mark_node)
4917 goto egress;
4918 args = tree_cons (NULL_TREE, t, args);
4919 }
4920 va_end (list);
4921
4922 args = nreverse (args);
4923 if (!var)
4924 args = chainon (args, void_list_node);
4925
4926 t = builtin_types[ret];
4927 if (t == error_mark_node)
4928 goto egress;
4929 t = build_function_type (t, args);
4930
4931 egress:
4932 builtin_types[def] = t;
4933 va_end (list);
4934 }
4935
4936 /* Build the builtin function types and install them in the builtin_types
4937 array for later use in builtin function decls. */
4938
4939 static void
4940 install_builtin_function_types (void)
4941 {
4942 tree va_list_ref_type_node;
4943 tree va_list_arg_type_node;
4944
4945 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4946 {
4947 va_list_arg_type_node = va_list_ref_type_node =
4948 build_pointer_type (TREE_TYPE (va_list_type_node));
4949 }
4950 else
4951 {
4952 va_list_arg_type_node = va_list_type_node;
4953 va_list_ref_type_node = build_reference_type (va_list_type_node);
4954 }
4955
4956 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
4957 builtin_types[ENUM] = VALUE;
4958 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
4959 def_fn_type (ENUM, RETURN, 0, 0);
4960 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
4961 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
4962 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
4963 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
4964 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
4965 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
4966 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
4967 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
4968 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
4969 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
4970 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4971 ARG6) \
4972 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
4973 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4974 ARG6, ARG7) \
4975 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
4976 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
4977 def_fn_type (ENUM, RETURN, 1, 0);
4978 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
4979 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
4980 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
4981 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
4982 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
4983 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
4984 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
4985 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
4986 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
4987 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
4988 #define DEF_POINTER_TYPE(ENUM, TYPE) \
4989 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
4990
4991 #include "builtin-types.def"
4992
4993 #undef DEF_PRIMITIVE_TYPE
4994 #undef DEF_FUNCTION_TYPE_1
4995 #undef DEF_FUNCTION_TYPE_2
4996 #undef DEF_FUNCTION_TYPE_3
4997 #undef DEF_FUNCTION_TYPE_4
4998 #undef DEF_FUNCTION_TYPE_5
4999 #undef DEF_FUNCTION_TYPE_6
5000 #undef DEF_FUNCTION_TYPE_VAR_0
5001 #undef DEF_FUNCTION_TYPE_VAR_1
5002 #undef DEF_FUNCTION_TYPE_VAR_2
5003 #undef DEF_FUNCTION_TYPE_VAR_3
5004 #undef DEF_FUNCTION_TYPE_VAR_4
5005 #undef DEF_FUNCTION_TYPE_VAR_5
5006 #undef DEF_POINTER_TYPE
5007 builtin_types[(int) BT_LAST] = NULL_TREE;
5008 }
5009
5010 /* ----------------------------------------------------------------------- *
5011 * BUILTIN ATTRIBUTES *
5012 * ----------------------------------------------------------------------- */
5013
/* One enumerator per attribute tree predeclared by builtin-attrs.def.  */
enum built_in_attribute
{
#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
  ATTR_LAST
};

/* Trees for the predeclared attributes, indexed by built_in_attribute
   and filled in by install_builtin_attributes.  */
static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
5029
/* Build the trees for the attributes enumerated above: integer constants,
   identifiers, and TREE_LISTs chaining previously-built entries.  */
static void
install_builtin_attributes (void)
{
  /* Fill in the built_in_attributes array.  */
#define DEF_ATTR_NULL_TREE(ENUM) \
  built_in_attributes[(int) ENUM] = NULL_TREE;
#define DEF_ATTR_INT(ENUM, VALUE) \
  built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
#define DEF_ATTR_IDENT(ENUM, STRING) \
  built_in_attributes[(int) ENUM] = get_identifier (STRING);
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN)	\
  built_in_attributes[(int) ENUM]			\
    = tree_cons (built_in_attributes[(int) PURPOSE],	\
		 built_in_attributes[(int) VALUE],	\
		 built_in_attributes[(int) CHAIN]);
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
}
5051
5052 /* Handle a "const" attribute; arguments as in
5053 struct attribute_spec.handler. */
5054
5055 static tree
5056 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5057 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5058 bool *no_add_attrs)
5059 {
5060 if (TREE_CODE (*node) == FUNCTION_DECL)
5061 TREE_READONLY (*node) = 1;
5062 else
5063 *no_add_attrs = true;
5064
5065 return NULL_TREE;
5066 }
5067
5068 /* Handle a "nothrow" attribute; arguments as in
5069 struct attribute_spec.handler. */
5070
5071 static tree
5072 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
5073 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5074 bool *no_add_attrs)
5075 {
5076 if (TREE_CODE (*node) == FUNCTION_DECL)
5077 TREE_NOTHROW (*node) = 1;
5078 else
5079 *no_add_attrs = true;
5080
5081 return NULL_TREE;
5082 }
5083
5084 /* Handle a "pure" attribute; arguments as in
5085 struct attribute_spec.handler. */
5086
5087 static tree
5088 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5089 int ARG_UNUSED (flags), bool *no_add_attrs)
5090 {
5091 if (TREE_CODE (*node) == FUNCTION_DECL)
5092 DECL_PURE_P (*node) = 1;
5093 /* ??? TODO: Support types. */
5094 else
5095 {
5096 warning (OPT_Wattributes, "%qs attribute ignored",
5097 IDENTIFIER_POINTER (name));
5098 *no_add_attrs = true;
5099 }
5100
5101 return NULL_TREE;
5102 }
5103
/* Handle a "no vops" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
			 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
			 bool *ARG_UNUSED (no_add_attrs))
{
  /* NOTE(review): hard assert rather than warning — presumably this
     attribute is only ever applied internally to function decls;
     confirm against callers.  */
  gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
  DECL_IS_NOVOPS (*node) = 1;
  return NULL_TREE;
}
5116
5117 /* Helper for nonnull attribute handling; fetch the operand number
5118 from the attribute argument list. */
5119
5120 static bool
5121 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
5122 {
5123 /* Verify the arg number is a constant. */
5124 if (TREE_CODE (arg_num_expr) != INTEGER_CST
5125 || TREE_INT_CST_HIGH (arg_num_expr) != 0)
5126 return false;
5127
5128 *valp = TREE_INT_CST_LOW (arg_num_expr);
5129 return true;
5130 }
5131
/* Handle the "nonnull" attribute.  NODE points to the function type the
   attribute is applied to; ARGS is the attribute's argument list of
   parameter numbers (possibly empty).  */
static tree
handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
			  tree args, int ARG_UNUSED (flags),
			  bool *no_add_attrs)
{
  tree type = *node;
  unsigned HOST_WIDE_INT attr_arg_num;

  /* If no arguments are specified, all pointer arguments should be
     non-null.  Verify a full prototype is given so that the arguments
     will have the correct types when we actually check them later.  */
  if (!args)
    {
      if (!prototype_p (type))
	{
	  error ("nonnull attribute without arguments on a non-prototype");
	  *no_add_attrs = true;
	}
      return NULL_TREE;
    }

  /* Argument list specified.  Verify that each argument number references
     a pointer argument.  */
  /* NOTE(review): attr_arg_num is never advanced in this loop, so the
     diagnostics below always report "argument 1" — looks suspicious;
     compare with the c-family implementation.  */
  for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
    {
      tree argument;
      unsigned HOST_WIDE_INT arg_num = 0, ck_num;

      if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
	{
	  error ("nonnull argument has invalid operand number (argument %lu)",
		 (unsigned long) attr_arg_num);
	  *no_add_attrs = true;
	  return NULL_TREE;
	}

      argument = TYPE_ARG_TYPES (type);
      if (argument)
	{
	  /* Walk to the arg_num-th parameter type.  */
	  for (ck_num = 1; ; ck_num++)
	    {
	      if (!argument || ck_num == arg_num)
		break;
	      argument = TREE_CHAIN (argument);
	    }

	  /* Running off the end, or hitting the void terminator, means
	     the operand number exceeds the parameter count.  */
	  if (!argument
	      || TREE_CODE (TREE_VALUE (argument)) == VOID_TYPE)
	    {
	      error ("nonnull argument with out-of-range operand number "
		     "(argument %lu, operand %lu)",
		     (unsigned long) attr_arg_num, (unsigned long) arg_num);
	      *no_add_attrs = true;
	      return NULL_TREE;
	    }

	  if (TREE_CODE (TREE_VALUE (argument)) != POINTER_TYPE)
	    {
	      error ("nonnull argument references non-pointer operand "
		     "(argument %lu, operand %lu)",
		     (unsigned long) attr_arg_num, (unsigned long) arg_num);
	      *no_add_attrs = true;
	      return NULL_TREE;
	    }
	}
    }

  return NULL_TREE;
}
5202
5203 /* Handle a "sentinel" attribute. */
5204
5205 static tree
5206 handle_sentinel_attribute (tree *node, tree name, tree args,
5207 int ARG_UNUSED (flags), bool *no_add_attrs)
5208 {
5209 tree params = TYPE_ARG_TYPES (*node);
5210
5211 if (!prototype_p (*node))
5212 {
5213 warning (OPT_Wattributes,
5214 "%qs attribute requires prototypes with named arguments",
5215 IDENTIFIER_POINTER (name));
5216 *no_add_attrs = true;
5217 }
5218 else
5219 {
5220 while (TREE_CHAIN (params))
5221 params = TREE_CHAIN (params);
5222
5223 if (VOID_TYPE_P (TREE_VALUE (params)))
5224 {
5225 warning (OPT_Wattributes,
5226 "%qs attribute only applies to variadic functions",
5227 IDENTIFIER_POINTER (name));
5228 *no_add_attrs = true;
5229 }
5230 }
5231
5232 if (args)
5233 {
5234 tree position = TREE_VALUE (args);
5235
5236 if (TREE_CODE (position) != INTEGER_CST)
5237 {
5238 warning (0, "requested position is not an integer constant");
5239 *no_add_attrs = true;
5240 }
5241 else
5242 {
5243 if (tree_int_cst_lt (position, integer_zero_node))
5244 {
5245 warning (0, "requested position is less than zero");
5246 *no_add_attrs = true;
5247 }
5248 }
5249 }
5250
5251 return NULL_TREE;
5252 }
5253
/* Handle a "noreturn" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
			   int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree type = TREE_TYPE (*node);

  /* See FIXME comment in c_common_attribute_table.  */
  if (TREE_CODE (*node) == FUNCTION_DECL)
    TREE_THIS_VOLATILE (*node) = 1;
  /* For a pointer to function, rebuild the pointed-to type with the
     volatile qualifier set (the same bit set on decls above), keeping
     its existing const-ness.  */
  else if (TREE_CODE (type) == POINTER_TYPE
	   && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
    TREE_TYPE (*node)
      = build_pointer_type
	(build_type_variant (TREE_TYPE (type),
			     TYPE_READONLY (TREE_TYPE (type)), 1));
  else
    {
      warning (OPT_Wattributes, "%qs attribute ignored",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
5281
5282 /* Handle a "leaf" attribute; arguments as in
5283 struct attribute_spec.handler. */
5284
5285 static tree
5286 handle_leaf_attribute (tree *node, tree name,
5287 tree ARG_UNUSED (args),
5288 int ARG_UNUSED (flags), bool *no_add_attrs)
5289 {
5290 if (TREE_CODE (*node) != FUNCTION_DECL)
5291 {
5292 warning (OPT_Wattributes, "%qE attribute ignored", name);
5293 *no_add_attrs = true;
5294 }
5295 if (!TREE_PUBLIC (*node))
5296 {
5297 warning (OPT_Wattributes, "%qE attribute has no effect", name);
5298 *no_add_attrs = true;
5299 }
5300
5301 return NULL_TREE;
5302 }
5303
5304 /* Handle a "malloc" attribute; arguments as in
5305 struct attribute_spec.handler. */
5306
5307 static tree
5308 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5309 int ARG_UNUSED (flags), bool *no_add_attrs)
5310 {
5311 if (TREE_CODE (*node) == FUNCTION_DECL
5312 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
5313 DECL_IS_MALLOC (*node) = 1;
5314 else
5315 {
5316 warning (OPT_Wattributes, "%qs attribute ignored",
5317 IDENTIFIER_POINTER (name));
5318 *no_add_attrs = true;
5319 }
5320
5321 return NULL_TREE;
5322 }
5323
/* Fake handler for attributes we don't properly support: accept the
   attribute without acting on it, so declarations carrying it still
   compile.  */

tree
fake_attribute_handler (tree * ARG_UNUSED (node),
			tree ARG_UNUSED (name),
			tree ARG_UNUSED (args),
			int ARG_UNUSED (flags),
			bool * ARG_UNUSED (no_add_attrs))
{
  return NULL_TREE;
}
5335
5336 /* Handle a "type_generic" attribute. */
5337
5338 static tree
5339 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
5340 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5341 bool * ARG_UNUSED (no_add_attrs))
5342 {
5343 tree params;
5344
5345 /* Ensure we have a function type. */
5346 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
5347
5348 params = TYPE_ARG_TYPES (*node);
5349 while (params && ! VOID_TYPE_P (TREE_VALUE (params)))
5350 params = TREE_CHAIN (params);
5351
5352 /* Ensure we have a variadic function. */
5353 gcc_assert (!params);
5354
5355 return NULL_TREE;
5356 }
5357
5358 /* Handle a "vector_size" attribute; arguments as in
5359 struct attribute_spec.handler. */
5360
5361 static tree
5362 handle_vector_size_attribute (tree *node, tree name, tree args,
5363 int ARG_UNUSED (flags),
5364 bool *no_add_attrs)
5365 {
5366 unsigned HOST_WIDE_INT vecsize, nunits;
5367 enum machine_mode orig_mode;
5368 tree type = *node, new_type, size;
5369
5370 *no_add_attrs = true;
5371
5372 size = TREE_VALUE (args);
5373
5374 if (!host_integerp (size, 1))
5375 {
5376 warning (OPT_Wattributes, "%qs attribute ignored",
5377 IDENTIFIER_POINTER (name));
5378 return NULL_TREE;
5379 }
5380
5381 /* Get the vector size (in bytes). */
5382 vecsize = tree_low_cst (size, 1);
5383
5384 /* We need to provide for vector pointers, vector arrays, and
5385 functions returning vectors. For example:
5386
5387 __attribute__((vector_size(16))) short *foo;
5388
5389 In this case, the mode is SI, but the type being modified is
5390 HI, so we need to look further. */
5391
5392 while (POINTER_TYPE_P (type)
5393 || TREE_CODE (type) == FUNCTION_TYPE
5394 || TREE_CODE (type) == ARRAY_TYPE)
5395 type = TREE_TYPE (type);
5396
5397 /* Get the mode of the type being modified. */
5398 orig_mode = TYPE_MODE (type);
5399
5400 if ((!INTEGRAL_TYPE_P (type)
5401 && !SCALAR_FLOAT_TYPE_P (type)
5402 && !FIXED_POINT_TYPE_P (type))
5403 || (!SCALAR_FLOAT_MODE_P (orig_mode)
5404 && GET_MODE_CLASS (orig_mode) != MODE_INT
5405 && !ALL_SCALAR_FIXED_POINT_MODE_P (orig_mode))
5406 || !host_integerp (TYPE_SIZE_UNIT (type), 1)
5407 || TREE_CODE (type) == BOOLEAN_TYPE)
5408 {
5409 error ("invalid vector type for attribute %qs",
5410 IDENTIFIER_POINTER (name));
5411 return NULL_TREE;
5412 }
5413
5414 if (vecsize % tree_low_cst (TYPE_SIZE_UNIT (type), 1))
5415 {
5416 error ("vector size not an integral multiple of component size");
5417 return NULL;
5418 }
5419
5420 if (vecsize == 0)
5421 {
5422 error ("zero vector size");
5423 return NULL;
5424 }
5425
5426 /* Calculate how many units fit in the vector. */
5427 nunits = vecsize / tree_low_cst (TYPE_SIZE_UNIT (type), 1);
5428 if (nunits & (nunits - 1))
5429 {
5430 error ("number of components of the vector not a power of two");
5431 return NULL_TREE;
5432 }
5433
5434 new_type = build_vector_type (type, nunits);
5435
5436 /* Build back pointers if needed. */
5437 *node = reconstruct_complex_type (*node, new_type);
5438
5439 return NULL_TREE;
5440 }
5441
5442 /* Handle a "vector_type" attribute; arguments as in
5443 struct attribute_spec.handler. */
5444
5445 static tree
5446 handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5447 int ARG_UNUSED (flags),
5448 bool *no_add_attrs)
5449 {
5450 /* Vector representative type and size. */
5451 tree rep_type = *node;
5452 tree rep_size = TYPE_SIZE_UNIT (rep_type);
5453 tree rep_name;
5454
5455 /* Vector size in bytes and number of units. */
5456 unsigned HOST_WIDE_INT vec_bytes, vec_units;
5457
5458 /* Vector element type and mode. */
5459 tree elem_type;
5460 enum machine_mode elem_mode;
5461
5462 *no_add_attrs = true;
5463
5464 /* Get the representative array type, possibly nested within a
5465 padding record e.g. for alignment purposes. */
5466
5467 if (TYPE_IS_PADDING_P (rep_type))
5468 rep_type = TREE_TYPE (TYPE_FIELDS (rep_type));
5469
5470 if (TREE_CODE (rep_type) != ARRAY_TYPE)
5471 {
5472 error ("attribute %qs applies to array types only",
5473 IDENTIFIER_POINTER (name));
5474 return NULL_TREE;
5475 }
5476
5477 /* Silently punt on variable sizes. We can't make vector types for them,
5478 need to ignore them on front-end generated subtypes of unconstrained
5479 bases, and this attribute is for binding implementors, not end-users, so
5480 we should never get there from legitimate explicit uses. */
5481
5482 if (!host_integerp (rep_size, 1))
5483 return NULL_TREE;
5484
5485 /* Get the element type/mode and check this is something we know
5486 how to make vectors of. */
5487
5488 elem_type = TREE_TYPE (rep_type);
5489 elem_mode = TYPE_MODE (elem_type);
5490
5491 if ((!INTEGRAL_TYPE_P (elem_type)
5492 && !SCALAR_FLOAT_TYPE_P (elem_type)
5493 && !FIXED_POINT_TYPE_P (elem_type))
5494 || (!SCALAR_FLOAT_MODE_P (elem_mode)
5495 && GET_MODE_CLASS (elem_mode) != MODE_INT
5496 && !ALL_SCALAR_FIXED_POINT_MODE_P (elem_mode))
5497 || !host_integerp (TYPE_SIZE_UNIT (elem_type), 1))
5498 {
5499 error ("invalid element type for attribute %qs",
5500 IDENTIFIER_POINTER (name));
5501 return NULL_TREE;
5502 }
5503
5504 /* Sanity check the vector size and element type consistency. */
5505
5506 vec_bytes = tree_low_cst (rep_size, 1);
5507
5508 if (vec_bytes % tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1))
5509 {
5510 error ("vector size not an integral multiple of component size");
5511 return NULL;
5512 }
5513
5514 if (vec_bytes == 0)
5515 {
5516 error ("zero vector size");
5517 return NULL;
5518 }
5519
5520 vec_units = vec_bytes / tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1);
5521 if (vec_units & (vec_units - 1))
5522 {
5523 error ("number of components of the vector not a power of two");
5524 return NULL_TREE;
5525 }
5526
5527 /* Build the vector type and replace. */
5528
5529 *node = build_vector_type (elem_type, vec_units);
5530 rep_name = TYPE_NAME (rep_type);
5531 if (TREE_CODE (rep_name) == TYPE_DECL)
5532 rep_name = DECL_NAME (rep_name);
5533 TYPE_NAME (*node) = rep_name;
5534 TYPE_REPRESENTATIVE_ARRAY (*node) = rep_type;
5535
5536 return NULL_TREE;
5537 }
5538
5539 /* ----------------------------------------------------------------------- *
5540 * BUILTIN FUNCTIONS *
5541 * ----------------------------------------------------------------------- */
5542
5543 /* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
5544 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
5545 if nonansi_p and flag_no_nonansi_builtin. */
5546
5547 static void
5548 def_builtin_1 (enum built_in_function fncode,
5549 const char *name,
5550 enum built_in_class fnclass,
5551 tree fntype, tree libtype,
5552 bool both_p, bool fallback_p,
5553 bool nonansi_p ATTRIBUTE_UNUSED,
5554 tree fnattrs, bool implicit_p)
5555 {
5556 tree decl;
5557 const char *libname;
5558
5559 /* Preserve an already installed decl. It most likely was setup in advance
5560 (e.g. as part of the internal builtins) for specific reasons. */
5561 if (built_in_decls[(int) fncode] != NULL_TREE)
5562 return;
5563
5564 gcc_assert ((!both_p && !fallback_p)
5565 || !strncmp (name, "__builtin_",
5566 strlen ("__builtin_")));
5567
5568 libname = name + strlen ("__builtin_");
5569 decl = add_builtin_function (name, fntype, fncode, fnclass,
5570 (fallback_p ? libname : NULL),
5571 fnattrs);
5572 if (both_p)
5573 /* ??? This is normally further controlled by command-line options
5574 like -fno-builtin, but we don't have them for Ada. */
5575 add_builtin_function (libname, libtype, fncode, fnclass,
5576 NULL, fnattrs);
5577
5578 built_in_decls[(int) fncode] = decl;
5579 if (implicit_p)
5580 implicit_built_in_decls[(int) fncode] = decl;
5581 }
5582
/* ISO C dialect flags; presumably referenced by COND conditions of some
   builtins.def entries expanded below — the Ada front end has no C dialect
   options, so both remain 0 (TODO confirm against builtins.def).  */
static int flag_isoc94 = 0;
static int flag_isoc99 = 0;
5585
/* Install what the common builtins.def offers.  Each DEF_BUILTIN entry
   expands into a def_builtin_1 call, fetching the function and library
   types and the attribute list from the tables built beforehand; COND
   gates each individual entry.  */

static void
install_builtin_functions (void)
{
#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
		    NONANSI_P, ATTRS, IMPLICIT, COND)			\
  if (NAME && COND)							\
    def_builtin_1 (ENUM, NAME, CLASS,					\
		   builtin_types[(int) TYPE],				\
		   builtin_types[(int) LIBTYPE],			\
		   BOTH_P, FALLBACK_P, NONANSI_P,			\
		   built_in_attributes[(int) ATTRS], IMPLICIT);
#include "builtins.def"
#undef DEF_BUILTIN
}
5602
5603 /* ----------------------------------------------------------------------- *
5604 * BUILTIN FUNCTIONS *
5605 * ----------------------------------------------------------------------- */
5606
/* Install the builtin functions we might need.  This is the public entry
   point for builtin setup in the Ada gigi; the call order below matters.  */

void
gnat_install_builtins (void)
{
  /* Build the supporting tables first: elementary types, then the
     function prototypes assembled from them, then the attribute lists
     used when declaring the builtins.  */
  install_builtin_elementary_types ();
  install_builtin_function_types ();
  install_builtin_attributes ();

  /* Install builtins used by generic middle-end pieces first.  Some of these
     know about internal specificities and control attributes accordingly, for
     instance __builtin_alloca vs no-throw and -fstack-check.  We will ignore
     the generic definition from builtins.def.  */
  build_common_builtin_nodes ();

  /* Now, install the target specific builtins, such as the AltiVec family on
     ppc, and the common set as exposed by builtins.def.  */
  targetm.init_builtins ();
  install_builtin_functions ();
}
5627
5628 #include "gt-ada-utils.h"
5629 #include "gtype-ada.h"