1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2014, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 ****************************************************************************/
28 #include "coretypes.h"
31 #include "stringpool.h"
32 #include "stor-layout.h"
37 #include "diagnostic-core.h"
43 #include "common/common-target.h"
44 #include "langhooks.h"
46 #include "diagnostic.h"
48 #include "tree-dump.h"
49 #include "tree-inline.h"
50 #include "tree-iterator.h"
66 /* If nonzero, pretend we are allocating at global level. */
69 /* The default alignment of "double" floating-point types, i.e. floating
70 point types whose size is equal to 64 bits, or 0 if this alignment is
71 not specifically capped. */
72 int double_float_alignment
;
74 /* The default alignment of "double" or larger scalar types, i.e. scalar
75 types whose size is greater or equal to 64 bits, or 0 if this alignment
76 is not specifically capped. */
77 int double_scalar_alignment
;
79 /* Tree nodes for the various types and decls we create. */
80 tree gnat_std_decls
[(int) ADT_LAST
];
82 /* Functions to call for each of the possible raise reasons. */
83 tree gnat_raise_decls
[(int) LAST_REASON_CODE
+ 1];
85 /* Likewise, but with extra info for each of the possible raise reasons. */
86 tree gnat_raise_decls_ext
[(int) LAST_REASON_CODE
+ 1];
88 /* Forward declarations for handlers of attributes. */
89 static tree
handle_const_attribute (tree
*, tree
, tree
, int, bool *);
90 static tree
handle_nothrow_attribute (tree
*, tree
, tree
, int, bool *);
91 static tree
handle_pure_attribute (tree
*, tree
, tree
, int, bool *);
92 static tree
handle_novops_attribute (tree
*, tree
, tree
, int, bool *);
93 static tree
handle_nonnull_attribute (tree
*, tree
, tree
, int, bool *);
94 static tree
handle_sentinel_attribute (tree
*, tree
, tree
, int, bool *);
95 static tree
handle_noreturn_attribute (tree
*, tree
, tree
, int, bool *);
96 static tree
handle_leaf_attribute (tree
*, tree
, tree
, int, bool *);
97 static tree
handle_malloc_attribute (tree
*, tree
, tree
, int, bool *);
98 static tree
handle_type_generic_attribute (tree
*, tree
, tree
, int, bool *);
99 static tree
handle_vector_size_attribute (tree
*, tree
, tree
, int, bool *);
100 static tree
handle_vector_type_attribute (tree
*, tree
, tree
, int, bool *);
102 /* Fake handler for attributes we don't properly support, typically because
103 they'd require dragging a lot of the common-c front-end circuitry. */
104 static tree
fake_attribute_handler (tree
*, tree
, tree
, int, bool *);
106 /* Table of machine-independent internal attributes for Ada. We support
107 this minimal set of attributes to accommodate the needs of builtins. */
108 const struct attribute_spec gnat_internal_attribute_table
[] =
110 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
111 affects_type_identity } */
112 { "const", 0, 0, true, false, false, handle_const_attribute
,
114 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute
,
116 { "pure", 0, 0, true, false, false, handle_pure_attribute
,
118 { "no vops", 0, 0, true, false, false, handle_novops_attribute
,
120 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute
,
122 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute
,
124 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute
,
126 { "leaf", 0, 0, true, false, false, handle_leaf_attribute
,
128 { "malloc", 0, 0, true, false, false, handle_malloc_attribute
,
130 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute
,
133 { "vector_size", 1, 1, false, true, false, handle_vector_size_attribute
,
135 { "vector_type", 0, 0, false, true, false, handle_vector_type_attribute
,
137 { "may_alias", 0, 0, false, true, false, NULL
, false },
139 /* ??? format and format_arg are heavy and not supported, which actually
140 prevents support for stdio builtins, which we however declare as part
141 of the common builtins.def contents. */
142 { "format", 3, 3, false, true, true, fake_attribute_handler
, false },
143 { "format_arg", 1, 1, false, true, true, fake_attribute_handler
, false },
145 { NULL
, 0, 0, false, false, false, NULL
, false }
148 /* Associates a GNAT tree node to a GCC tree node. It is used in
149 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
150 of `save_gnu_tree' for more info. */
151 static GTY((length ("max_gnat_nodes"))) tree
*associate_gnat_to_gnu
;
153 #define GET_GNU_TREE(GNAT_ENTITY) \
154 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
156 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
157 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
159 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
160 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
162 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
163 static GTY((length ("max_gnat_nodes"))) tree
*dummy_node_table
;
165 #define GET_DUMMY_NODE(GNAT_ENTITY) \
166 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
168 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
169 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
171 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
172 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
174 /* This variable keeps a table for types for each precision so that we only
175 allocate each of them once. Signed and unsigned types are kept separate.
177 Note that these types are only used when fold-const requests something
178 special. Perhaps we should NOT share these types; we'll see how it
180 static GTY(()) tree signed_and_unsigned_types
[2 * MAX_BITS_PER_WORD
+ 1][2];
182 /* Likewise for float types, but record these by mode. */
183 static GTY(()) tree float_types
[NUM_MACHINE_MODES
];
185 /* For each binding contour we allocate a binding_level structure to indicate
186 the binding depth. */
188 struct GTY((chain_next ("%h.chain"))) gnat_binding_level
{
189 /* The binding level containing this one (the enclosing binding level). */
190 struct gnat_binding_level
*chain
;
191 /* The BLOCK node for this level. */
193 /* If nonzero, the setjmp buffer that needs to be updated for any
194 variable-sized definition within this context. */
198 /* The binding level currently in effect. */
199 static GTY(()) struct gnat_binding_level
*current_binding_level
;
201 /* A chain of gnat_binding_level structures awaiting reuse. */
202 static GTY((deletable
)) struct gnat_binding_level
*free_binding_level
;
204 /* The context to be used for global declarations. */
205 static GTY(()) tree global_context
;
207 /* An array of global declarations. */
208 static GTY(()) vec
<tree
, va_gc
> *global_decls
;
210 /* An array of builtin function declarations. */
211 static GTY(()) vec
<tree
, va_gc
> *builtin_decls
;
213 /* An array of global renaming pointers. */
214 static GTY(()) vec
<tree
, va_gc
> *global_renaming_pointers
;
216 /* A chain of unused BLOCK nodes. */
217 static GTY((deletable
)) tree free_block_chain
;
219 static int pad_type_hash_marked_p (const void *p
);
220 static hashval_t
pad_type_hash_hash (const void *p
);
221 static int pad_type_hash_eq (const void *p1
, const void *p2
);
223 /* A hash table of padded types. It is modelled on the generic type
224 hash table in tree.c, which must thus be used as a reference. */
225 struct GTY(()) pad_type_hash
{
230 static GTY ((if_marked ("pad_type_hash_marked_p"),
231 param_is (struct pad_type_hash
)))
232 htab_t pad_type_hash_table
;
234 static tree
merge_sizes (tree
, tree
, tree
, bool, bool);
235 static tree
compute_related_constant (tree
, tree
);
236 static tree
split_plus (tree
, tree
*);
237 static tree
float_type_for_precision (int, enum machine_mode
);
238 static tree
convert_to_fat_pointer (tree
, tree
);
239 static unsigned int scale_by_factor_of (tree
, unsigned int);
240 static bool potential_alignment_gap (tree
, tree
, tree
);
/* NOTE(review): this extract is line-wrapped and missing interior source
   lines (return type, braces, and at least one statement's left-hand side);
   treat it as a read-only reference, not compilable code.  */
242 /* Initialize data structures of the utils.c module. */
245 init_gnat_utils (void)
/* Allocate the GC-managed arrays that map GNAT node ids to GCC trees.  */
247 /* Initialize the association of GNAT nodes to GCC trees. */
248 associate_gnat_to_gnu
= ggc_cleared_vec_alloc
<tree
> (max_gnat_nodes
);
250 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
251 dummy_node_table
= ggc_cleared_vec_alloc
<tree
> (max_gnat_nodes
);
253 /* Initialize the hash table of padded types. */
/* NOTE(review): the assignment target line is missing from this extract —
   presumably pad_type_hash_table (see destroy_gnat_utils below); confirm
   against the full source.  */
255 = htab_create_ggc (512, pad_type_hash_hash
, pad_type_hash_eq
, 0);
/* NOTE(review): line-wrapped extract with missing braces/return type.
   Frees the GC arrays and hash table allocated by init_gnat_utils and
   nulls the pointers afterwards (defends against stale reuse).  */
258 /* Destroy data structures of the utils.c module. */
261 destroy_gnat_utils (void)
263 /* Destroy the association of GNAT nodes to GCC trees. */
264 ggc_free (associate_gnat_to_gnu
);
265 associate_gnat_to_gnu
= NULL
;
267 /* Destroy the association of GNAT nodes to GCC trees as dummies. */
268 ggc_free (dummy_node_table
);
269 dummy_node_table
= NULL
;
271 /* Destroy the hash table of padded types. */
272 htab_delete (pad_type_hash_table
);
273 pad_type_hash_table
= NULL
;
275 /* Invalidate the global renaming pointers. */
276 invalidate_global_renaming_pointers ();
/* NOTE(review): line-wrapped extract; the closing line of the comment that
   starts at original line 288 is missing, so that comment runs on into the
   code below in this extract.  */
279 /* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
280 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
281 If NO_CHECK is true, the latter check is suppressed.
283 If GNU_DECL is zero, reset a previous association. */
286 save_gnu_tree (Entity_Id gnat_entity
, tree gnu_decl
, bool no_check
)
288 /* Check that GNAT_ENTITY is not already defined and that it is being set
289 to something which is a decl. If that is not the case, this usually
290 means GNAT_ENTITY is defined twice, but occasionally is due to some
292 gcc_assert (!(gnu_decl
293 && (PRESENT_GNU_TREE (gnat_entity
)
294 || (!no_check
&& !DECL_P (gnu_decl
)))));
296 SET_GNU_TREE (gnat_entity
, gnu_decl
);
/* NOTE(review): line-wrapped extract; asserts the association exists
   before returning it (GET_GNU_TREE indexes the associate_gnat_to_gnu
   array defined above).  */
299 /* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
300 that was associated with it. If there is no such tree node, abort.
302 In some cases, such as delayed elaboration or expressions that need to
303 be elaborated only once, GNAT_ENTITY is really not an entity. */
306 get_gnu_tree (Entity_Id gnat_entity
)
308 gcc_assert (PRESENT_GNU_TREE (gnat_entity
));
309 return GET_GNU_TREE (gnat_entity
);
/* NOTE(review): thin predicate over the PRESENT_GNU_TREE macro; the return
   type line is missing from this extract.  */
312 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
315 present_gnu_tree (Entity_Id gnat_entity
)
317 return PRESENT_GNU_TREE (gnat_entity
);
/* NOTE(review): line-wrapped extract with missing interior lines (e.g. the
   closer of the comment at original line 341, the else-arm of the make_node
   tree-code selection, and the trailing return).  Builds and caches a
   placeholder GCC type for a not-yet-elaborated GNAT type.  */
320 /* Make a dummy type corresponding to GNAT_TYPE. */
323 make_dummy_type (Entity_Id gnat_type
)
325 Entity_Id gnat_underlying
= Gigi_Equivalent_Type (gnat_type
);
328 /* If there is an equivalent type, get its underlying type. */
329 if (Present (gnat_underlying
))
330 gnat_underlying
= Gigi_Equivalent_Type (Underlying_Type (gnat_underlying
));
332 /* If there was no equivalent type (can only happen when just annotating
333 types) or underlying type, go back to the original type. */
334 if (No (gnat_underlying
))
335 gnat_underlying
= gnat_type
;
337 /* If there is already a dummy type, use that one. Else make one. */
338 if (PRESENT_DUMMY_NODE (gnat_underlying
))
339 return GET_DUMMY_NODE (gnat_underlying
);
341 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
343 gnu_type
= make_node (Is_Record_Type (gnat_underlying
)
344 ? tree_code_for_record_type (gnat_underlying
)
346 TYPE_NAME (gnu_type
) = get_entity_name (gnat_type
);
347 TYPE_DUMMY_P (gnu_type
) = 1;
348 TYPE_STUB_DECL (gnu_type
)
349 = create_type_stub_decl (TYPE_NAME (gnu_type
), gnu_type
);
350 if (Is_By_Reference_Type (gnat_underlying
))
351 TYPE_BY_REFERENCE_P (gnu_type
) = 1;
353 SET_DUMMY_NODE (gnat_underlying
, gnu_type
);
/* NOTE(review): simple accessor over the dummy_node_table; the return type
   line is missing from this extract.  */
358 /* Return the dummy type that was made for GNAT_TYPE, if any. */
361 get_dummy_type (Entity_Id gnat_type
)
363 return GET_DUMMY_NODE (gnat_type
);
/* NOTE(review): line-wrapped extract.  Builds dummy XUB (template), XUA
   (array stand-in, deliberately an ENUMERAL_TYPE placeholder), XUP (fat
   pointer) and XUT (object record) types for an unconstrained array's
   designated type.  The comment beginning at original line 386 is missing
   its closing line in this extract, and the second field's assignment
   target line (presumably DECL_CHAIN (fields)) is absent — confirm against
   the full source.  */
366 /* Build dummy fat and thin pointer types whose designated type is specified
367 by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter. */
370 build_dummy_unc_pointer_types (Entity_Id gnat_desig_type
, tree gnu_desig_type
)
372 tree gnu_template_type
, gnu_ptr_template
, gnu_array_type
, gnu_ptr_array
;
373 tree gnu_fat_type
, fields
, gnu_object_type
;
375 gnu_template_type
= make_node (RECORD_TYPE
);
376 TYPE_NAME (gnu_template_type
) = create_concat_name (gnat_desig_type
, "XUB");
377 TYPE_DUMMY_P (gnu_template_type
) = 1;
378 gnu_ptr_template
= build_pointer_type (gnu_template_type
);
380 gnu_array_type
= make_node (ENUMERAL_TYPE
);
381 TYPE_NAME (gnu_array_type
) = create_concat_name (gnat_desig_type
, "XUA");
382 TYPE_DUMMY_P (gnu_array_type
) = 1;
383 gnu_ptr_array
= build_pointer_type (gnu_array_type
);
385 gnu_fat_type
= make_node (RECORD_TYPE
);
386 /* Build a stub DECL to trigger the special processing for fat pointer types
388 TYPE_NAME (gnu_fat_type
)
389 = create_type_stub_decl (create_concat_name (gnat_desig_type
, "XUP"),
391 fields
= create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array
,
392 gnu_fat_type
, NULL_TREE
, NULL_TREE
, 0, 0);
394 = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template
,
395 gnu_fat_type
, NULL_TREE
, NULL_TREE
, 0, 0);
396 finish_fat_pointer_type (gnu_fat_type
, fields
);
397 SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type
, gnu_desig_type
);
398 /* Suppress debug info until after the type is completed. */
399 TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type
)) = 1;
401 gnu_object_type
= make_node (RECORD_TYPE
);
402 TYPE_NAME (gnu_object_type
) = create_concat_name (gnat_desig_type
, "XUT");
403 TYPE_DUMMY_P (gnu_object_type
) = 1;
405 TYPE_POINTER_TO (gnu_desig_type
) = gnu_fat_type
;
406 TYPE_OBJECT_RECORD_TYPE (gnu_desig_type
) = gnu_object_type
;
/* NOTE(review): global scope is either forced (force_global) or implied by
   the absence of a current function; return type line missing from this
   extract.  */
409 /* Return true if we are in the global binding level. */
412 global_bindings_p (void)
414 return force_global
|| current_function_decl
== NULL_TREE
;
/* NOTE(review): line-wrapped extract with missing braces/else keywords.
   Pushes a new binding level, recycling a level struct from
   free_binding_level and a BLOCK from free_block_chain when available,
   then links the new level onto the current_binding_level stack.  */
417 /* Enter a new binding level. */
420 gnat_pushlevel (void)
422 struct gnat_binding_level
*newlevel
= NULL
;
424 /* Reuse a struct for this binding level, if there is one. */
425 if (free_binding_level
)
427 newlevel
= free_binding_level
;
428 free_binding_level
= free_binding_level
->chain
;
431 newlevel
= ggc_alloc
<gnat_binding_level
> ();
433 /* Use a free BLOCK, if any; otherwise, allocate one. */
434 if (free_block_chain
)
436 newlevel
->block
= free_block_chain
;
437 free_block_chain
= BLOCK_CHAIN (free_block_chain
);
438 BLOCK_CHAIN (newlevel
->block
) = NULL_TREE
;
441 newlevel
->block
= make_node (BLOCK
);
443 /* Point the BLOCK we just made to its parent. */
444 if (current_binding_level
)
445 BLOCK_SUPERCONTEXT (newlevel
->block
) = current_binding_level
->block
;
447 BLOCK_VARS (newlevel
->block
) = NULL_TREE
;
448 BLOCK_SUBBLOCKS (newlevel
->block
) = NULL_TREE
;
449 TREE_USED (newlevel
->block
) = 1;
451 /* Add this level to the front of the chain (stack) of active levels. */
452 newlevel
->chain
= current_binding_level
;
453 newlevel
->jmpbuf_decl
= NULL_TREE
;
454 current_binding_level
= newlevel
;
/* NOTE(review): wires the current level's BLOCK into FNDECL both ways
   (BLOCK_SUPERCONTEXT and DECL_INITIAL); line-wrapped extract.  */
457 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
458 and point FNDECL to this BLOCK. */
461 set_current_block_context (tree fndecl
)
463 BLOCK_SUPERCONTEXT (current_binding_level
->block
) = fndecl
;
464 DECL_INITIAL (fndecl
) = current_binding_level
->block
;
465 set_block_for_group (current_binding_level
->block
);
/* NOTE(review): setter for the per-level jmpbuf_decl field used by
   variable-sized definitions (see the struct comment earlier in the file);
   line-wrapped extract.  */
468 /* Set the jmpbuf_decl for the current binding level to DECL. */
471 set_block_jmpbuf_decl (tree decl
)
473 current_binding_level
->jmpbuf_decl
= decl
;
/* NOTE(review): matching getter for set_block_jmpbuf_decl above;
   line-wrapped extract.  */
476 /* Get the jmpbuf_decl, if any, for the current binding level. */
479 get_block_jmpbuf_decl (void)
481 return current_binding_level
->jmpbuf_decl
;
/* NOTE(review): the function signature line is missing from this extract —
   presumably this is gnat_poplevel; confirm against the full source.
   Pops the current binding level: reverses the BLOCK's vars/subblocks,
   then either keeps the BLOCK (function-level), frees it and merges its
   subblocks into the parent (no variables), or chains it under the parent;
   finally recycles the level struct onto free_binding_level.  */
484 /* Exit a binding level. Set any BLOCK into the current code group. */
489 struct gnat_binding_level
*level
= current_binding_level
;
490 tree block
= level
->block
;
492 BLOCK_VARS (block
) = nreverse (BLOCK_VARS (block
));
493 BLOCK_SUBBLOCKS (block
) = blocks_nreverse (BLOCK_SUBBLOCKS (block
));
495 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
496 are no variables free the block and merge its subblocks into those of its
497 parent block. Otherwise, add it to the list of its parent. */
498 if (TREE_CODE (BLOCK_SUPERCONTEXT (block
)) == FUNCTION_DECL
)
500 else if (BLOCK_VARS (block
) == NULL_TREE
)
502 BLOCK_SUBBLOCKS (level
->chain
->block
)
503 = block_chainon (BLOCK_SUBBLOCKS (block
),
504 BLOCK_SUBBLOCKS (level
->chain
->block
));
505 BLOCK_CHAIN (block
) = free_block_chain
;
506 free_block_chain
= block
;
510 BLOCK_CHAIN (block
) = BLOCK_SUBBLOCKS (level
->chain
->block
);
511 BLOCK_SUBBLOCKS (level
->chain
->block
) = block
;
512 TREE_USED (block
) = 1;
513 set_block_for_group (block
);
516 /* Free this binding structure. */
517 current_binding_level
= level
->chain
;
518 level
->chain
= free_binding_level
;
519 free_binding_level
= level
;
/* NOTE(review): the function signature line is missing from this extract —
   presumably this is gnat_zaplevel; confirm against the full source.
   Pops the current binding level but discards its BLOCK entirely,
   recycling both the BLOCK (free_block_chain) and the level struct
   (free_binding_level).  */
522 /* Exit a binding level and discard the associated BLOCK. */
527 struct gnat_binding_level
*level
= current_binding_level
;
528 tree block
= level
->block
;
530 BLOCK_CHAIN (block
) = free_block_chain
;
531 free_block_chain
= block
;
533 /* Free this binding structure. */
534 current_binding_level
= level
->chain
;
535 level
->chain
= free_binding_level
;
536 free_binding_level
= level
;
/* NOTE(review): line-wrapped extract with missing loop braces.  Sets
   TYPE_CONTEXT on TYPE, then walks the chain of parallel types reachable
   through the stub decls and gives each the same context.  */
539 /* Set the context of TYPE and its parallel types (if any) to CONTEXT. */
542 gnat_set_type_context (tree type
, tree context
)
544 tree decl
= TYPE_STUB_DECL (type
);
546 TYPE_CONTEXT (type
) = context
;
548 while (decl
&& DECL_PARALLEL_TYPE (decl
))
550 TYPE_CONTEXT (DECL_PARALLEL_TYPE (decl
)) = context
;
551 decl
= TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl
));
/* NOTE(review): long line-wrapped extract with many interior source lines
   missing (braces, else keywords, a few statements).  Overall shape as
   visible here: (1) pick DECL_CONTEXT — the lazily-created global
   translation-unit decl for public-external/top-level decls, else
   current_function_decl, with the static-chain marking for truly nested
   functions; (2) propagate warnings-off and source location from
   GNAT_NODE; (3) record the decl in builtin_decls / global_decls / the
   current BLOCK's variable list (skipping UNCONSTRAINED_ARRAY_TYPE
   TYPE_DECLs); (4) for named TYPE_DECLs, propagate the name to the type
   and its variants, with special handling for array/pointer types (DWARF
   typedef association) and fat pointer types (placeholder variant).  */
555 /* Record DECL as belonging to the current lexical scope and use GNAT_NODE
556 for location information and flag propagation. */
559 gnat_pushdecl (tree decl
, Node_Id gnat_node
)
561 /* If DECL is public external or at top level, it has global context. */
562 if ((TREE_PUBLIC (decl
) && DECL_EXTERNAL (decl
)) || global_bindings_p ())
565 global_context
= build_translation_unit_decl (NULL_TREE
);
566 DECL_CONTEXT (decl
) = global_context
;
570 DECL_CONTEXT (decl
) = current_function_decl
;
572 /* Functions imported in another function are not really nested.
573 For really nested functions mark them initially as needing
574 a static chain for uses of that flag before unnesting;
575 lower_nested_functions will then recompute it. */
576 if (TREE_CODE (decl
) == FUNCTION_DECL
&& !TREE_PUBLIC (decl
))
577 DECL_STATIC_CHAIN (decl
) = 1;
580 TREE_NO_WARNING (decl
) = (No (gnat_node
) || Warnings_Off (gnat_node
));
582 /* Set the location of DECL and emit a declaration for it. */
583 if (Present (gnat_node
))
584 Sloc_to_locus (Sloc (gnat_node
), &DECL_SOURCE_LOCATION (decl
));
586 add_decl_expr (decl
, gnat_node
);
588 /* Put the declaration on the list. The list of declarations is in reverse
589 order. The list will be reversed later. Put global declarations in the
590 globals list and local ones in the current block. But skip TYPE_DECLs
591 for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
592 with the debugger and aren't needed anyway. */
593 if (!(TREE_CODE (decl
) == TYPE_DECL
594 && TREE_CODE (TREE_TYPE (decl
)) == UNCONSTRAINED_ARRAY_TYPE
))
596 if (DECL_EXTERNAL (decl
))
598 if (TREE_CODE (decl
) == FUNCTION_DECL
&& DECL_BUILT_IN (decl
))
599 vec_safe_push (builtin_decls
, decl
);
601 else if (global_bindings_p ())
602 vec_safe_push (global_decls
, decl
);
605 DECL_CHAIN (decl
) = BLOCK_VARS (current_binding_level
->block
);
606 BLOCK_VARS (current_binding_level
->block
) = decl
;
610 /* For the declaration of a type, set its name if it either is not already
611 set or if the previous type name was not derived from a source name.
612 We'd rather have the type named with a real name and all the pointer
613 types to the same object have the same POINTER_TYPE node. Code in the
614 equivalent function of c-decl.c makes a copy of the type node here, but
615 that may cause us trouble with incomplete types. We make an exception
616 for fat pointer types because the compiler automatically builds them
617 for unconstrained array types and the debugger uses them to represent
618 both these and pointers to these. */
619 if (TREE_CODE (decl
) == TYPE_DECL
&& DECL_NAME (decl
))
621 tree t
= TREE_TYPE (decl
);
623 if (!(TYPE_NAME (t
) && TREE_CODE (TYPE_NAME (t
)) == TYPE_DECL
))
625 /* Array and pointer types aren't "tagged" types so we force the
626 type to be associated with its typedef in the DWARF back-end,
627 in order to make sure that the latter is always preserved. */
628 if (!DECL_ARTIFICIAL (decl
)
629 && (TREE_CODE (t
) == ARRAY_TYPE
630 || TREE_CODE (t
) == POINTER_TYPE
))
632 tree tt
= build_distinct_type_copy (t
);
633 if (TREE_CODE (t
) == POINTER_TYPE
)
634 TYPE_NEXT_PTR_TO (t
) = tt
;
635 TYPE_NAME (tt
) = DECL_NAME (decl
);
636 gnat_set_type_context (tt
, DECL_CONTEXT (decl
));
637 TYPE_STUB_DECL (tt
) = TYPE_STUB_DECL (t
);
638 DECL_ORIGINAL_TYPE (decl
) = tt
;
641 else if (TYPE_IS_FAT_POINTER_P (t
))
643 /* We need a variant for the placeholder machinery to work. */
644 tree tt
= build_variant_type_copy (t
);
645 TYPE_NAME (tt
) = decl
;
646 gnat_set_type_context (tt
, DECL_CONTEXT (decl
));
647 TREE_USED (tt
) = TREE_USED (t
);
648 TREE_TYPE (decl
) = tt
;
649 if (DECL_ORIGINAL_TYPE (TYPE_NAME (t
)))
650 DECL_ORIGINAL_TYPE (decl
) = DECL_ORIGINAL_TYPE (TYPE_NAME (t
));
652 DECL_ORIGINAL_TYPE (decl
) = t
;
653 DECL_ARTIFICIAL (decl
) = 0;
656 else if (DECL_ARTIFICIAL (TYPE_NAME (t
)) && !DECL_ARTIFICIAL (decl
))
661 /* Propagate the name to all the anonymous variants. This is needed
662 for the type qualifiers machinery to work properly. */
664 for (t
= TYPE_MAIN_VARIANT (t
); t
; t
= TYPE_NEXT_VARIANT (t
))
665 if (!(TYPE_NAME (t
) && TREE_CODE (TYPE_NAME (t
)) == TYPE_DECL
))
667 TYPE_NAME (t
) = decl
;
668 gnat_set_type_context (t
, DECL_CONTEXT (decl
));
/* NOTE(review): line-wrapped extract; several interior lines are missing,
   including the declaration line for record_addr_st (used below), some
   argument lines of create_field_decl and the size_binop calls, and the
   trailing return.  Builds a one-field RECORD_TYPE whose field position is
   computed at run time so the field lands on the next ALIGN-bit boundary
   past the record's address plus ROOM bytes.  */
673 /* Create a record type that contains a SIZE bytes long field of TYPE with a
674 starting bit position so that it is aligned to ALIGN bits, and leaving at
675 least ROOM bytes free before the field. BASE_ALIGN is the alignment the
676 record is guaranteed to get. GNAT_NODE is used for the position of the
677 associated TYPE_DECL. */
680 make_aligning_type (tree type
, unsigned int align
, tree size
,
681 unsigned int base_align
, int room
, Node_Id gnat_node
)
683 /* We will be crafting a record type with one field at a position set to be
684 the next multiple of ALIGN past record'address + room bytes. We use a
685 record placeholder to express record'address. */
686 tree record_type
= make_node (RECORD_TYPE
);
687 tree record
= build0 (PLACEHOLDER_EXPR
, record_type
);
690 = convert (sizetype
, build_unary_op (ADDR_EXPR
, NULL_TREE
, record
));
692 /* The diagram below summarizes the shape of what we manipulate:
694 <--------- pos ---------->
695 { +------------+-------------+-----------------+
696 record =>{ |############| ... | field (type) |
697 { +------------+-------------+-----------------+
698 |<-- room -->|<- voffset ->|<---- size ----->|
701 record_addr vblock_addr
703 Every length is in sizetype bytes there, except "pos" which has to be
704 set as a bit position in the GCC tree for the record. */
705 tree room_st
= size_int (room
);
706 tree vblock_addr_st
= size_binop (PLUS_EXPR
, record_addr_st
, room_st
);
707 tree voffset_st
, pos
, field
;
709 tree name
= TYPE_IDENTIFIER (type
);
711 name
= concat_name (name
, "ALIGN");
712 TYPE_NAME (record_type
) = name
;
714 /* Compute VOFFSET and then POS. The next byte position multiple of some
715 alignment after some address is obtained by "and"ing the alignment minus
716 1 with the two's complement of the address. */
717 voffset_st
= size_binop (BIT_AND_EXPR
,
718 fold_build1 (NEGATE_EXPR
, sizetype
, vblock_addr_st
),
719 size_int ((align
/ BITS_PER_UNIT
) - 1));
721 /* POS = (ROOM + VOFFSET) * BIT_PER_UNIT, in bitsizetype. */
722 pos
= size_binop (MULT_EXPR
,
723 convert (bitsizetype
,
724 size_binop (PLUS_EXPR
, room_st
, voffset_st
)),
727 /* Craft the GCC record representation. We exceptionally do everything
728 manually here because 1) our generic circuitry is not quite ready to
729 handle the complex position/size expressions we are setting up, 2) we
730 have a strong simplifying factor at hand: we know the maximum possible
731 value of voffset, and 3) we have to set/reset at least the sizes in
732 accordance with this maximum value anyway, as we need them to convey
733 what should be "alloc"ated for this type.
735 Use -1 as the 'addressable' indication for the field to prevent the
736 creation of a bitfield. We don't need one, it would have damaging
737 consequences on the alignment computation, and create_field_decl would
738 make one without this special argument, for instance because of the
739 complex position expression. */
740 field
= create_field_decl (get_identifier ("F"), type
, record_type
, size
,
742 TYPE_FIELDS (record_type
) = field
;
744 TYPE_ALIGN (record_type
) = base_align
;
745 TYPE_USER_ALIGN (record_type
) = 1;
747 TYPE_SIZE (record_type
)
748 = size_binop (PLUS_EXPR
,
749 size_binop (MULT_EXPR
, convert (bitsizetype
, size
),
751 bitsize_int (align
+ room
* BITS_PER_UNIT
));
752 TYPE_SIZE_UNIT (record_type
)
753 = size_binop (PLUS_EXPR
, size
,
754 size_int (room
+ align
/ BITS_PER_UNIT
));
756 SET_TYPE_MODE (record_type
, BLKmode
);
757 relate_alias_sets (record_type
, type
, ALIAS_SET_COPY
);
759 /* Declare it now since it will never be declared otherwise. This is
760 necessary to ensure that its subtrees are properly marked. */
761 create_type_decl (name
, record_type
, true, false, gnat_node
);
/* NOTE(review): line-wrapped extract; numerous interior lines are missing
   (declarations of the outer `align`, early returns, else keywords,
   braces, the assignment target of the create_field_decl call — presumably
   new_field — and the trailing returns).  Visible logic: clone the type
   node, choose an alignment/size that may yield an integral mode (record
   case) or a smaller BLKmode type, re-create each field at its original
   position/size (using the RM size for the last aggregate field of a
   non-packed record), finish the record, copy/shrink sizes, recompute the
   mode, and fall back to the original type if nothing shrank.  */
766 /* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
767 as the field type of a packed record if IN_RECORD is true, or as the
768 component type of a packed array if IN_RECORD is false. See if we can
769 rewrite it either as a type that has a non-BLKmode, which we can pack
770 tighter in the packed record case, or as a smaller type. If so, return
771 the new type. If not, return the original type. */
774 make_packable_type (tree type
, bool in_record
)
776 unsigned HOST_WIDE_INT size
= tree_to_uhwi (TYPE_SIZE (type
));
777 unsigned HOST_WIDE_INT new_size
;
778 tree new_type
, old_field
, field_list
= NULL_TREE
;
781 /* No point in doing anything if the size is zero. */
785 new_type
= make_node (TREE_CODE (type
));
787 /* Copy the name and flags from the old type to that of the new.
788 Note that we rely on the pointer equality created here for
789 TYPE_NAME to look through conversions in various places. */
790 TYPE_NAME (new_type
) = TYPE_NAME (type
);
791 TYPE_JUSTIFIED_MODULAR_P (new_type
) = TYPE_JUSTIFIED_MODULAR_P (type
);
792 TYPE_CONTAINS_TEMPLATE_P (new_type
) = TYPE_CONTAINS_TEMPLATE_P (type
);
793 if (TREE_CODE (type
) == RECORD_TYPE
)
794 TYPE_PADDING_P (new_type
) = TYPE_PADDING_P (type
);
796 /* If we are in a record and have a small size, set the alignment to
797 try for an integral mode. Otherwise set it to try for a smaller
798 type with BLKmode. */
799 if (in_record
&& size
<= MAX_FIXED_MODE_SIZE
)
801 align
= ceil_pow2 (size
);
802 TYPE_ALIGN (new_type
) = align
;
803 new_size
= (size
+ align
- 1) & -align
;
807 unsigned HOST_WIDE_INT align
;
809 /* Do not try to shrink the size if the RM size is not constant. */
810 if (TYPE_CONTAINS_TEMPLATE_P (type
)
811 || !tree_fits_uhwi_p (TYPE_ADA_SIZE (type
)))
814 /* Round the RM size up to a unit boundary to get the minimal size
815 for a BLKmode record. Give up if it's already the size. */
816 new_size
= tree_to_uhwi (TYPE_ADA_SIZE (type
));
817 new_size
= (new_size
+ BITS_PER_UNIT
- 1) & -BITS_PER_UNIT
;
818 if (new_size
== size
)
821 align
= new_size
& -new_size
;
822 TYPE_ALIGN (new_type
) = MIN (TYPE_ALIGN (type
), align
);
825 TYPE_USER_ALIGN (new_type
) = 1;
827 /* Now copy the fields, keeping the position and size as we don't want
828 to change the layout by propagating the packedness downwards. */
829 for (old_field
= TYPE_FIELDS (type
); old_field
;
830 old_field
= DECL_CHAIN (old_field
))
832 tree new_field_type
= TREE_TYPE (old_field
);
833 tree new_field
, new_size
;
835 if (RECORD_OR_UNION_TYPE_P (new_field_type
)
836 && !TYPE_FAT_POINTER_P (new_field_type
)
837 && tree_fits_uhwi_p (TYPE_SIZE (new_field_type
)))
838 new_field_type
= make_packable_type (new_field_type
, true);
840 /* However, for the last field in a not already packed record type
841 that is of an aggregate type, we need to use the RM size in the
842 packable version of the record type, see finish_record_type. */
843 if (!DECL_CHAIN (old_field
)
844 && !TYPE_PACKED (type
)
845 && RECORD_OR_UNION_TYPE_P (new_field_type
)
846 && !TYPE_FAT_POINTER_P (new_field_type
)
847 && !TYPE_CONTAINS_TEMPLATE_P (new_field_type
)
848 && TYPE_ADA_SIZE (new_field_type
))
849 new_size
= TYPE_ADA_SIZE (new_field_type
);
851 new_size
= DECL_SIZE (old_field
);
854 = create_field_decl (DECL_NAME (old_field
), new_field_type
, new_type
,
855 new_size
, bit_position (old_field
),
857 !DECL_NONADDRESSABLE_P (old_field
));
859 DECL_INTERNAL_P (new_field
) = DECL_INTERNAL_P (old_field
);
860 SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field
, old_field
);
861 if (TREE_CODE (new_type
) == QUAL_UNION_TYPE
)
862 DECL_QUALIFIER (new_field
) = DECL_QUALIFIER (old_field
);
864 DECL_CHAIN (new_field
) = field_list
;
865 field_list
= new_field
;
868 finish_record_type (new_type
, nreverse (field_list
), 2, false);
869 relate_alias_sets (new_type
, type
, ALIAS_SET_COPY
);
870 if (TYPE_STUB_DECL (type
))
871 SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type
),
872 DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type
)));
874 /* If this is a padding record, we never want to make the size smaller
875 than what was specified. For QUAL_UNION_TYPE, also copy the size. */
876 if (TYPE_IS_PADDING_P (type
) || TREE_CODE (type
) == QUAL_UNION_TYPE
)
878 TYPE_SIZE (new_type
) = TYPE_SIZE (type
);
879 TYPE_SIZE_UNIT (new_type
) = TYPE_SIZE_UNIT (type
);
884 TYPE_SIZE (new_type
) = bitsize_int (new_size
);
885 TYPE_SIZE_UNIT (new_type
)
886 = size_int ((new_size
+ BITS_PER_UNIT
- 1) / BITS_PER_UNIT
);
889 if (!TYPE_CONTAINS_TEMPLATE_P (type
))
890 SET_TYPE_ADA_SIZE (new_type
, TYPE_ADA_SIZE (type
));
892 compute_record_mode (new_type
);
894 /* Try harder to get a packable type if necessary, for example
895 in case the record itself contains a BLKmode field. */
896 if (in_record
&& TYPE_MODE (new_type
) == BLKmode
)
897 SET_TYPE_MODE (new_type
,
898 mode_for_size_tree (TYPE_SIZE (new_type
), MODE_INT
, 1));
900 /* If neither the mode nor the size has shrunk, return the old type. */
901 if (TYPE_MODE (new_type
) == BLKmode
&& new_size
>= size
)
/* NOTE(review): line-wrapped extract; missing interior lines include the
   switch case labels, the declarations of biased_p/new_type, several
   returns, and default-case handling.  Visible logic: for integer-like
   types build a signed/unsigned type of the requested precision copying
   bounds/name/RM size; for fat pointers possibly return a thin pointer to
   the object record; for thin pointers possibly return the fat pointer.  */
907 /* Given a type TYPE, return a new type whose size is appropriate for SIZE.
908 If TYPE is the best type, return it. Otherwise, make a new type. We
909 only support new integral and pointer types. FOR_BIASED is true if
910 we are making a biased type. */
913 make_type_from_size (tree type
, tree size_tree
, bool for_biased
)
915 unsigned HOST_WIDE_INT size
;
919 /* If size indicates an error, just return TYPE to avoid propagating
920 the error. Likewise if it's too large to represent. */
921 if (!size_tree
|| !tree_fits_uhwi_p (size_tree
))
924 size
= tree_to_uhwi (size_tree
);
926 switch (TREE_CODE (type
))
931 biased_p
= (TREE_CODE (type
) == INTEGER_TYPE
932 && TYPE_BIASED_REPRESENTATION_P (type
));
934 /* Integer types with precision 0 are forbidden. */
938 /* Only do something if the type isn't a packed array type and doesn't
939 already have the proper size and the size isn't too large. */
940 if (TYPE_IS_PACKED_ARRAY_TYPE_P (type
)
941 || (TYPE_PRECISION (type
) == size
&& biased_p
== for_biased
)
942 || size
> LONG_LONG_TYPE_SIZE
)
945 biased_p
|= for_biased
;
946 if (TYPE_UNSIGNED (type
) || biased_p
)
947 new_type
= make_unsigned_type (size
);
949 new_type
= make_signed_type (size
);
950 TREE_TYPE (new_type
) = TREE_TYPE (type
) ? TREE_TYPE (type
) : type
;
951 SET_TYPE_RM_MIN_VALUE (new_type
,
952 convert (TREE_TYPE (new_type
),
953 TYPE_MIN_VALUE (type
)));
954 SET_TYPE_RM_MAX_VALUE (new_type
,
955 convert (TREE_TYPE (new_type
),
956 TYPE_MAX_VALUE (type
)));
957 /* Copy the name to show that it's essentially the same type and
958 not a subrange type. */
959 TYPE_NAME (new_type
) = TYPE_NAME (type
);
960 TYPE_BIASED_REPRESENTATION_P (new_type
) = biased_p
;
961 SET_TYPE_RM_SIZE (new_type
, bitsize_int (size
));
965 /* Do something if this is a fat pointer, in which case we
966 may need to return the thin pointer. */
967 if (TYPE_FAT_POINTER_P (type
) && size
< POINTER_SIZE
* 2)
969 enum machine_mode p_mode
= mode_for_size (size
, MODE_INT
, 0);
970 if (!targetm
.valid_pointer_mode (p_mode
))
973 build_pointer_type_for_mode
974 (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type
)),
980 /* Only do something if this is a thin pointer, in which case we
981 may need to return the fat pointer. */
982 if (TYPE_IS_THIN_POINTER_P (type
) && size
>= POINTER_SIZE
* 2)
984 build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type
)));
/* NOTE(review): if_marked callback for the pad_type_hash_table GC hash
   table declared earlier; keeps an entry only while its type tree is still
   marked by the garbage collector.  Line-wrapped extract (return type line
   missing).  */
994 /* See if the data pointed to by the hash table slot is marked. */
997 pad_type_hash_marked_p (const void *p
)
999 const_tree
const type
= ((const struct pad_type_hash
*) p
)->type
;
1001 return ggc_marked_p (type
);
1004 /* Return the cached hash value. */
1007 pad_type_hash_hash (const void *p
)
1009 return ((const struct pad_type_hash
*) p
)->hash
;
1012 /* Return 1 iff the padded types are equivalent. */
1015 pad_type_hash_eq (const void *p1
, const void *p2
)
1017 const struct pad_type_hash
*const t1
= (const struct pad_type_hash
*) p1
;
1018 const struct pad_type_hash
*const t2
= (const struct pad_type_hash
*) p2
;
1021 if (t1
->hash
!= t2
->hash
)
1027 /* We consider that the padded types are equivalent if they pad the same
1028 type and have the same size, alignment and RM size. Taking the mode
1029 into account is redundant since it is determined by the others. */
1031 TREE_TYPE (TYPE_FIELDS (type1
)) == TREE_TYPE (TYPE_FIELDS (type2
))
1032 && TYPE_SIZE (type1
) == TYPE_SIZE (type2
)
1033 && TYPE_ALIGN (type1
) == TYPE_ALIGN (type2
)
1034 && TYPE_ADA_SIZE (type1
) == TYPE_ADA_SIZE (type2
);
1037 /* Ensure that TYPE has SIZE and ALIGN. Make and return a new padded type
1038 if needed. We have already verified that SIZE and TYPE are large enough.
1039 GNAT_ENTITY is used to name the resulting record and to issue a warning.
1040 IS_COMPONENT_TYPE is true if this is being done for the component type of
1041 an array. IS_USER_TYPE is true if the original type needs to be completed.
1042 DEFINITION is true if this type is being defined. SET_RM_SIZE is true if
1043 the RM size of the resulting type is to be set to SIZE too. */
1046 maybe_pad_type (tree type
, tree size
, unsigned int align
,
1047 Entity_Id gnat_entity
, bool is_component_type
,
1048 bool is_user_type
, bool definition
, bool set_rm_size
)
1050 tree orig_size
= TYPE_SIZE (type
);
1051 unsigned int orig_align
= TYPE_ALIGN (type
);
1054 /* If TYPE is a padded type, see if it agrees with any size and alignment
1055 we were given. If so, return the original type. Otherwise, strip
1056 off the padding, since we will either be returning the inner type
1057 or repadding it. If no size or alignment is specified, use that of
1058 the original padded type. */
1059 if (TYPE_IS_PADDING_P (type
))
1062 || operand_equal_p (round_up (size
, orig_align
), orig_size
, 0))
1063 && (align
== 0 || align
== orig_align
))
1071 type
= TREE_TYPE (TYPE_FIELDS (type
));
1072 orig_size
= TYPE_SIZE (type
);
1073 orig_align
= TYPE_ALIGN (type
);
1076 /* If the size is either not being changed or is being made smaller (which
1077 is not done here and is only valid for bitfields anyway), show the size
1078 isn't changing. Likewise, clear the alignment if it isn't being
1079 changed. Then return if we aren't doing anything. */
1081 && (operand_equal_p (size
, orig_size
, 0)
1082 || (TREE_CODE (orig_size
) == INTEGER_CST
1083 && tree_int_cst_lt (size
, orig_size
))))
1086 if (align
== orig_align
)
1089 if (align
== 0 && !size
)
1092 /* If requested, complete the original type and give it a name. */
1094 create_type_decl (get_entity_name (gnat_entity
), type
,
1095 !Comes_From_Source (gnat_entity
),
1097 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
1098 && DECL_IGNORED_P (TYPE_NAME (type
))),
1101 /* We used to modify the record in place in some cases, but that could
1102 generate incorrect debugging information. So make a new record
1104 record
= make_node (RECORD_TYPE
);
1105 TYPE_PADDING_P (record
) = 1;
1107 if (Present (gnat_entity
))
1108 TYPE_NAME (record
) = create_concat_name (gnat_entity
, "PAD");
1110 TYPE_ALIGN (record
) = align
? align
: orig_align
;
1111 TYPE_SIZE (record
) = size
? size
: orig_size
;
1112 TYPE_SIZE_UNIT (record
)
1113 = convert (sizetype
,
1114 size_binop (CEIL_DIV_EXPR
, TYPE_SIZE (record
),
1115 bitsize_unit_node
));
1117 /* If we are changing the alignment and the input type is a record with
1118 BLKmode and a small constant size, try to make a form that has an
1119 integral mode. This might allow the padding record to also have an
1120 integral mode, which will be much more efficient. There is no point
1121 in doing so if a size is specified unless it is also a small constant
1122 size and it is incorrect to do so if we cannot guarantee that the mode
1123 will be naturally aligned since the field must always be addressable.
1125 ??? This might not always be a win when done for a stand-alone object:
1126 since the nominal and the effective type of the object will now have
1127 different modes, a VIEW_CONVERT_EXPR will be required for converting
1128 between them and it might be hard to overcome afterwards, including
1129 at the RTL level when the stand-alone object is accessed as a whole. */
1131 && RECORD_OR_UNION_TYPE_P (type
)
1132 && TYPE_MODE (type
) == BLKmode
1133 && !TYPE_BY_REFERENCE_P (type
)
1134 && TREE_CODE (orig_size
) == INTEGER_CST
1135 && !TREE_OVERFLOW (orig_size
)
1136 && compare_tree_int (orig_size
, MAX_FIXED_MODE_SIZE
) <= 0
1138 || (TREE_CODE (size
) == INTEGER_CST
1139 && compare_tree_int (size
, MAX_FIXED_MODE_SIZE
) <= 0)))
1141 tree packable_type
= make_packable_type (type
, true);
1142 if (TYPE_MODE (packable_type
) != BLKmode
1143 && align
>= TYPE_ALIGN (packable_type
))
1144 type
= packable_type
;
1147 /* Now create the field with the original size. */
1148 field
= create_field_decl (get_identifier ("F"), type
, record
, orig_size
,
1149 bitsize_zero_node
, 0, 1);
1150 DECL_INTERNAL_P (field
) = 1;
1152 /* Do not emit debug info until after the auxiliary record is built. */
1153 finish_record_type (record
, field
, 1, false);
1155 /* Set the RM size if requested. */
1158 SET_TYPE_ADA_SIZE (record
, size
? size
: orig_size
);
1160 /* If the padded type is complete and has constant size, we canonicalize
1161 it by means of the hash table. This is consistent with the language
1162 semantics and ensures that gigi and the middle-end have a common view
1163 of these padded types. */
1164 if (TREE_CONSTANT (TYPE_SIZE (record
)))
1167 struct pad_type_hash in
, *h
;
1170 hashcode
= iterative_hash_object (TYPE_HASH (type
), 0);
1171 hashcode
= iterative_hash_expr (TYPE_SIZE (record
), hashcode
);
1172 hashcode
= iterative_hash_hashval_t (TYPE_ALIGN (record
), hashcode
);
1173 hashcode
= iterative_hash_expr (TYPE_ADA_SIZE (record
), hashcode
);
1177 h
= (struct pad_type_hash
*)
1178 htab_find_with_hash (pad_type_hash_table
, &in
, hashcode
);
1185 h
= ggc_alloc
<pad_type_hash
> ();
1188 loc
= htab_find_slot_with_hash (pad_type_hash_table
, h
, hashcode
,
1194 /* Unless debugging information isn't being written for the input type,
1195 write a record that shows what we are a subtype of and also make a
1196 variable that indicates our size, if still variable. */
1197 if (TREE_CODE (orig_size
) != INTEGER_CST
1198 && TYPE_NAME (record
)
1200 && !(TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
1201 && DECL_IGNORED_P (TYPE_NAME (type
))))
1203 tree marker
= make_node (RECORD_TYPE
);
1204 tree name
= TYPE_IDENTIFIER (record
);
1205 tree orig_name
= TYPE_IDENTIFIER (type
);
1207 TYPE_NAME (marker
) = concat_name (name
, "XVS");
1208 finish_record_type (marker
,
1209 create_field_decl (orig_name
,
1210 build_reference_type (type
),
1211 marker
, NULL_TREE
, NULL_TREE
,
1215 add_parallel_type (record
, marker
);
1217 if (definition
&& size
&& TREE_CODE (size
) != INTEGER_CST
)
1218 TYPE_SIZE_UNIT (marker
)
1219 = create_var_decl (concat_name (name
, "XVZ"), NULL_TREE
, sizetype
,
1220 TYPE_SIZE_UNIT (record
), false, false, false,
1221 false, NULL
, gnat_entity
);
1224 rest_of_record_type_compilation (record
);
1227 /* If the size was widened explicitly, maybe give a warning. Take the
1228 original size as the maximum size of the input if there was an
1229 unconstrained record involved and round it up to the specified alignment,
1230 if one was specified. But don't do it if we are just annotating types
1231 and the type is tagged, since tagged types aren't fully laid out in this
1234 || TREE_CODE (size
) == COND_EXPR
1235 || TREE_CODE (size
) == MAX_EXPR
1237 || (type_annotate_only
&& Is_Tagged_Type (Etype (gnat_entity
))))
1240 if (CONTAINS_PLACEHOLDER_P (orig_size
))
1241 orig_size
= max_size (orig_size
, true);
1244 orig_size
= round_up (orig_size
, align
);
1246 if (!operand_equal_p (size
, orig_size
, 0)
1247 && !(TREE_CODE (size
) == INTEGER_CST
1248 && TREE_CODE (orig_size
) == INTEGER_CST
1249 && (TREE_OVERFLOW (size
)
1250 || TREE_OVERFLOW (orig_size
)
1251 || tree_int_cst_lt (size
, orig_size
))))
1253 Node_Id gnat_error_node
= Empty
;
1255 if (Is_Packed_Array_Type (gnat_entity
))
1256 gnat_entity
= Original_Array_Type (gnat_entity
);
1258 if ((Ekind (gnat_entity
) == E_Component
1259 || Ekind (gnat_entity
) == E_Discriminant
)
1260 && Present (Component_Clause (gnat_entity
)))
1261 gnat_error_node
= Last_Bit (Component_Clause (gnat_entity
));
1262 else if (Present (Size_Clause (gnat_entity
)))
1263 gnat_error_node
= Expression (Size_Clause (gnat_entity
));
1265 /* Generate message only for entities that come from source, since
1266 if we have an entity created by expansion, the message will be
1267 generated for some other corresponding source entity. */
1268 if (Comes_From_Source (gnat_entity
))
1270 if (Present (gnat_error_node
))
1271 post_error_ne_tree ("{^ }bits of & unused?",
1272 gnat_error_node
, gnat_entity
,
1273 size_diffop (size
, orig_size
));
1274 else if (is_component_type
)
1275 post_error_ne_tree ("component of& padded{ by ^ bits}?",
1276 gnat_entity
, gnat_entity
,
1277 size_diffop (size
, orig_size
));
1284 /* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
1285 If this is a multi-dimensional array type, do this recursively.
1288 - ALIAS_SET_COPY: the new set is made a copy of the old one.
1289 - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
1290 - ALIAS_SET_SUBSET: the new set is made a subset of the old one. */
1293 relate_alias_sets (tree gnu_new_type
, tree gnu_old_type
, enum alias_set_op op
)
1295 /* Remove any padding from GNU_OLD_TYPE. It doesn't matter in the case
1296 of a one-dimensional array, since the padding has the same alias set
1297 as the field type, but if it's a multi-dimensional array, we need to
1298 see the inner types. */
1299 while (TREE_CODE (gnu_old_type
) == RECORD_TYPE
1300 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type
)
1301 || TYPE_PADDING_P (gnu_old_type
)))
1302 gnu_old_type
= TREE_TYPE (TYPE_FIELDS (gnu_old_type
));
1304 /* Unconstrained array types are deemed incomplete and would thus be given
1305 alias set 0. Retrieve the underlying array type. */
1306 if (TREE_CODE (gnu_old_type
) == UNCONSTRAINED_ARRAY_TYPE
)
1308 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type
))));
1309 if (TREE_CODE (gnu_new_type
) == UNCONSTRAINED_ARRAY_TYPE
)
1311 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type
))));
1313 if (TREE_CODE (gnu_new_type
) == ARRAY_TYPE
1314 && TREE_CODE (TREE_TYPE (gnu_new_type
)) == ARRAY_TYPE
1315 && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type
)))
1316 relate_alias_sets (TREE_TYPE (gnu_new_type
), TREE_TYPE (gnu_old_type
), op
);
1320 case ALIAS_SET_COPY
:
1321 /* The alias set shouldn't be copied between array types with different
1322 aliasing settings because this can break the aliasing relationship
1323 between the array type and its element type. */
1324 #ifndef ENABLE_CHECKING
1325 if (flag_strict_aliasing
)
1327 gcc_assert (!(TREE_CODE (gnu_new_type
) == ARRAY_TYPE
1328 && TREE_CODE (gnu_old_type
) == ARRAY_TYPE
1329 && TYPE_NONALIASED_COMPONENT (gnu_new_type
)
1330 != TYPE_NONALIASED_COMPONENT (gnu_old_type
)));
1332 TYPE_ALIAS_SET (gnu_new_type
) = get_alias_set (gnu_old_type
);
1335 case ALIAS_SET_SUBSET
:
1336 case ALIAS_SET_SUPERSET
:
1338 alias_set_type old_set
= get_alias_set (gnu_old_type
);
1339 alias_set_type new_set
= get_alias_set (gnu_new_type
);
1341 /* Do nothing if the alias sets conflict. This ensures that we
1342 never call record_alias_subset several times for the same pair
1343 or at all for alias set 0. */
1344 if (!alias_sets_conflict_p (old_set
, new_set
))
1346 if (op
== ALIAS_SET_SUBSET
)
1347 record_alias_subset (old_set
, new_set
);
1349 record_alias_subset (new_set
, old_set
);
1358 record_component_aliases (gnu_new_type
);
1361 /* Record TYPE as a builtin type for Ada. NAME is the name of the type.
1362 ARTIFICIAL_P is true if it's a type that was generated by the compiler. */
1365 record_builtin_type (const char *name
, tree type
, bool artificial_p
)
1367 tree type_decl
= build_decl (input_location
,
1368 TYPE_DECL
, get_identifier (name
), type
);
1369 DECL_ARTIFICIAL (type_decl
) = artificial_p
;
1370 TYPE_ARTIFICIAL (type
) = artificial_p
;
1371 gnat_pushdecl (type_decl
, Empty
);
1373 if (debug_hooks
->type_decl
)
1374 debug_hooks
->type_decl (type_decl
, false);
1377 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1378 finish constructing the record type as a fat pointer type. */
1381 finish_fat_pointer_type (tree record_type
, tree field_list
)
1383 /* Make sure we can put it into a register. */
1384 if (STRICT_ALIGNMENT
)
1385 TYPE_ALIGN (record_type
) = MIN (BIGGEST_ALIGNMENT
, 2 * POINTER_SIZE
);
1387 /* Show what it really is. */
1388 TYPE_FAT_POINTER_P (record_type
) = 1;
1390 /* Do not emit debug info for it since the types of its fields may still be
1391 incomplete at this point. */
1392 finish_record_type (record_type
, field_list
, 0, false);
1394 /* Force type_contains_placeholder_p to return true on it. Although the
1395 PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
1396 type but the representation of the unconstrained array. */
1397 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type
) = 2;
1400 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1401 finish constructing the record or union type. If REP_LEVEL is zero, this
1402 record has no representation clause and so will be entirely laid out here.
1403 If REP_LEVEL is one, this record has a representation clause and has been
1404 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1405 this record is derived from a parent record and thus inherits its layout;
1406 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
1407 we need to write debug information about this type. */
1410 finish_record_type (tree record_type
, tree field_list
, int rep_level
,
1413 enum tree_code code
= TREE_CODE (record_type
);
1414 tree name
= TYPE_IDENTIFIER (record_type
);
1415 tree ada_size
= bitsize_zero_node
;
1416 tree size
= bitsize_zero_node
;
1417 bool had_size
= TYPE_SIZE (record_type
) != 0;
1418 bool had_size_unit
= TYPE_SIZE_UNIT (record_type
) != 0;
1419 bool had_align
= TYPE_ALIGN (record_type
) != 0;
1422 TYPE_FIELDS (record_type
) = field_list
;
1424 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
1425 generate debug info and have a parallel type. */
1426 TYPE_STUB_DECL (record_type
) = create_type_stub_decl (name
, record_type
);
1428 /* Globally initialize the record first. If this is a rep'ed record,
1429 that just means some initializations; otherwise, layout the record. */
1432 TYPE_ALIGN (record_type
) = MAX (BITS_PER_UNIT
, TYPE_ALIGN (record_type
));
1435 TYPE_SIZE_UNIT (record_type
) = size_zero_node
;
1438 TYPE_SIZE (record_type
) = bitsize_zero_node
;
1440 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
1441 out just like a UNION_TYPE, since the size will be fixed. */
1442 else if (code
== QUAL_UNION_TYPE
)
1447 /* Ensure there isn't a size already set. There can be in an error
1448 case where there is a rep clause but all fields have errors and
1449 no longer have a position. */
1450 TYPE_SIZE (record_type
) = 0;
1452 /* Ensure we use the traditional GCC layout for bitfields when we need
1453 to pack the record type or have a representation clause. The other
1454 possible layout (Microsoft C compiler), if available, would prevent
1455 efficient packing in almost all cases. */
1456 #ifdef TARGET_MS_BITFIELD_LAYOUT
1457 if (TARGET_MS_BITFIELD_LAYOUT
&& TYPE_PACKED (record_type
))
1458 decl_attributes (&record_type
,
1459 tree_cons (get_identifier ("gcc_struct"),
1460 NULL_TREE
, NULL_TREE
),
1461 ATTR_FLAG_TYPE_IN_PLACE
);
1464 layout_type (record_type
);
1467 /* At this point, the position and size of each field is known. It was
1468 either set before entry by a rep clause, or by laying out the type above.
1470 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
1471 to compute the Ada size; the GCC size and alignment (for rep'ed records
1472 that are not padding types); and the mode (for rep'ed records). We also
1473 clear the DECL_BIT_FIELD indication for the cases we know have not been
1474 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
1476 if (code
== QUAL_UNION_TYPE
)
1477 field_list
= nreverse (field_list
);
1479 for (field
= field_list
; field
; field
= DECL_CHAIN (field
))
1481 tree type
= TREE_TYPE (field
);
1482 tree pos
= bit_position (field
);
1483 tree this_size
= DECL_SIZE (field
);
1486 if (RECORD_OR_UNION_TYPE_P (type
)
1487 && !TYPE_FAT_POINTER_P (type
)
1488 && !TYPE_CONTAINS_TEMPLATE_P (type
)
1489 && TYPE_ADA_SIZE (type
))
1490 this_ada_size
= TYPE_ADA_SIZE (type
);
1492 this_ada_size
= this_size
;
1494 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
1495 if (DECL_BIT_FIELD (field
)
1496 && operand_equal_p (this_size
, TYPE_SIZE (type
), 0))
1498 unsigned int align
= TYPE_ALIGN (type
);
1500 /* In the general case, type alignment is required. */
1501 if (value_factor_p (pos
, align
))
1503 /* The enclosing record type must be sufficiently aligned.
1504 Otherwise, if no alignment was specified for it and it
1505 has been laid out already, bump its alignment to the
1506 desired one if this is compatible with its size. */
1507 if (TYPE_ALIGN (record_type
) >= align
)
1509 DECL_ALIGN (field
) = MAX (DECL_ALIGN (field
), align
);
1510 DECL_BIT_FIELD (field
) = 0;
1514 && value_factor_p (TYPE_SIZE (record_type
), align
))
1516 TYPE_ALIGN (record_type
) = align
;
1517 DECL_ALIGN (field
) = MAX (DECL_ALIGN (field
), align
);
1518 DECL_BIT_FIELD (field
) = 0;
1522 /* In the non-strict alignment case, only byte alignment is. */
1523 if (!STRICT_ALIGNMENT
1524 && DECL_BIT_FIELD (field
)
1525 && value_factor_p (pos
, BITS_PER_UNIT
))
1526 DECL_BIT_FIELD (field
) = 0;
1529 /* If we still have DECL_BIT_FIELD set at this point, we know that the
1530 field is technically not addressable. Except that it can actually
1531 be addressed if it is BLKmode and happens to be properly aligned. */
1532 if (DECL_BIT_FIELD (field
)
1533 && !(DECL_MODE (field
) == BLKmode
1534 && value_factor_p (pos
, BITS_PER_UNIT
)))
1535 DECL_NONADDRESSABLE_P (field
) = 1;
1537 /* A type must be as aligned as its most aligned field that is not
1538 a bit-field. But this is already enforced by layout_type. */
1539 if (rep_level
> 0 && !DECL_BIT_FIELD (field
))
1540 TYPE_ALIGN (record_type
)
1541 = MAX (TYPE_ALIGN (record_type
), DECL_ALIGN (field
));
1546 ada_size
= size_binop (MAX_EXPR
, ada_size
, this_ada_size
);
1547 size
= size_binop (MAX_EXPR
, size
, this_size
);
1550 case QUAL_UNION_TYPE
:
1552 = fold_build3 (COND_EXPR
, bitsizetype
, DECL_QUALIFIER (field
),
1553 this_ada_size
, ada_size
);
1554 size
= fold_build3 (COND_EXPR
, bitsizetype
, DECL_QUALIFIER (field
),
1559 /* Since we know here that all fields are sorted in order of
1560 increasing bit position, the size of the record is one
1561 higher than the ending bit of the last field processed
1562 unless we have a rep clause, since in that case we might
1563 have a field outside a QUAL_UNION_TYPE that has a higher ending
1564 position. So use a MAX in that case. Also, if this field is a
1565 QUAL_UNION_TYPE, we need to take into account the previous size in
1566 the case of empty variants. */
1568 = merge_sizes (ada_size
, pos
, this_ada_size
,
1569 TREE_CODE (type
) == QUAL_UNION_TYPE
, rep_level
> 0);
1571 = merge_sizes (size
, pos
, this_size
,
1572 TREE_CODE (type
) == QUAL_UNION_TYPE
, rep_level
> 0);
1580 if (code
== QUAL_UNION_TYPE
)
1581 nreverse (field_list
);
1585 /* If this is a padding record, we never want to make the size smaller
1586 than what was specified in it, if any. */
1587 if (TYPE_IS_PADDING_P (record_type
) && TYPE_SIZE (record_type
))
1588 size
= TYPE_SIZE (record_type
);
1590 /* Now set any of the values we've just computed that apply. */
1591 if (!TYPE_FAT_POINTER_P (record_type
)
1592 && !TYPE_CONTAINS_TEMPLATE_P (record_type
))
1593 SET_TYPE_ADA_SIZE (record_type
, ada_size
);
1597 tree size_unit
= had_size_unit
1598 ? TYPE_SIZE_UNIT (record_type
)
1599 : convert (sizetype
,
1600 size_binop (CEIL_DIV_EXPR
, size
,
1601 bitsize_unit_node
));
1602 unsigned int align
= TYPE_ALIGN (record_type
);
1604 TYPE_SIZE (record_type
) = variable_size (round_up (size
, align
));
1605 TYPE_SIZE_UNIT (record_type
)
1606 = variable_size (round_up (size_unit
, align
/ BITS_PER_UNIT
));
1608 compute_record_mode (record_type
);
1613 rest_of_record_type_compilation (record_type
);
1616 /* Append PARALLEL_TYPE on the chain of parallel types of TYPE. */
1619 add_parallel_type (tree type
, tree parallel_type
)
1621 tree decl
= TYPE_STUB_DECL (type
);
1623 while (DECL_PARALLEL_TYPE (decl
))
1624 decl
= TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl
));
1626 SET_DECL_PARALLEL_TYPE (decl
, parallel_type
);
1629 /* Return true if TYPE has a parallel type. */
1632 has_parallel_type (tree type
)
1634 tree decl
= TYPE_STUB_DECL (type
);
1636 return DECL_PARALLEL_TYPE (decl
) != NULL_TREE
;
1639 /* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
1640 associated with it. It need not be invoked directly in most cases since
1641 finish_record_type takes care of doing so, but this can be necessary if
1642 a parallel type is to be attached to the record type. */
1645 rest_of_record_type_compilation (tree record_type
)
1647 bool var_size
= false;
1650 /* If this is a padded type, the bulk of the debug info has already been
1651 generated for the field's type. */
1652 if (TYPE_IS_PADDING_P (record_type
))
1655 /* If the type already has a parallel type (XVS type), then we're done. */
1656 if (has_parallel_type (record_type
))
1659 for (field
= TYPE_FIELDS (record_type
); field
; field
= DECL_CHAIN (field
))
1661 /* We need to make an XVE/XVU record if any field has variable size,
1662 whether or not the record does. For example, if we have a union,
1663 it may be that all fields, rounded up to the alignment, have the
1664 same size, in which case we'll use that size. But the debug
1665 output routines (except Dwarf2) won't be able to output the fields,
1666 so we need to make the special record. */
1667 if (TREE_CODE (DECL_SIZE (field
)) != INTEGER_CST
1668 /* If a field has a non-constant qualifier, the record will have
1669 variable size too. */
1670 || (TREE_CODE (record_type
) == QUAL_UNION_TYPE
1671 && TREE_CODE (DECL_QUALIFIER (field
)) != INTEGER_CST
))
1678 /* If this record type is of variable size, make a parallel record type that
1679 will tell the debugger how the former is laid out (see exp_dbug.ads). */
1682 tree new_record_type
1683 = make_node (TREE_CODE (record_type
) == QUAL_UNION_TYPE
1684 ? UNION_TYPE
: TREE_CODE (record_type
));
1685 tree orig_name
= TYPE_IDENTIFIER (record_type
), new_name
;
1686 tree last_pos
= bitsize_zero_node
;
1687 tree old_field
, prev_old_field
= NULL_TREE
;
1690 = concat_name (orig_name
, TREE_CODE (record_type
) == QUAL_UNION_TYPE
1692 TYPE_NAME (new_record_type
) = new_name
;
1693 TYPE_ALIGN (new_record_type
) = BIGGEST_ALIGNMENT
;
1694 TYPE_STUB_DECL (new_record_type
)
1695 = create_type_stub_decl (new_name
, new_record_type
);
1696 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type
))
1697 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type
));
1698 TYPE_SIZE (new_record_type
) = size_int (TYPE_ALIGN (record_type
));
1699 TYPE_SIZE_UNIT (new_record_type
)
1700 = size_int (TYPE_ALIGN (record_type
) / BITS_PER_UNIT
);
1702 /* Now scan all the fields, replacing each field with a new field
1703 corresponding to the new encoding. */
1704 for (old_field
= TYPE_FIELDS (record_type
); old_field
;
1705 old_field
= DECL_CHAIN (old_field
))
1707 tree field_type
= TREE_TYPE (old_field
);
1708 tree field_name
= DECL_NAME (old_field
);
1709 tree curpos
= bit_position (old_field
);
1710 tree pos
, new_field
;
1712 unsigned int align
= 0;
1714 /* We're going to do some pattern matching below so remove as many
1715 conversions as possible. */
1716 curpos
= remove_conversions (curpos
, true);
1718 /* See how the position was modified from the last position.
1720 There are two basic cases we support: a value was added
1721 to the last position or the last position was rounded to
1722 a boundary and they something was added. Check for the
1723 first case first. If not, see if there is any evidence
1724 of rounding. If so, round the last position and retry.
1726 If this is a union, the position can be taken as zero. */
1727 if (TREE_CODE (new_record_type
) == UNION_TYPE
)
1728 pos
= bitsize_zero_node
;
1730 pos
= compute_related_constant (curpos
, last_pos
);
1733 && TREE_CODE (curpos
) == MULT_EXPR
1734 && tree_fits_uhwi_p (TREE_OPERAND (curpos
, 1)))
1736 tree offset
= TREE_OPERAND (curpos
, 0);
1737 align
= tree_to_uhwi (TREE_OPERAND (curpos
, 1));
1738 align
= scale_by_factor_of (offset
, align
);
1739 last_pos
= round_up (last_pos
, align
);
1740 pos
= compute_related_constant (curpos
, last_pos
);
1743 && TREE_CODE (curpos
) == PLUS_EXPR
1744 && tree_fits_uhwi_p (TREE_OPERAND (curpos
, 1))
1745 && TREE_CODE (TREE_OPERAND (curpos
, 0)) == MULT_EXPR
1747 (TREE_OPERAND (TREE_OPERAND (curpos
, 0), 1)))
1749 tree offset
= TREE_OPERAND (TREE_OPERAND (curpos
, 0), 0);
1750 unsigned HOST_WIDE_INT addend
1751 = tree_to_uhwi (TREE_OPERAND (curpos
, 1));
1753 = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos
, 0), 1));
1754 align
= scale_by_factor_of (offset
, align
);
1755 align
= MIN (align
, addend
& -addend
);
1756 last_pos
= round_up (last_pos
, align
);
1757 pos
= compute_related_constant (curpos
, last_pos
);
1759 else if (potential_alignment_gap (prev_old_field
, old_field
, pos
))
1761 align
= TYPE_ALIGN (field_type
);
1762 last_pos
= round_up (last_pos
, align
);
1763 pos
= compute_related_constant (curpos
, last_pos
);
1766 /* If we can't compute a position, set it to zero.
1768 ??? We really should abort here, but it's too much work
1769 to get this correct for all cases. */
1771 pos
= bitsize_zero_node
;
1773 /* See if this type is variable-sized and make a pointer type
1774 and indicate the indirection if so. Beware that the debug
1775 back-end may adjust the position computed above according
1776 to the alignment of the field type, i.e. the pointer type
1777 in this case, if we don't preventively counter that. */
1778 if (TREE_CODE (DECL_SIZE (old_field
)) != INTEGER_CST
)
1780 field_type
= build_pointer_type (field_type
);
1781 if (align
!= 0 && TYPE_ALIGN (field_type
) > align
)
1783 field_type
= copy_node (field_type
);
1784 TYPE_ALIGN (field_type
) = align
;
1789 /* Make a new field name, if necessary. */
1790 if (var
|| align
!= 0)
1795 sprintf (suffix
, "XV%c%u", var
? 'L' : 'A',
1796 align
/ BITS_PER_UNIT
);
1798 strcpy (suffix
, "XVL");
1800 field_name
= concat_name (field_name
, suffix
);
1804 = create_field_decl (field_name
, field_type
, new_record_type
,
1805 DECL_SIZE (old_field
), pos
, 0, 0);
1806 DECL_CHAIN (new_field
) = TYPE_FIELDS (new_record_type
);
1807 TYPE_FIELDS (new_record_type
) = new_field
;
1809 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1810 zero. The only time it's not the last field of the record
1811 is when there are other components at fixed positions after
1812 it (meaning there was a rep clause for every field) and we
1813 want to be able to encode them. */
1814 last_pos
= size_binop (PLUS_EXPR
, bit_position (old_field
),
1815 (TREE_CODE (TREE_TYPE (old_field
))
1818 : DECL_SIZE (old_field
));
1819 prev_old_field
= old_field
;
1822 TYPE_FIELDS (new_record_type
) = nreverse (TYPE_FIELDS (new_record_type
));
1824 add_parallel_type (record_type
, new_record_type
);
1828 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1829 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
1830 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
1831 replace a value of zero with the old size. If HAS_REP is true, we take the
1832 MAX of the end position of this field with LAST_SIZE. In all other cases,
1833 we use FIRST_BIT plus SIZE. Return an expression for the size. */
1836 merge_sizes (tree last_size
, tree first_bit
, tree size
, bool special
,
1839 tree type
= TREE_TYPE (last_size
);
1842 if (!special
|| TREE_CODE (size
) != COND_EXPR
)
1844 new_size
= size_binop (PLUS_EXPR
, first_bit
, size
);
1846 new_size
= size_binop (MAX_EXPR
, last_size
, new_size
);
1850 new_size
= fold_build3 (COND_EXPR
, type
, TREE_OPERAND (size
, 0),
1851 integer_zerop (TREE_OPERAND (size
, 1))
1852 ? last_size
: merge_sizes (last_size
, first_bit
,
1853 TREE_OPERAND (size
, 1),
1855 integer_zerop (TREE_OPERAND (size
, 2))
1856 ? last_size
: merge_sizes (last_size
, first_bit
,
1857 TREE_OPERAND (size
, 2),
1860 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
1861 when fed through substitute_in_expr) into thinking that a constant
1862 size is not constant. */
1863 while (TREE_CODE (new_size
) == NON_LVALUE_EXPR
)
1864 new_size
= TREE_OPERAND (new_size
, 0);
1869 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1870 related by the addition of a constant. Return that constant if so. */
1873 compute_related_constant (tree op0
, tree op1
)
1875 tree op0_var
, op1_var
;
1876 tree op0_con
= split_plus (op0
, &op0_var
);
1877 tree op1_con
= split_plus (op1
, &op1_var
);
1878 tree result
= size_binop (MINUS_EXPR
, op0_con
, op1_con
);
1880 if (operand_equal_p (op0_var
, op1_var
, 0))
1882 else if (operand_equal_p (op0
, size_binop (PLUS_EXPR
, op1_var
, result
), 0))
1888 /* Utility function of above to split a tree OP which may be a sum, into a
1889 constant part, which is returned, and a variable part, which is stored
1890 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1894 split_plus (tree in
, tree
*pvar
)
1896 /* Strip conversions in order to ease the tree traversal and maximize the
1897 potential for constant or plus/minus discovery. We need to be careful
1898 to always return and set *pvar to bitsizetype trees, but it's worth
1900 in
= remove_conversions (in
, false);
1902 *pvar
= convert (bitsizetype
, in
);
1904 if (TREE_CODE (in
) == INTEGER_CST
)
1906 *pvar
= bitsize_zero_node
;
1907 return convert (bitsizetype
, in
);
1909 else if (TREE_CODE (in
) == PLUS_EXPR
|| TREE_CODE (in
) == MINUS_EXPR
)
1911 tree lhs_var
, rhs_var
;
1912 tree lhs_con
= split_plus (TREE_OPERAND (in
, 0), &lhs_var
);
1913 tree rhs_con
= split_plus (TREE_OPERAND (in
, 1), &rhs_var
);
1915 if (lhs_var
== TREE_OPERAND (in
, 0)
1916 && rhs_var
== TREE_OPERAND (in
, 1))
1917 return bitsize_zero_node
;
1919 *pvar
= size_binop (TREE_CODE (in
), lhs_var
, rhs_var
);
1920 return size_binop (TREE_CODE (in
), lhs_con
, rhs_con
);
1923 return bitsize_zero_node
;
1926 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1927 subprogram. If it is VOID_TYPE, then we are dealing with a procedure,
1928 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1929 PARM_DECL nodes that are the subprogram parameters. CICO_LIST is the
1930 copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
1931 RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
1932 object. RETURN_BY_DIRECT_REF_P is true if the function returns by direct
1933 reference. RETURN_BY_INVISI_REF_P is true if the function returns by
1934 invisible reference. */
1937 create_subprog_type (tree return_type
, tree param_decl_list
, tree cico_list
,
1938 bool return_unconstrained_p
, bool return_by_direct_ref_p
,
1939 bool return_by_invisi_ref_p
)
1941 /* A list of the data type nodes of the subprogram formal parameters.
1942 This list is generated by traversing the input list of PARM_DECL
1944 vec
<tree
, va_gc
> *param_type_list
= NULL
;
1947 for (t
= param_decl_list
; t
; t
= DECL_CHAIN (t
))
1948 vec_safe_push (param_type_list
, TREE_TYPE (t
));
1950 type
= build_function_type_vec (return_type
, param_type_list
);
1952 /* TYPE may have been shared since GCC hashes types. If it has a different
1953 CICO_LIST, make a copy. Likewise for the various flags. */
1954 if (!fntype_same_flags_p (type
, cico_list
, return_unconstrained_p
,
1955 return_by_direct_ref_p
, return_by_invisi_ref_p
))
1957 type
= copy_type (type
);
1958 TYPE_CI_CO_LIST (type
) = cico_list
;
1959 TYPE_RETURN_UNCONSTRAINED_P (type
) = return_unconstrained_p
;
1960 TYPE_RETURN_BY_DIRECT_REF_P (type
) = return_by_direct_ref_p
;
1961 TREE_ADDRESSABLE (type
) = return_by_invisi_ref_p
;
1967 /* Return a copy of TYPE but safe to modify in any way. */
1970 copy_type (tree type
)
1972 tree new_type
= copy_node (type
);
1974 /* Unshare the language-specific data. */
1975 if (TYPE_LANG_SPECIFIC (type
))
1977 TYPE_LANG_SPECIFIC (new_type
) = NULL
;
1978 SET_TYPE_LANG_SPECIFIC (new_type
, GET_TYPE_LANG_SPECIFIC (type
));
1981 /* And the contents of the language-specific slot if needed. */
1982 if ((INTEGRAL_TYPE_P (type
) || TREE_CODE (type
) == REAL_TYPE
)
1983 && TYPE_RM_VALUES (type
))
1985 TYPE_RM_VALUES (new_type
) = NULL_TREE
;
1986 SET_TYPE_RM_SIZE (new_type
, TYPE_RM_SIZE (type
));
1987 SET_TYPE_RM_MIN_VALUE (new_type
, TYPE_RM_MIN_VALUE (type
));
1988 SET_TYPE_RM_MAX_VALUE (new_type
, TYPE_RM_MAX_VALUE (type
));
1991 /* copy_node clears this field instead of copying it, because it is
1992 aliased with TREE_CHAIN. */
1993 TYPE_STUB_DECL (new_type
) = TYPE_STUB_DECL (type
);
1995 TYPE_POINTER_TO (new_type
) = 0;
1996 TYPE_REFERENCE_TO (new_type
) = 0;
1997 TYPE_MAIN_VARIANT (new_type
) = new_type
;
1998 TYPE_NEXT_VARIANT (new_type
) = 0;
2003 /* Return a subtype of sizetype with range MIN to MAX and whose
2004 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2005 of the associated TYPE_DECL. */
2008 create_index_type (tree min
, tree max
, tree index
, Node_Id gnat_node
)
2010 /* First build a type for the desired range. */
2011 tree type
= build_nonshared_range_type (sizetype
, min
, max
);
2013 /* Then set the index type. */
2014 SET_TYPE_INDEX_TYPE (type
, index
);
2015 create_type_decl (NULL_TREE
, type
, true, false, gnat_node
);
2020 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2021 sizetype is used. */
2024 create_range_type (tree type
, tree min
, tree max
)
2028 if (type
== NULL_TREE
)
2031 /* First build a type with the base range. */
2032 range_type
= build_nonshared_range_type (type
, TYPE_MIN_VALUE (type
),
2033 TYPE_MAX_VALUE (type
));
2035 /* Then set the actual range. */
2036 SET_TYPE_RM_MIN_VALUE (range_type
, convert (type
, min
));
2037 SET_TYPE_RM_MAX_VALUE (range_type
, convert (type
, max
));
2042 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
2043 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
2047 create_type_stub_decl (tree type_name
, tree type
)
2049 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
2050 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
2051 emitted in DWARF. */
2052 tree type_decl
= build_decl (input_location
, TYPE_DECL
, type_name
, type
);
2053 DECL_ARTIFICIAL (type_decl
) = 1;
2054 TYPE_ARTIFICIAL (type
) = 1;
2058 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
2059 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
2060 is a declaration that was generated by the compiler. DEBUG_INFO_P is
2061 true if we need to write debug information about this type. GNAT_NODE
2062 is used for the position of the decl. */
2065 create_type_decl (tree type_name
, tree type
, bool artificial_p
,
2066 bool debug_info_p
, Node_Id gnat_node
)
2068 enum tree_code code
= TREE_CODE (type
);
2069 bool named
= TYPE_NAME (type
) && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
;
2072 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
2073 gcc_assert (!TYPE_IS_DUMMY_P (type
));
2075 /* If the type hasn't been named yet, we're naming it; preserve an existing
2076 TYPE_STUB_DECL that has been attached to it for some purpose. */
2077 if (!named
&& TYPE_STUB_DECL (type
))
2079 type_decl
= TYPE_STUB_DECL (type
);
2080 DECL_NAME (type_decl
) = type_name
;
2083 type_decl
= build_decl (input_location
, TYPE_DECL
, type_name
, type
);
2085 DECL_ARTIFICIAL (type_decl
) = artificial_p
;
2086 TYPE_ARTIFICIAL (type
) = artificial_p
;
2088 /* Add this decl to the current binding level. */
2089 gnat_pushdecl (type_decl
, gnat_node
);
2091 /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
2092 This causes the name to be also viewed as a "tag" by the debug
2093 back-end, with the advantage that no DW_TAG_typedef is emitted
2094 for artificial "tagged" types in DWARF. */
2096 TYPE_STUB_DECL (type
) = type_decl
;
2098 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2099 back-end doesn't support, and for others if we don't need to. */
2100 if (code
== UNCONSTRAINED_ARRAY_TYPE
|| !debug_info_p
)
2101 DECL_IGNORED_P (type_decl
) = 1;
2106 /* Return a VAR_DECL or CONST_DECL node.
2108 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
2109 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
2110 the GCC tree for an optional initial expression; NULL_TREE if none.
2112 CONST_FLAG is true if this variable is constant, in which case we might
2113 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2115 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2116 definition to be made visible outside of the current compilation unit, for
2117 instance variable definitions in a package specification.
2119 EXTERN_FLAG is true when processing an external variable declaration (as
2120 opposed to a definition: no storage is to be allocated for the variable).
2122 STATIC_FLAG is only relevant when not at top level. In that case
2123 it indicates whether to always allocate storage to the variable.
2125 GNAT_NODE is used for the position of the decl. */
2128 create_var_decl_1 (tree var_name
, tree asm_name
, tree type
, tree var_init
,
2129 bool const_flag
, bool public_flag
, bool extern_flag
,
2130 bool static_flag
, bool const_decl_allowed_p
,
2131 struct attrib
*attr_list
, Node_Id gnat_node
)
2133 /* Whether the initializer is a constant initializer. At the global level
2134 or for an external object or an object to be allocated in static memory,
2135 we check that it is a valid constant expression for use in initializing
2136 a static variable; otherwise, we only check that it is constant. */
2139 && gnat_types_compatible_p (type
, TREE_TYPE (var_init
))
2140 && (global_bindings_p () || extern_flag
|| static_flag
2141 ? initializer_constant_valid_p (var_init
, TREE_TYPE (var_init
)) != 0
2142 : TREE_CONSTANT (var_init
)));
2144 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
2145 case the initializer may be used in-lieu of the DECL node (as done in
2146 Identifier_to_gnu). This is useful to prevent the need of elaboration
2147 code when an identifier for which such a decl is made is in turn used as
2148 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
2149 but extra constraints apply to this choice (see below) and are not
2150 relevant to the distinction we wish to make. */
2151 bool constant_p
= const_flag
&& init_const
;
2153 /* The actual DECL node. CONST_DECL was initially intended for enumerals
2154 and may be used for scalars in general but not for aggregates. */
2156 = build_decl (input_location
,
2157 (constant_p
&& const_decl_allowed_p
2158 && !AGGREGATE_TYPE_P (type
)) ? CONST_DECL
: VAR_DECL
,
2161 /* If this is external, throw away any initializations (they will be done
2162 elsewhere) unless this is a constant for which we would like to remain
2163 able to get the initializer. If we are defining a global here, leave a
2164 constant initialization and save any variable elaborations for the
2165 elaboration routine. If we are just annotating types, throw away the
2166 initialization if it isn't a constant. */
2167 if ((extern_flag
&& !constant_p
)
2168 || (type_annotate_only
&& var_init
&& !TREE_CONSTANT (var_init
)))
2169 var_init
= NULL_TREE
;
2171 /* At the global level, an initializer requiring code to be generated
2172 produces elaboration statements. Check that such statements are allowed,
2173 that is, not violating a No_Elaboration_Code restriction. */
2174 if (global_bindings_p () && var_init
!= 0 && !init_const
)
2175 Check_Elaboration_Code_Allowed (gnat_node
);
2177 DECL_INITIAL (var_decl
) = var_init
;
2178 TREE_READONLY (var_decl
) = const_flag
;
2179 DECL_EXTERNAL (var_decl
) = extern_flag
;
2180 TREE_PUBLIC (var_decl
) = public_flag
|| extern_flag
;
2181 TREE_CONSTANT (var_decl
) = constant_p
;
2182 TREE_THIS_VOLATILE (var_decl
) = TREE_SIDE_EFFECTS (var_decl
)
2183 = TYPE_VOLATILE (type
);
2185 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
2186 try to fiddle with DECL_COMMON. However, on platforms that don't
2187 support global BSS sections, uninitialized global variables would
2188 go in DATA instead, thus increasing the size of the executable. */
2190 && TREE_CODE (var_decl
) == VAR_DECL
2191 && TREE_PUBLIC (var_decl
)
2192 && !have_global_bss_p ())
2193 DECL_COMMON (var_decl
) = 1;
2195 /* At the global binding level, we need to allocate static storage for the
2196 variable if it isn't external. Otherwise, we allocate automatic storage
2197 unless requested not to. */
2198 TREE_STATIC (var_decl
)
2199 = !extern_flag
&& (static_flag
|| global_bindings_p ());
2201 /* For an external constant whose initializer is not absolute, do not emit
2202 debug info. In DWARF this would mean a global relocation in a read-only
2203 section which runs afoul of the PE-COFF run-time relocation mechanism. */
2207 && initializer_constant_valid_p (var_init
, TREE_TYPE (var_init
))
2208 != null_pointer_node
)
2209 DECL_IGNORED_P (var_decl
) = 1;
2211 if (TREE_SIDE_EFFECTS (var_decl
))
2212 TREE_ADDRESSABLE (var_decl
) = 1;
2214 /* ??? Some attributes cannot be applied to CONST_DECLs. */
2215 if (TREE_CODE (var_decl
) == VAR_DECL
)
2216 process_attributes (&var_decl
, &attr_list
, true, gnat_node
);
2218 /* Add this decl to the current binding level. */
2219 gnat_pushdecl (var_decl
, gnat_node
);
2221 if (TREE_CODE (var_decl
) == VAR_DECL
)
2224 SET_DECL_ASSEMBLER_NAME (var_decl
, asm_name
);
2226 if (global_bindings_p ())
2227 rest_of_decl_compilation (var_decl
, true, 0);
2233 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
2236 aggregate_type_contains_array_p (tree type
)
2238 switch (TREE_CODE (type
))
2242 case QUAL_UNION_TYPE
:
2245 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
2246 if (AGGREGATE_TYPE_P (TREE_TYPE (field
))
2247 && aggregate_type_contains_array_p (TREE_TYPE (field
)))
2260 /* Return a FIELD_DECL node. FIELD_NAME is the field's name, FIELD_TYPE is
2261 its type and RECORD_TYPE is the type of the enclosing record. If SIZE is
2262 nonzero, it is the specified size of the field. If POS is nonzero, it is
2263 the bit position. PACKED is 1 if the enclosing record is packed, -1 if it
2264 has Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
2265 means we are allowed to take the address of the field; if it is negative,
2266 we should not make a bitfield, which is used by make_aligning_type. */
2269 create_field_decl (tree field_name
, tree field_type
, tree record_type
,
2270 tree size
, tree pos
, int packed
, int addressable
)
2272 tree field_decl
= build_decl (input_location
,
2273 FIELD_DECL
, field_name
, field_type
);
2275 DECL_CONTEXT (field_decl
) = record_type
;
2276 TREE_READONLY (field_decl
) = TYPE_READONLY (field_type
);
2278 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
2279 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
2280 Likewise for an aggregate without specified position that contains an
2281 array, because in this case slices of variable length of this array
2282 must be handled by GCC and variable-sized objects need to be aligned
2283 to at least a byte boundary. */
2284 if (packed
&& (TYPE_MODE (field_type
) == BLKmode
2286 && AGGREGATE_TYPE_P (field_type
)
2287 && aggregate_type_contains_array_p (field_type
))))
2288 DECL_ALIGN (field_decl
) = BITS_PER_UNIT
;
2290 /* If a size is specified, use it. Otherwise, if the record type is packed
2291 compute a size to use, which may differ from the object's natural size.
2292 We always set a size in this case to trigger the checks for bitfield
2293 creation below, which is typically required when no position has been
2296 size
= convert (bitsizetype
, size
);
2297 else if (packed
== 1)
2299 size
= rm_size (field_type
);
2300 if (TYPE_MODE (field_type
) == BLKmode
)
2301 size
= round_up (size
, BITS_PER_UNIT
);
2304 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
2305 specified for two reasons: first if the size differs from the natural
2306 size. Second, if the alignment is insufficient. There are a number of
2307 ways the latter can be true.
2309 We never make a bitfield if the type of the field has a nonconstant size,
2310 because no such entity requiring bitfield operations should reach here.
2312 We do *preventively* make a bitfield when there might be the need for it
2313 but we don't have all the necessary information to decide, as is the case
2314 of a field with no specified position in a packed record.
2316 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
2317 in layout_decl or finish_record_type to clear the bit_field indication if
2318 it is in fact not needed. */
2319 if (addressable
>= 0
2321 && TREE_CODE (size
) == INTEGER_CST
2322 && TREE_CODE (TYPE_SIZE (field_type
)) == INTEGER_CST
2323 && (!tree_int_cst_equal (size
, TYPE_SIZE (field_type
))
2324 || (pos
&& !value_factor_p (pos
, TYPE_ALIGN (field_type
)))
2326 || (TYPE_ALIGN (record_type
) != 0
2327 && TYPE_ALIGN (record_type
) < TYPE_ALIGN (field_type
))))
2329 DECL_BIT_FIELD (field_decl
) = 1;
2330 DECL_SIZE (field_decl
) = size
;
2331 if (!packed
&& !pos
)
2333 if (TYPE_ALIGN (record_type
) != 0
2334 && TYPE_ALIGN (record_type
) < TYPE_ALIGN (field_type
))
2335 DECL_ALIGN (field_decl
) = TYPE_ALIGN (record_type
);
2337 DECL_ALIGN (field_decl
) = TYPE_ALIGN (field_type
);
2341 DECL_PACKED (field_decl
) = pos
? DECL_BIT_FIELD (field_decl
) : packed
;
2343 /* Bump the alignment if need be, either for bitfield/packing purposes or
2344 to satisfy the type requirements if no such consideration applies. When
2345 we get the alignment from the type, indicate if this is from an explicit
2346 user request, which prevents stor-layout from lowering it later on. */
2348 unsigned int bit_align
2349 = (DECL_BIT_FIELD (field_decl
) ? 1
2350 : packed
&& TYPE_MODE (field_type
) != BLKmode
? BITS_PER_UNIT
: 0);
2352 if (bit_align
> DECL_ALIGN (field_decl
))
2353 DECL_ALIGN (field_decl
) = bit_align
;
2354 else if (!bit_align
&& TYPE_ALIGN (field_type
) > DECL_ALIGN (field_decl
))
2356 DECL_ALIGN (field_decl
) = TYPE_ALIGN (field_type
);
2357 DECL_USER_ALIGN (field_decl
) = TYPE_USER_ALIGN (field_type
);
2363 /* We need to pass in the alignment the DECL is known to have.
2364 This is the lowest-order bit set in POS, but no more than
2365 the alignment of the record, if one is specified. Note
2366 that an alignment of 0 is taken as infinite. */
2367 unsigned int known_align
;
2369 if (tree_fits_uhwi_p (pos
))
2370 known_align
= tree_to_uhwi (pos
) & - tree_to_uhwi (pos
);
2372 known_align
= BITS_PER_UNIT
;
2374 if (TYPE_ALIGN (record_type
)
2375 && (known_align
== 0 || known_align
> TYPE_ALIGN (record_type
)))
2376 known_align
= TYPE_ALIGN (record_type
);
2378 layout_decl (field_decl
, known_align
);
2379 SET_DECL_OFFSET_ALIGN (field_decl
,
2380 tree_fits_uhwi_p (pos
) ? BIGGEST_ALIGNMENT
2382 pos_from_bit (&DECL_FIELD_OFFSET (field_decl
),
2383 &DECL_FIELD_BIT_OFFSET (field_decl
),
2384 DECL_OFFSET_ALIGN (field_decl
), pos
);
2387 /* In addition to what our caller says, claim the field is addressable if we
2388 know that its type is not suitable.
2390 The field may also be "technically" nonaddressable, meaning that even if
2391 we attempt to take the field's address we will actually get the address
2392 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
2393 value we have at this point is not accurate enough, so we don't account
2394 for this here and let finish_record_type decide. */
2395 if (!addressable
&& !type_for_nonaliased_component_p (field_type
))
2398 DECL_NONADDRESSABLE_P (field_decl
) = !addressable
;
2403 /* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
2404 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
2405 (either an In parameter or an address of a pass-by-ref parameter). */
2408 create_param_decl (tree param_name
, tree param_type
, bool readonly
)
2410 tree param_decl
= build_decl (input_location
,
2411 PARM_DECL
, param_name
, param_type
);
2413 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2414 can lead to various ABI violations. */
2415 if (targetm
.calls
.promote_prototypes (NULL_TREE
)
2416 && INTEGRAL_TYPE_P (param_type
)
2417 && TYPE_PRECISION (param_type
) < TYPE_PRECISION (integer_type_node
))
2419 /* We have to be careful about biased types here. Make a subtype
2420 of integer_type_node with the proper biasing. */
2421 if (TREE_CODE (param_type
) == INTEGER_TYPE
2422 && TYPE_BIASED_REPRESENTATION_P (param_type
))
2425 = make_unsigned_type (TYPE_PRECISION (integer_type_node
));
2426 TREE_TYPE (subtype
) = integer_type_node
;
2427 TYPE_BIASED_REPRESENTATION_P (subtype
) = 1;
2428 SET_TYPE_RM_MIN_VALUE (subtype
, TYPE_MIN_VALUE (param_type
));
2429 SET_TYPE_RM_MAX_VALUE (subtype
, TYPE_MAX_VALUE (param_type
));
2430 param_type
= subtype
;
2433 param_type
= integer_type_node
;
2436 DECL_ARG_TYPE (param_decl
) = param_type
;
2437 TREE_READONLY (param_decl
) = readonly
;
2441 /* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
2442 a TYPE. If IN_PLACE is true, the tree pointed to by NODE should not be
2443 changed. GNAT_NODE is used for the position of error messages. */
2446 process_attributes (tree
*node
, struct attrib
**attr_list
, bool in_place
,
2449 struct attrib
*attr
;
2451 for (attr
= *attr_list
; attr
; attr
= attr
->next
)
2454 case ATTR_MACHINE_ATTRIBUTE
:
2455 Sloc_to_locus (Sloc (gnat_node
), &input_location
);
2456 decl_attributes (node
, tree_cons (attr
->name
, attr
->args
, NULL_TREE
),
2457 in_place
? ATTR_FLAG_TYPE_IN_PLACE
: 0);
2460 case ATTR_LINK_ALIAS
:
2461 if (!DECL_EXTERNAL (*node
))
2463 TREE_STATIC (*node
) = 1;
2464 assemble_alias (*node
, attr
->name
);
2468 case ATTR_WEAK_EXTERNAL
:
2470 declare_weak (*node
);
2472 post_error ("?weak declarations not supported on this target",
2476 case ATTR_LINK_SECTION
:
2477 if (targetm_common
.have_named_sections
)
2479 DECL_SECTION_NAME (*node
)
2480 = build_string (IDENTIFIER_LENGTH (attr
->name
),
2481 IDENTIFIER_POINTER (attr
->name
));
2482 DECL_COMMON (*node
) = 0;
2485 post_error ("?section attributes are not supported for this target",
2489 case ATTR_LINK_CONSTRUCTOR
:
2490 DECL_STATIC_CONSTRUCTOR (*node
) = 1;
2491 TREE_USED (*node
) = 1;
2494 case ATTR_LINK_DESTRUCTOR
:
2495 DECL_STATIC_DESTRUCTOR (*node
) = 1;
2496 TREE_USED (*node
) = 1;
2499 case ATTR_THREAD_LOCAL_STORAGE
:
2500 DECL_TLS_MODEL (*node
) = decl_default_tls_model (*node
);
2501 DECL_COMMON (*node
) = 0;
2508 /* Record DECL as a global renaming pointer. */
2511 record_global_renaming_pointer (tree decl
)
2513 gcc_assert (!DECL_LOOP_PARM_P (decl
) && DECL_RENAMED_OBJECT (decl
));
2514 vec_safe_push (global_renaming_pointers
, decl
);
2517 /* Invalidate the global renaming pointers that are not constant, lest their
2518 renamed object contains SAVE_EXPRs tied to an elaboration routine. Note
2519 that we should not blindly invalidate everything here because of the need
2520 to propagate constant values through renaming. */
2523 invalidate_global_renaming_pointers (void)
2528 if (global_renaming_pointers
== NULL
)
2531 FOR_EACH_VEC_ELT (*global_renaming_pointers
, i
, iter
)
2532 if (!TREE_CONSTANT (DECL_RENAMED_OBJECT (iter
)))
2533 SET_DECL_RENAMED_OBJECT (iter
, NULL_TREE
);
2535 vec_free (global_renaming_pointers
);
2538 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
2542 value_factor_p (tree value
, HOST_WIDE_INT factor
)
2544 if (tree_fits_uhwi_p (value
))
2545 return tree_to_uhwi (value
) % factor
== 0;
2547 if (TREE_CODE (value
) == MULT_EXPR
)
2548 return (value_factor_p (TREE_OPERAND (value
, 0), factor
)
2549 || value_factor_p (TREE_OPERAND (value
, 1), factor
));
2554 /* Return VALUE scaled by the biggest power-of-2 factor of EXPR. */
2557 scale_by_factor_of (tree expr
, unsigned int value
)
2559 expr
= remove_conversions (expr
, true);
2561 /* An expression which is a bitwise AND with a mask has a power-of-2 factor
2562 corresponding to the number of trailing zeros of the mask. */
2563 if (TREE_CODE (expr
) == BIT_AND_EXPR
2564 && TREE_CODE (TREE_OPERAND (expr
, 1)) == INTEGER_CST
)
2566 unsigned HOST_WIDE_INT mask
= TREE_INT_CST_LOW (TREE_OPERAND (expr
, 1));
2569 while ((mask
& 1) == 0 && i
< HOST_BITS_PER_WIDE_INT
)
2580 /* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
2581 unless we can prove these 2 fields are laid out in such a way that no gap
2582 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
2583 is the distance in bits between the end of PREV_FIELD and the starting
2584 position of CURR_FIELD. It is ignored if null. */
2587 potential_alignment_gap (tree prev_field
, tree curr_field
, tree offset
)
2589 /* If this is the first field of the record, there cannot be any gap */
2593 /* If the previous field is a union type, then return false: The only
2594 time when such a field is not the last field of the record is when
2595 there are other components at fixed positions after it (meaning there
2596 was a rep clause for every field), in which case we don't want the
2597 alignment constraint to override them. */
2598 if (TREE_CODE (TREE_TYPE (prev_field
)) == QUAL_UNION_TYPE
)
2601 /* If the distance between the end of prev_field and the beginning of
2602 curr_field is constant, then there is a gap if the value of this
2603 constant is not null. */
2604 if (offset
&& tree_fits_uhwi_p (offset
))
2605 return !integer_zerop (offset
);
2607 /* If the size and position of the previous field are constant,
2608 then check the sum of this size and position. There will be a gap
2609 iff it is not multiple of the current field alignment. */
2610 if (tree_fits_uhwi_p (DECL_SIZE (prev_field
))
2611 && tree_fits_uhwi_p (bit_position (prev_field
)))
2612 return ((tree_to_uhwi (bit_position (prev_field
))
2613 + tree_to_uhwi (DECL_SIZE (prev_field
)))
2614 % DECL_ALIGN (curr_field
) != 0);
2616 /* If both the position and size of the previous field are multiples
2617 of the current field alignment, there cannot be any gap. */
2618 if (value_factor_p (bit_position (prev_field
), DECL_ALIGN (curr_field
))
2619 && value_factor_p (DECL_SIZE (prev_field
), DECL_ALIGN (curr_field
)))
2622 /* Fallback, return that there may be a potential gap */
2626 /* Return a LABEL_DECL with LABEL_NAME. GNAT_NODE is used for the position
2630 create_label_decl (tree label_name
, Node_Id gnat_node
)
2633 = build_decl (input_location
, LABEL_DECL
, label_name
, void_type_node
);
2635 DECL_MODE (label_decl
) = VOIDmode
;
2637 /* Add this decl to the current binding level. */
2638 gnat_pushdecl (label_decl
, gnat_node
);
2643 /* Return a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
2644 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
2645 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
2646 PARM_DECL nodes chained through the DECL_CHAIN field).
2648 INLINE_STATUS, PUBLIC_FLAG, EXTERN_FLAG, ARTIFICIAL_FLAG and ATTR_LIST are
2649 used to set the appropriate fields in the FUNCTION_DECL. GNAT_NODE is
2650 used for the position of the decl. */
2653 create_subprog_decl (tree subprog_name
, tree asm_name
, tree subprog_type
,
2654 tree param_decl_list
, enum inline_status_t inline_status
,
2655 bool public_flag
, bool extern_flag
, bool artificial_flag
,
2656 struct attrib
*attr_list
, Node_Id gnat_node
)
2658 tree subprog_decl
= build_decl (input_location
, FUNCTION_DECL
, subprog_name
,
2660 tree result_decl
= build_decl (input_location
, RESULT_DECL
, NULL_TREE
,
2661 TREE_TYPE (subprog_type
));
2662 DECL_ARGUMENTS (subprog_decl
) = param_decl_list
;
2664 /* If this is a non-inline function nested inside an inlined external
2665 function, we cannot honor both requests without cloning the nested
2666 function in the current unit since it is private to the other unit.
2667 We could inline the nested function as well but it's probably better
2668 to err on the side of too little inlining. */
2669 if (inline_status
!= is_enabled
2671 && current_function_decl
2672 && DECL_DECLARED_INLINE_P (current_function_decl
)
2673 && DECL_EXTERNAL (current_function_decl
))
2674 DECL_DECLARED_INLINE_P (current_function_decl
) = 0;
2676 DECL_ARTIFICIAL (subprog_decl
) = artificial_flag
;
2677 DECL_EXTERNAL (subprog_decl
) = extern_flag
;
2679 switch (inline_status
)
2682 DECL_UNINLINABLE (subprog_decl
) = 1;
2689 DECL_DECLARED_INLINE_P (subprog_decl
) = 1;
2690 DECL_NO_INLINE_WARNING_P (subprog_decl
) = artificial_flag
;
2697 TREE_PUBLIC (subprog_decl
) = public_flag
;
2698 TREE_READONLY (subprog_decl
) = TYPE_READONLY (subprog_type
);
2699 TREE_THIS_VOLATILE (subprog_decl
) = TYPE_VOLATILE (subprog_type
);
2700 TREE_SIDE_EFFECTS (subprog_decl
) = TYPE_VOLATILE (subprog_type
);
2702 DECL_ARTIFICIAL (result_decl
) = 1;
2703 DECL_IGNORED_P (result_decl
) = 1;
2704 DECL_BY_REFERENCE (result_decl
) = TREE_ADDRESSABLE (subprog_type
);
2705 DECL_RESULT (subprog_decl
) = result_decl
;
2709 SET_DECL_ASSEMBLER_NAME (subprog_decl
, asm_name
);
2711 /* The expand_main_function circuitry expects "main_identifier_node" to
2712 designate the DECL_NAME of the 'main' entry point, in turn expected
2713 to be declared as the "main" function literally by default. Ada
2714 program entry points are typically declared with a different name
2715 within the binder generated file, exported as 'main' to satisfy the
2716 system expectations. Force main_identifier_node in this case. */
2717 if (asm_name
== main_identifier_node
)
2718 DECL_NAME (subprog_decl
) = main_identifier_node
;
2721 process_attributes (&subprog_decl
, &attr_list
, true, gnat_node
);
2723 /* Add this decl to the current binding level. */
2724 gnat_pushdecl (subprog_decl
, gnat_node
);
2726 /* Output the assembler code and/or RTL for the declaration. */
2727 rest_of_decl_compilation (subprog_decl
, global_bindings_p (), 0);
2729 return subprog_decl
;
2732 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
2733 body. This routine needs to be invoked before processing the declarations
2734 appearing in the subprogram. */
2737 begin_subprog_body (tree subprog_decl
)
2741 announce_function (subprog_decl
);
2743 /* This function is being defined. */
2744 TREE_STATIC (subprog_decl
) = 1;
2746 current_function_decl
= subprog_decl
;
2748 /* Enter a new binding level and show that all the parameters belong to
2752 for (param_decl
= DECL_ARGUMENTS (subprog_decl
); param_decl
;
2753 param_decl
= DECL_CHAIN (param_decl
))
2754 DECL_CONTEXT (param_decl
) = subprog_decl
;
2756 make_decl_rtl (subprog_decl
);
2759 /* Finish translating the current subprogram and set its BODY. */
2762 end_subprog_body (tree body
)
2764 tree fndecl
= current_function_decl
;
2766 /* Attach the BLOCK for this level to the function and pop the level. */
2767 BLOCK_SUPERCONTEXT (current_binding_level
->block
) = fndecl
;
2768 DECL_INITIAL (fndecl
) = current_binding_level
->block
;
2771 /* Mark the RESULT_DECL as being in this subprogram. */
2772 DECL_CONTEXT (DECL_RESULT (fndecl
)) = fndecl
;
2774 /* The body should be a BIND_EXPR whose BLOCK is the top-level one. */
2775 if (TREE_CODE (body
) == BIND_EXPR
)
2777 BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body
)) = fndecl
;
2778 DECL_INITIAL (fndecl
) = BIND_EXPR_BLOCK (body
);
2781 DECL_SAVED_TREE (fndecl
) = body
;
2783 current_function_decl
= decl_function_context (fndecl
);
2786 /* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
2789 rest_of_subprog_body_compilation (tree subprog_decl
)
2791 /* We cannot track the location of errors past this point. */
2792 error_gnat_node
= Empty
;
2794 /* If we're only annotating types, don't actually compile this function. */
2795 if (type_annotate_only
)
2798 /* Dump functions before gimplification. */
2799 dump_function (TDI_original
, subprog_decl
);
2801 if (!decl_function_context (subprog_decl
))
2802 cgraph_finalize_function (subprog_decl
, false);
2804 /* Register this function with cgraph just far enough to get it
2805 added to our parent's nested function list. */
2806 (void) cgraph_get_create_node (subprog_decl
);
2810 gnat_builtin_function (tree decl
)
2812 gnat_pushdecl (decl
, Empty
);
2816 /* Return an integer type with the number of bits of precision given by
2817 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2818 it is a signed type. */
2821 gnat_type_for_size (unsigned precision
, int unsignedp
)
2826 if (precision
<= 2 * MAX_BITS_PER_WORD
2827 && signed_and_unsigned_types
[precision
][unsignedp
])
2828 return signed_and_unsigned_types
[precision
][unsignedp
];
2831 t
= make_unsigned_type (precision
);
2833 t
= make_signed_type (precision
);
2835 if (precision
<= 2 * MAX_BITS_PER_WORD
)
2836 signed_and_unsigned_types
[precision
][unsignedp
] = t
;
2840 sprintf (type_name
, "%sSIGNED_%u", unsignedp
? "UN" : "", precision
);
2841 TYPE_NAME (t
) = get_identifier (type_name
);
2847 /* Likewise for floating-point types. */
2850 float_type_for_precision (int precision
, enum machine_mode mode
)
2855 if (float_types
[(int) mode
])
2856 return float_types
[(int) mode
];
2858 float_types
[(int) mode
] = t
= make_node (REAL_TYPE
);
2859 TYPE_PRECISION (t
) = precision
;
2862 gcc_assert (TYPE_MODE (t
) == mode
);
2865 sprintf (type_name
, "FLOAT_%d", precision
);
2866 TYPE_NAME (t
) = get_identifier (type_name
);
2872 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2873 an unsigned type; otherwise a signed type is returned. */
2876 gnat_type_for_mode (enum machine_mode mode
, int unsignedp
)
2878 if (mode
== BLKmode
)
2881 if (mode
== VOIDmode
)
2882 return void_type_node
;
2884 if (COMPLEX_MODE_P (mode
))
2887 if (SCALAR_FLOAT_MODE_P (mode
))
2888 return float_type_for_precision (GET_MODE_PRECISION (mode
), mode
);
2890 if (SCALAR_INT_MODE_P (mode
))
2891 return gnat_type_for_size (GET_MODE_BITSIZE (mode
), unsignedp
);
2893 if (VECTOR_MODE_P (mode
))
2895 enum machine_mode inner_mode
= GET_MODE_INNER (mode
);
2896 tree inner_type
= gnat_type_for_mode (inner_mode
, unsignedp
);
2898 return build_vector_type_for_mode (inner_type
, mode
);
2904 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2907 gnat_unsigned_type (tree type_node
)
2909 tree type
= gnat_type_for_size (TYPE_PRECISION (type_node
), 1);
2911 if (TREE_CODE (type_node
) == INTEGER_TYPE
&& TYPE_MODULAR_P (type_node
))
2913 type
= copy_node (type
);
2914 TREE_TYPE (type
) = type_node
;
2916 else if (TREE_TYPE (type_node
)
2917 && TREE_CODE (TREE_TYPE (type_node
)) == INTEGER_TYPE
2918 && TYPE_MODULAR_P (TREE_TYPE (type_node
)))
2920 type
= copy_node (type
);
2921 TREE_TYPE (type
) = TREE_TYPE (type_node
);
2927 /* Return the signed version of a TYPE_NODE, a scalar type. */
2930 gnat_signed_type (tree type_node
)
2932 tree type
= gnat_type_for_size (TYPE_PRECISION (type_node
), 0);
2934 if (TREE_CODE (type_node
) == INTEGER_TYPE
&& TYPE_MODULAR_P (type_node
))
2936 type
= copy_node (type
);
2937 TREE_TYPE (type
) = type_node
;
2939 else if (TREE_TYPE (type_node
)
2940 && TREE_CODE (TREE_TYPE (type_node
)) == INTEGER_TYPE
2941 && TYPE_MODULAR_P (TREE_TYPE (type_node
)))
2943 type
= copy_node (type
);
2944 TREE_TYPE (type
) = TREE_TYPE (type_node
);
2950 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2951 transparently converted to each other. */
2954 gnat_types_compatible_p (tree t1
, tree t2
)
2956 enum tree_code code
;
2958 /* This is the default criterion. */
2959 if (TYPE_MAIN_VARIANT (t1
) == TYPE_MAIN_VARIANT (t2
))
2962 /* We only check structural equivalence here. */
2963 if ((code
= TREE_CODE (t1
)) != TREE_CODE (t2
))
2966 /* Vector types are also compatible if they have the same number of subparts
2967 and the same form of (scalar) element type. */
2968 if (code
== VECTOR_TYPE
2969 && TYPE_VECTOR_SUBPARTS (t1
) == TYPE_VECTOR_SUBPARTS (t2
)
2970 && TREE_CODE (TREE_TYPE (t1
)) == TREE_CODE (TREE_TYPE (t2
))
2971 && TYPE_PRECISION (TREE_TYPE (t1
)) == TYPE_PRECISION (TREE_TYPE (t2
)))
2974 /* Array types are also compatible if they are constrained and have the same
2975 domain(s) and the same component type. */
2976 if (code
== ARRAY_TYPE
2977 && (TYPE_DOMAIN (t1
) == TYPE_DOMAIN (t2
)
2978 || (TYPE_DOMAIN (t1
)
2980 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1
)),
2981 TYPE_MIN_VALUE (TYPE_DOMAIN (t2
)))
2982 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1
)),
2983 TYPE_MAX_VALUE (TYPE_DOMAIN (t2
)))))
2984 && (TREE_TYPE (t1
) == TREE_TYPE (t2
)
2985 || (TREE_CODE (TREE_TYPE (t1
)) == ARRAY_TYPE
2986 && gnat_types_compatible_p (TREE_TYPE (t1
), TREE_TYPE (t2
)))))
2992 /* Return true if EXPR is a useless type conversion. */
2995 gnat_useless_type_conversion (tree expr
)
2997 if (CONVERT_EXPR_P (expr
)
2998 || TREE_CODE (expr
) == VIEW_CONVERT_EXPR
2999 || TREE_CODE (expr
) == NON_LVALUE_EXPR
)
3000 return gnat_types_compatible_p (TREE_TYPE (expr
),
3001 TREE_TYPE (TREE_OPERAND (expr
, 0)));
3006 /* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
3009 fntype_same_flags_p (const_tree t
, tree cico_list
, bool return_unconstrained_p
,
3010 bool return_by_direct_ref_p
, bool return_by_invisi_ref_p
)
3012 return TYPE_CI_CO_LIST (t
) == cico_list
3013 && TYPE_RETURN_UNCONSTRAINED_P (t
) == return_unconstrained_p
3014 && TYPE_RETURN_BY_DIRECT_REF_P (t
) == return_by_direct_ref_p
3015 && TREE_ADDRESSABLE (t
) == return_by_invisi_ref_p
;
3018 /* EXP is an expression for the size of an object. If this size contains
3019 discriminant references, replace them with the maximum (if MAX_P) or
3020 minimum (if !MAX_P) possible value of the discriminant. */
3023 max_size (tree exp
, bool max_p
)
3025 enum tree_code code
= TREE_CODE (exp
);
3026 tree type
= TREE_TYPE (exp
);
3028 switch (TREE_CODE_CLASS (code
))
3030 case tcc_declaration
:
3035 if (code
== CALL_EXPR
)
3040 t
= maybe_inline_call_in_expr (exp
);
3042 return max_size (t
, max_p
);
3044 n
= call_expr_nargs (exp
);
3046 argarray
= XALLOCAVEC (tree
, n
);
3047 for (i
= 0; i
< n
; i
++)
3048 argarray
[i
] = max_size (CALL_EXPR_ARG (exp
, i
), max_p
);
3049 return build_call_array (type
, CALL_EXPR_FN (exp
), n
, argarray
);
3054 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
3055 modify. Otherwise, we treat it like a variable. */
3056 if (!CONTAINS_PLACEHOLDER_P (exp
))
3059 type
= TREE_TYPE (TREE_OPERAND (exp
, 1));
3061 max_size (max_p
? TYPE_MAX_VALUE (type
) : TYPE_MIN_VALUE (type
), true);
3063 case tcc_comparison
:
3064 return max_p
? size_one_node
: size_zero_node
;
3067 if (code
== NON_LVALUE_EXPR
)
3068 return max_size (TREE_OPERAND (exp
, 0), max_p
);
3070 return fold_build1 (code
, type
,
3071 max_size (TREE_OPERAND (exp
, 0),
3072 code
== NEGATE_EXPR
? !max_p
: max_p
));
3076 tree lhs
= max_size (TREE_OPERAND (exp
, 0), max_p
);
3077 tree rhs
= max_size (TREE_OPERAND (exp
, 1),
3078 code
== MINUS_EXPR
? !max_p
: max_p
);
3080 /* Special-case wanting the maximum value of a MIN_EXPR.
3081 In that case, if one side overflows, return the other. */
3082 if (max_p
&& code
== MIN_EXPR
)
3084 if (TREE_CODE (rhs
) == INTEGER_CST
&& TREE_OVERFLOW (rhs
))
3087 if (TREE_CODE (lhs
) == INTEGER_CST
&& TREE_OVERFLOW (lhs
))
3091 /* Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
3092 overflowing and the RHS a variable. */
3093 if ((code
== MINUS_EXPR
|| code
== PLUS_EXPR
)
3094 && TREE_CODE (lhs
) == INTEGER_CST
3095 && TREE_OVERFLOW (lhs
)
3096 && !TREE_CONSTANT (rhs
))
3099 return size_binop (code
, lhs
, rhs
);
3102 case tcc_expression
:
3103 switch (TREE_CODE_LENGTH (code
))
3106 if (code
== SAVE_EXPR
)
3109 return fold_build1 (code
, type
,
3110 max_size (TREE_OPERAND (exp
, 0), max_p
));
3113 if (code
== COMPOUND_EXPR
)
3114 return max_size (TREE_OPERAND (exp
, 1), max_p
);
3116 return fold_build2 (code
, type
,
3117 max_size (TREE_OPERAND (exp
, 0), max_p
),
3118 max_size (TREE_OPERAND (exp
, 1), max_p
));
3121 if (code
== COND_EXPR
)
3122 return fold_build2 (max_p
? MAX_EXPR
: MIN_EXPR
, type
,
3123 max_size (TREE_OPERAND (exp
, 1), max_p
),
3124 max_size (TREE_OPERAND (exp
, 2), max_p
));
3130 /* Other tree classes cannot happen. */
3138 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
3139 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
3140 Return a constructor for the template. */
3143 build_template (tree template_type
, tree array_type
, tree expr
)
3145 vec
<constructor_elt
, va_gc
> *template_elts
= NULL
;
3146 tree bound_list
= NULL_TREE
;
3149 while (TREE_CODE (array_type
) == RECORD_TYPE
3150 && (TYPE_PADDING_P (array_type
)
3151 || TYPE_JUSTIFIED_MODULAR_P (array_type
)))
3152 array_type
= TREE_TYPE (TYPE_FIELDS (array_type
));
3154 if (TREE_CODE (array_type
) == ARRAY_TYPE
3155 || (TREE_CODE (array_type
) == INTEGER_TYPE
3156 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type
)))
3157 bound_list
= TYPE_ACTUAL_BOUNDS (array_type
);
3159 /* First make the list for a CONSTRUCTOR for the template. Go down the
3160 field list of the template instead of the type chain because this
3161 array might be an Ada array of arrays and we can't tell where the
3162 nested arrays stop being the underlying object. */
3164 for (field
= TYPE_FIELDS (template_type
); field
;
3166 ? (bound_list
= TREE_CHAIN (bound_list
))
3167 : (array_type
= TREE_TYPE (array_type
))),
3168 field
= DECL_CHAIN (DECL_CHAIN (field
)))
3170 tree bounds
, min
, max
;
3172 /* If we have a bound list, get the bounds from there. Likewise
3173 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
3174 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
3175 This will give us a maximum range. */
3177 bounds
= TREE_VALUE (bound_list
);
3178 else if (TREE_CODE (array_type
) == ARRAY_TYPE
)
3179 bounds
= TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type
));
3180 else if (expr
&& TREE_CODE (expr
) == PARM_DECL
3181 && DECL_BY_COMPONENT_PTR_P (expr
))
3182 bounds
= TREE_TYPE (field
);
3186 min
= convert (TREE_TYPE (field
), TYPE_MIN_VALUE (bounds
));
3187 max
= convert (TREE_TYPE (DECL_CHAIN (field
)), TYPE_MAX_VALUE (bounds
));
3189 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
3190 substitute it from OBJECT. */
3191 min
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (min
, expr
);
3192 max
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (max
, expr
);
3194 CONSTRUCTOR_APPEND_ELT (template_elts
, field
, min
);
3195 CONSTRUCTOR_APPEND_ELT (template_elts
, DECL_CHAIN (field
), max
);
3198 return gnat_build_constructor (template_type
, template_elts
);
3201 /* Return true if TYPE is suitable for the element type of a vector. */
3204 type_for_vector_element_p (tree type
)
3206 enum machine_mode mode
;
3208 if (!INTEGRAL_TYPE_P (type
)
3209 && !SCALAR_FLOAT_TYPE_P (type
)
3210 && !FIXED_POINT_TYPE_P (type
))
3213 mode
= TYPE_MODE (type
);
3214 if (GET_MODE_CLASS (mode
) != MODE_INT
3215 && !SCALAR_FLOAT_MODE_P (mode
)
3216 && !ALL_SCALAR_FIXED_POINT_MODE_P (mode
))
3222 /* Return a vector type given the SIZE and the INNER_TYPE, or NULL_TREE if
3223 this is not possible. If ATTRIBUTE is non-zero, we are processing the
3224 attribute declaration and want to issue error messages on failure. */
3227 build_vector_type_for_size (tree inner_type
, tree size
, tree attribute
)
3229 unsigned HOST_WIDE_INT size_int
, inner_size_int
;
3232 /* Silently punt on variable sizes. We can't make vector types for them,
3233 need to ignore them on front-end generated subtypes of unconstrained
3234 base types, and this attribute is for binding implementors, not end
3235 users, so we should never get there from legitimate explicit uses. */
3236 if (!tree_fits_uhwi_p (size
))
3238 size_int
= tree_to_uhwi (size
);
3240 if (!type_for_vector_element_p (inner_type
))
3243 error ("invalid element type for attribute %qs",
3244 IDENTIFIER_POINTER (attribute
));
3247 inner_size_int
= tree_to_uhwi (TYPE_SIZE_UNIT (inner_type
));
3249 if (size_int
% inner_size_int
)
3252 error ("vector size not an integral multiple of component size");
3259 error ("zero vector size");
3263 nunits
= size_int
/ inner_size_int
;
3264 if (nunits
& (nunits
- 1))
3267 error ("number of components of vector not a power of two");
3271 return build_vector_type (inner_type
, nunits
);
3274 /* Return a vector type whose representative array type is ARRAY_TYPE, or
3275 NULL_TREE if this is not possible. If ATTRIBUTE is non-zero, we are
3276 processing the attribute and want to issue error messages on failure. */
3279 build_vector_type_for_array (tree array_type
, tree attribute
)
3281 tree vector_type
= build_vector_type_for_size (TREE_TYPE (array_type
),
3282 TYPE_SIZE_UNIT (array_type
),
3287 TYPE_REPRESENTATIVE_ARRAY (vector_type
) = array_type
;
3291 /* Helper routine to make a descriptor field. FIELD_LIST is the list of decls
3292 being built; the new decl is chained on to the front of the list. */
3295 make_descriptor_field (const char *name
, tree type
, tree rec_type
,
3296 tree initial
, tree field_list
)
3299 = create_field_decl (get_identifier (name
), type
, rec_type
, NULL_TREE
,
3302 DECL_INITIAL (field
) = initial
;
3303 DECL_CHAIN (field
) = field_list
;
3307 /* Build a 32-bit VMS descriptor from a Mechanism_Type, which must specify a
3308 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
3309 type contains in its DECL_INITIAL the expression to use when a constructor
3310 is made for the type. GNAT_ENTITY is an entity used to print out an error
3311 message if the mechanism cannot be applied to an object of that type and
3312 also for the name. */
3315 build_vms_descriptor32 (tree type
, Mechanism_Type mech
, Entity_Id gnat_entity
)
3317 tree record_type
= make_node (RECORD_TYPE
);
3318 tree pointer32_type
, pointer64_type
;
3319 tree field_list
= NULL_TREE
;
3320 int klass
, ndim
, i
, dtype
= 0;
3321 tree inner_type
, tem
;
3324 /* If TYPE is an unconstrained array, use the underlying array type. */
3325 if (TREE_CODE (type
) == UNCONSTRAINED_ARRAY_TYPE
)
3326 type
= TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type
))));
3328 /* If this is an array, compute the number of dimensions in the array,
3329 get the index types, and point to the inner type. */
3330 if (TREE_CODE (type
) != ARRAY_TYPE
)
3333 for (ndim
= 1, inner_type
= type
;
3334 TREE_CODE (TREE_TYPE (inner_type
)) == ARRAY_TYPE
3335 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type
));
3336 ndim
++, inner_type
= TREE_TYPE (inner_type
))
3339 idx_arr
= XALLOCAVEC (tree
, ndim
);
3341 if (mech
!= By_Descriptor_NCA
&& mech
!= By_Short_Descriptor_NCA
3342 && TREE_CODE (type
) == ARRAY_TYPE
&& TYPE_CONVENTION_FORTRAN_P (type
))
3343 for (i
= ndim
- 1, inner_type
= type
;
3345 i
--, inner_type
= TREE_TYPE (inner_type
))
3346 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
3348 for (i
= 0, inner_type
= type
;
3350 i
++, inner_type
= TREE_TYPE (inner_type
))
3351 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
3353 /* Now get the DTYPE value. */
3354 switch (TREE_CODE (type
))
3359 if (TYPE_VAX_FLOATING_POINT_P (type
))
3360 switch (tree_to_uhwi (TYPE_DIGITS_VALUE (type
)))
3373 switch (GET_MODE_BITSIZE (TYPE_MODE (type
)))
3376 dtype
= TYPE_UNSIGNED (type
) ? 2 : 6;
3379 dtype
= TYPE_UNSIGNED (type
) ? 3 : 7;
3382 dtype
= TYPE_UNSIGNED (type
) ? 4 : 8;
3385 dtype
= TYPE_UNSIGNED (type
) ? 5 : 9;
3388 dtype
= TYPE_UNSIGNED (type
) ? 25 : 26;
3394 dtype
= GET_MODE_BITSIZE (TYPE_MODE (type
)) == 32 ? 52 : 53;
3398 if (TREE_CODE (TREE_TYPE (type
)) == INTEGER_TYPE
3399 && TYPE_VAX_FLOATING_POINT_P (type
))
3400 switch (tree_to_uhwi (TYPE_DIGITS_VALUE (type
)))
3412 dtype
= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))) == 32 ? 54: 55;
3423 /* Get the CLASS value. */
3426 case By_Descriptor_A
:
3427 case By_Short_Descriptor_A
:
3430 case By_Descriptor_NCA
:
3431 case By_Short_Descriptor_NCA
:
3434 case By_Descriptor_SB
:
3435 case By_Short_Descriptor_SB
:
3439 case By_Short_Descriptor
:
3440 case By_Descriptor_S
:
3441 case By_Short_Descriptor_S
:
3447 /* Make the type for a descriptor for VMS. The first four fields are the
3448 same for all types. */
3450 = make_descriptor_field ("LENGTH", gnat_type_for_size (16, 1), record_type
,
3451 size_in_bytes ((mech
== By_Descriptor_A
3452 || mech
== By_Short_Descriptor_A
)
3453 ? inner_type
: type
),
3456 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1), record_type
,
3457 size_int (dtype
), field_list
);
3459 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1), record_type
,
3460 size_int (klass
), field_list
);
3462 pointer32_type
= build_pointer_type_for_mode (type
, SImode
, false);
3463 pointer64_type
= build_pointer_type_for_mode (type
, DImode
, false);
3465 /* Ensure that only 32-bit pointers are passed in 32-bit descriptors. Note
3466 that we cannot build a template call to the CE routine as it would get a
3467 wrong source location; instead we use a second placeholder for it. */
3468 tem
= build_unary_op (ADDR_EXPR
, pointer64_type
,
3469 build0 (PLACEHOLDER_EXPR
, type
));
3470 tem
= build3 (COND_EXPR
, pointer32_type
,
3472 ? build_binary_op (GE_EXPR
, boolean_type_node
, tem
,
3473 build_int_cstu (pointer64_type
, 0x80000000))
3474 : boolean_false_node
,
3475 build0 (PLACEHOLDER_EXPR
, void_type_node
),
3476 convert (pointer32_type
, tem
));
3479 = make_descriptor_field ("POINTER", pointer32_type
, record_type
, tem
,
3485 case By_Short_Descriptor
:
3486 case By_Descriptor_S
:
3487 case By_Short_Descriptor_S
:
3490 case By_Descriptor_SB
:
3491 case By_Short_Descriptor_SB
:
3493 = make_descriptor_field ("SB_L1", gnat_type_for_size (32, 1),
3495 (TREE_CODE (type
) == ARRAY_TYPE
3496 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type
))
3500 = make_descriptor_field ("SB_U1", gnat_type_for_size (32, 1),
3502 (TREE_CODE (type
) == ARRAY_TYPE
3503 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type
))
3508 case By_Descriptor_A
:
3509 case By_Short_Descriptor_A
:
3510 case By_Descriptor_NCA
:
3511 case By_Short_Descriptor_NCA
:
3513 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
3514 record_type
, size_zero_node
, field_list
);
3517 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
3518 record_type
, size_zero_node
, field_list
);
3521 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
3523 size_int ((mech
== By_Descriptor_NCA
3524 || mech
== By_Short_Descriptor_NCA
)
3526 /* Set FL_COLUMN, FL_COEFF, and
3528 : (TREE_CODE (type
) == ARRAY_TYPE
3529 && TYPE_CONVENTION_FORTRAN_P
3535 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
3536 record_type
, size_int (ndim
), field_list
);
3539 = make_descriptor_field ("ARSIZE", gnat_type_for_size (32, 1),
3540 record_type
, size_in_bytes (type
),
3543 /* Now build a pointer to the 0,0,0... element. */
3544 tem
= build0 (PLACEHOLDER_EXPR
, type
);
3545 for (i
= 0, inner_type
= type
; i
< ndim
;
3546 i
++, inner_type
= TREE_TYPE (inner_type
))
3547 tem
= build4 (ARRAY_REF
, TREE_TYPE (inner_type
), tem
,
3548 convert (TYPE_DOMAIN (inner_type
), size_zero_node
),
3549 NULL_TREE
, NULL_TREE
);
3552 = make_descriptor_field ("A0", pointer32_type
, record_type
,
3553 build1 (ADDR_EXPR
, pointer32_type
, tem
),
3556 /* Next come the addressing coefficients. */
3557 tem
= size_one_node
;
3558 for (i
= 0; i
< ndim
; i
++)
3562 = size_binop (MULT_EXPR
, tem
,
3563 size_binop (PLUS_EXPR
,
3564 size_binop (MINUS_EXPR
,
3565 TYPE_MAX_VALUE (idx_arr
[i
]),
3566 TYPE_MIN_VALUE (idx_arr
[i
])),
3569 fname
[0] = ((mech
== By_Descriptor_NCA
||
3570 mech
== By_Short_Descriptor_NCA
) ? 'S' : 'M');
3571 fname
[1] = '0' + i
, fname
[2] = 0;
3573 = make_descriptor_field (fname
, gnat_type_for_size (32, 1),
3574 record_type
, idx_length
, field_list
);
3576 if (mech
== By_Descriptor_NCA
|| mech
== By_Short_Descriptor_NCA
)
3580 /* Finally here are the bounds. */
3581 for (i
= 0; i
< ndim
; i
++)
3585 fname
[0] = 'L', fname
[1] = '0' + i
, fname
[2] = 0;
3587 = make_descriptor_field (fname
, gnat_type_for_size (32, 1),
3588 record_type
, TYPE_MIN_VALUE (idx_arr
[i
]),
3593 = make_descriptor_field (fname
, gnat_type_for_size (32, 1),
3594 record_type
, TYPE_MAX_VALUE (idx_arr
[i
]),
3600 post_error ("unsupported descriptor type for &", gnat_entity
);
3603 TYPE_NAME (record_type
) = create_concat_name (gnat_entity
, "DESC");
3604 finish_record_type (record_type
, nreverse (field_list
), 0, false);
3608 /* Build a 64-bit VMS descriptor from a Mechanism_Type, which must specify a
3609 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
3610 type contains in its DECL_INITIAL the expression to use when a constructor
3611 is made for the type. GNAT_ENTITY is an entity used to print out an error
3612 message if the mechanism cannot be applied to an object of that type and
3613 also for the name. */
3616 build_vms_descriptor (tree type
, Mechanism_Type mech
, Entity_Id gnat_entity
)
3618 tree record_type
= make_node (RECORD_TYPE
);
3619 tree pointer64_type
;
3620 tree field_list
= NULL_TREE
;
3621 int klass
, ndim
, i
, dtype
= 0;
3622 tree inner_type
, tem
;
3625 /* If TYPE is an unconstrained array, use the underlying array type. */
3626 if (TREE_CODE (type
) == UNCONSTRAINED_ARRAY_TYPE
)
3627 type
= TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type
))));
3629 /* If this is an array, compute the number of dimensions in the array,
3630 get the index types, and point to the inner type. */
3631 if (TREE_CODE (type
) != ARRAY_TYPE
)
3634 for (ndim
= 1, inner_type
= type
;
3635 TREE_CODE (TREE_TYPE (inner_type
)) == ARRAY_TYPE
3636 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type
));
3637 ndim
++, inner_type
= TREE_TYPE (inner_type
))
3640 idx_arr
= XALLOCAVEC (tree
, ndim
);
3642 if (mech
!= By_Descriptor_NCA
3643 && TREE_CODE (type
) == ARRAY_TYPE
&& TYPE_CONVENTION_FORTRAN_P (type
))
3644 for (i
= ndim
- 1, inner_type
= type
;
3646 i
--, inner_type
= TREE_TYPE (inner_type
))
3647 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
3649 for (i
= 0, inner_type
= type
;
3651 i
++, inner_type
= TREE_TYPE (inner_type
))
3652 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
3654 /* Now get the DTYPE value. */
3655 switch (TREE_CODE (type
))
3660 if (TYPE_VAX_FLOATING_POINT_P (type
))
3661 switch (tree_to_uhwi (TYPE_DIGITS_VALUE (type
)))
3674 switch (GET_MODE_BITSIZE (TYPE_MODE (type
)))
3677 dtype
= TYPE_UNSIGNED (type
) ? 2 : 6;
3680 dtype
= TYPE_UNSIGNED (type
) ? 3 : 7;
3683 dtype
= TYPE_UNSIGNED (type
) ? 4 : 8;
3686 dtype
= TYPE_UNSIGNED (type
) ? 5 : 9;
3689 dtype
= TYPE_UNSIGNED (type
) ? 25 : 26;
3695 dtype
= GET_MODE_BITSIZE (TYPE_MODE (type
)) == 32 ? 52 : 53;
3699 if (TREE_CODE (TREE_TYPE (type
)) == INTEGER_TYPE
3700 && TYPE_VAX_FLOATING_POINT_P (type
))
3701 switch (tree_to_uhwi (TYPE_DIGITS_VALUE (type
)))
3713 dtype
= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))) == 32 ? 54: 55;
3724 /* Get the CLASS value. */
3727 case By_Descriptor_A
:
3730 case By_Descriptor_NCA
:
3733 case By_Descriptor_SB
:
3737 case By_Descriptor_S
:
3743 /* Make the type for a 64-bit descriptor for VMS. The first six fields
3744 are the same for all types. */
3746 = make_descriptor_field ("MBO", gnat_type_for_size (16, 1),
3747 record_type
, size_int (1), field_list
);
3749 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1),
3750 record_type
, size_int (dtype
), field_list
);
3752 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1),
3753 record_type
, size_int (klass
), field_list
);
3755 = make_descriptor_field ("MBMO", gnat_type_for_size (32, 1),
3756 record_type
, size_int (-1), field_list
);
3758 = make_descriptor_field ("LENGTH", gnat_type_for_size (64, 1),
3760 size_in_bytes (mech
== By_Descriptor_A
3761 ? inner_type
: type
),
3764 pointer64_type
= build_pointer_type_for_mode (type
, DImode
, false);
3767 = make_descriptor_field ("POINTER", pointer64_type
, record_type
,
3768 build_unary_op (ADDR_EXPR
, pointer64_type
,
3769 build0 (PLACEHOLDER_EXPR
, type
)),
3775 case By_Descriptor_S
:
3778 case By_Descriptor_SB
:
3780 = make_descriptor_field ("SB_L1", gnat_type_for_size (64, 1),
3782 (TREE_CODE (type
) == ARRAY_TYPE
3783 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type
))
3787 = make_descriptor_field ("SB_U1", gnat_type_for_size (64, 1),
3789 (TREE_CODE (type
) == ARRAY_TYPE
3790 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type
))
3795 case By_Descriptor_A
:
3796 case By_Descriptor_NCA
:
3798 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
3799 record_type
, size_zero_node
, field_list
);
3802 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
3803 record_type
, size_zero_node
, field_list
);
3805 dtype
= (mech
== By_Descriptor_NCA
3807 /* Set FL_COLUMN, FL_COEFF, and
3809 : (TREE_CODE (type
) == ARRAY_TYPE
3810 && TYPE_CONVENTION_FORTRAN_P (type
)
3813 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
3814 record_type
, size_int (dtype
),
3818 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
3819 record_type
, size_int (ndim
), field_list
);
3822 = make_descriptor_field ("MBZ", gnat_type_for_size (32, 1),
3823 record_type
, size_int (0), field_list
);
3825 = make_descriptor_field ("ARSIZE", gnat_type_for_size (64, 1),
3826 record_type
, size_in_bytes (type
),
3829 /* Now build a pointer to the 0,0,0... element. */
3830 tem
= build0 (PLACEHOLDER_EXPR
, type
);
3831 for (i
= 0, inner_type
= type
; i
< ndim
;
3832 i
++, inner_type
= TREE_TYPE (inner_type
))
3833 tem
= build4 (ARRAY_REF
, TREE_TYPE (inner_type
), tem
,
3834 convert (TYPE_DOMAIN (inner_type
), size_zero_node
),
3835 NULL_TREE
, NULL_TREE
);
3838 = make_descriptor_field ("A0", pointer64_type
, record_type
,
3839 build1 (ADDR_EXPR
, pointer64_type
, tem
),
3842 /* Next come the addressing coefficients. */
3843 tem
= size_one_node
;
3844 for (i
= 0; i
< ndim
; i
++)
3848 = size_binop (MULT_EXPR
, tem
,
3849 size_binop (PLUS_EXPR
,
3850 size_binop (MINUS_EXPR
,
3851 TYPE_MAX_VALUE (idx_arr
[i
]),
3852 TYPE_MIN_VALUE (idx_arr
[i
])),
3855 fname
[0] = (mech
== By_Descriptor_NCA
? 'S' : 'M');
3856 fname
[1] = '0' + i
, fname
[2] = 0;
3858 = make_descriptor_field (fname
, gnat_type_for_size (64, 1),
3859 record_type
, idx_length
, field_list
);
3861 if (mech
== By_Descriptor_NCA
)
3865 /* Finally here are the bounds. */
3866 for (i
= 0; i
< ndim
; i
++)
3870 fname
[0] = 'L', fname
[1] = '0' + i
, fname
[2] = 0;
3872 = make_descriptor_field (fname
, gnat_type_for_size (64, 1),
3874 TYPE_MIN_VALUE (idx_arr
[i
]), field_list
);
3878 = make_descriptor_field (fname
, gnat_type_for_size (64, 1),
3880 TYPE_MAX_VALUE (idx_arr
[i
]), field_list
);
3885 post_error ("unsupported descriptor type for &", gnat_entity
);
3888 TYPE_NAME (record_type
) = create_concat_name (gnat_entity
, "DESC64");
3889 finish_record_type (record_type
, nreverse (field_list
), 0, false);
3893 /* Fill in a VMS descriptor of GNU_TYPE for GNU_EXPR and return the result.
3894 GNAT_ACTUAL is the actual parameter for which the descriptor is built. */
3897 fill_vms_descriptor (tree gnu_type
, tree gnu_expr
, Node_Id gnat_actual
)
3899 vec
<constructor_elt
, va_gc
> *v
= NULL
;
3902 gnu_expr
= maybe_unconstrained_array (gnu_expr
);
3903 gnu_expr
= gnat_protect_expr (gnu_expr
);
3904 gnat_mark_addressable (gnu_expr
);
3906 /* We may need to substitute both GNU_EXPR and a CALL_EXPR to the raise CE
3907 routine in case we have a 32-bit descriptor. */
3908 gnu_expr
= build2 (COMPOUND_EXPR
, void_type_node
,
3909 build_call_raise (CE_Range_Check_Failed
, gnat_actual
,
3910 N_Raise_Constraint_Error
),
3913 for (field
= TYPE_FIELDS (gnu_type
); field
; field
= DECL_CHAIN (field
))
3916 = convert (TREE_TYPE (field
),
3917 SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_INITIAL (field
),
3919 CONSTRUCTOR_APPEND_ELT (v
, field
, value
);
3922 return gnat_build_constructor (gnu_type
, v
);
3925 /* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
3926 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3927 which the VMS descriptor is passed. */
3930 convert_vms_descriptor64 (tree gnu_type
, tree gnu_expr
, Entity_Id gnat_subprog
)
3932 tree desc_type
= TREE_TYPE (TREE_TYPE (gnu_expr
));
3933 tree desc
= build1 (INDIRECT_REF
, desc_type
, gnu_expr
);
3934 /* The CLASS field is the 3rd field in the descriptor. */
3935 tree klass
= DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type
)));
3936 /* The POINTER field is the 6th field in the descriptor. */
3937 tree pointer
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (klass
)));
3939 /* Retrieve the value of the POINTER field. */
3941 = build3 (COMPONENT_REF
, TREE_TYPE (pointer
), desc
, pointer
, NULL_TREE
);
3943 if (POINTER_TYPE_P (gnu_type
))
3944 return convert (gnu_type
, gnu_expr64
);
3946 else if (TYPE_IS_FAT_POINTER_P (gnu_type
))
3948 tree p_array_type
= TREE_TYPE (TYPE_FIELDS (gnu_type
));
3949 tree p_bounds_type
= TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type
)));
3950 tree template_type
= TREE_TYPE (p_bounds_type
);
3951 tree min_field
= TYPE_FIELDS (template_type
);
3952 tree max_field
= DECL_CHAIN (TYPE_FIELDS (template_type
));
3953 tree template_tree
, template_addr
, aflags
, dimct
, t
, u
;
3954 /* See the head comment of build_vms_descriptor. */
3955 int iklass
= TREE_INT_CST_LOW (DECL_INITIAL (klass
));
3956 tree lfield
, ufield
;
3957 vec
<constructor_elt
, va_gc
> *v
;
3959 /* Convert POINTER to the pointer-to-array type. */
3960 gnu_expr64
= convert (p_array_type
, gnu_expr64
);
3964 case 1: /* Class S */
3965 case 15: /* Class SB */
3966 /* Build {1, LENGTH} template; LENGTH64 is the 5th field. */
3968 t
= DECL_CHAIN (DECL_CHAIN (klass
));
3969 t
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3970 CONSTRUCTOR_APPEND_ELT (v
, min_field
,
3971 convert (TREE_TYPE (min_field
),
3973 CONSTRUCTOR_APPEND_ELT (v
, max_field
,
3974 convert (TREE_TYPE (max_field
), t
));
3975 template_tree
= gnat_build_constructor (template_type
, v
);
3976 template_addr
= build_unary_op (ADDR_EXPR
, NULL_TREE
, template_tree
);
3978 /* For class S, we are done. */
3982 /* Test that we really have a SB descriptor, like DEC Ada. */
3983 t
= build3 (COMPONENT_REF
, TREE_TYPE (klass
), desc
, klass
, NULL
);
3984 u
= convert (TREE_TYPE (klass
), DECL_INITIAL (klass
));
3985 u
= build_binary_op (EQ_EXPR
, boolean_type_node
, t
, u
);
3986 /* If so, there is already a template in the descriptor and
3987 it is located right after the POINTER field. The fields are
3988 64bits so they must be repacked. */
3989 t
= DECL_CHAIN (pointer
);
3990 lfield
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3991 lfield
= convert (TREE_TYPE (TYPE_FIELDS (template_type
)), lfield
);
3994 ufield
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3996 (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type
))), ufield
);
3998 /* Build the template in the form of a constructor. */
4000 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (template_type
), lfield
);
4001 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (template_type
)),
4003 template_tree
= gnat_build_constructor (template_type
, v
);
4005 /* Otherwise use the {1, LENGTH} template we build above. */
4006 template_addr
= build3 (COND_EXPR
, p_bounds_type
, u
,
4007 build_unary_op (ADDR_EXPR
, p_bounds_type
,
4012 case 4: /* Class A */
4013 /* The AFLAGS field is the 3rd field after the pointer in the
4015 t
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer
)));
4016 aflags
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
4017 /* The DIMCT field is the next field in the descriptor after
4020 dimct
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
4021 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
4022 or FL_COEFF or FL_BOUNDS not set. */
4023 u
= build_int_cst (TREE_TYPE (aflags
), 192);
4024 u
= build_binary_op (TRUTH_OR_EXPR
, boolean_type_node
,
4025 build_binary_op (NE_EXPR
, boolean_type_node
,
4027 convert (TREE_TYPE (dimct
),
4029 build_binary_op (NE_EXPR
, boolean_type_node
,
4030 build2 (BIT_AND_EXPR
,
4034 /* There is already a template in the descriptor and it is located
4035 in block 3. The fields are 64bits so they must be repacked. */
4036 t
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN
4038 lfield
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
4039 lfield
= convert (TREE_TYPE (TYPE_FIELDS (template_type
)), lfield
);
4042 ufield
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
4044 (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type
))), ufield
);
4046 /* Build the template in the form of a constructor. */
4048 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (template_type
), lfield
);
4049 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (template_type
)),
4051 template_tree
= gnat_build_constructor (template_type
, v
);
4052 template_tree
= build3 (COND_EXPR
, template_type
, u
,
4053 build_call_raise (CE_Length_Check_Failed
, Empty
,
4054 N_Raise_Constraint_Error
),
4057 = build_unary_op (ADDR_EXPR
, p_bounds_type
, template_tree
);
4060 case 10: /* Class NCA */
4062 post_error ("unsupported descriptor type for &", gnat_subprog
);
4063 template_addr
= integer_zero_node
;
4067 /* Build the fat pointer in the form of a constructor. */
4069 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (gnu_type
), gnu_expr64
);
4070 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (gnu_type
)),
4072 return gnat_build_constructor (gnu_type
, v
);
4079 /* Convert GNU_EXPR, a pointer to a 32bit VMS descriptor, to GNU_TYPE, a
4080 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
4081 which the VMS descriptor is passed. */
/* NOTE (review): the text of this function is damaged by extraction --
   the original line numbering embedded in the text jumps (e.g. 4081->4084,
   4113->4117), so the return type, opening brace, the switch header on the
   descriptor class and the break statements are missing from this extract.
   Only comments were added; the remaining code is byte-identical.  */
4084 convert_vms_descriptor32 (tree gnu_type
, tree gnu_expr
, Entity_Id gnat_subprog
)
4086 tree desc_type
= TREE_TYPE (TREE_TYPE (gnu_expr
));
4087 tree desc
= build1 (INDIRECT_REF
, desc_type
, gnu_expr
);
4088 /* The CLASS field is the 3rd field in the descriptor. */
4089 tree klass
= DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type
)));
4090 /* The POINTER field is the 4th field in the descriptor. */
4091 tree pointer
= DECL_CHAIN (klass
);
4093 /* Retrieve the value of the POINTER field. */
4095 = build3 (COMPONENT_REF
, TREE_TYPE (pointer
), desc
, pointer
, NULL_TREE
);
/* For a regular pointer target type, converting the POINTER field value
   is all that is needed.  */
4097 if (POINTER_TYPE_P (gnu_type
))
4098 return convert (gnu_type
, gnu_expr32
);
4100 else if (TYPE_IS_FAT_POINTER_P (gnu_type
))
4102 tree p_array_type
= TREE_TYPE (TYPE_FIELDS (gnu_type
));
4103 tree p_bounds_type
= TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type
)));
4104 tree template_type
= TREE_TYPE (p_bounds_type
);
4105 tree min_field
= TYPE_FIELDS (template_type
);
4106 tree max_field
= DECL_CHAIN (TYPE_FIELDS (template_type
));
4107 tree template_tree
, template_addr
, aflags
, dimct
, t
, u
;
4108 /* See the head comment of build_vms_descriptor. */
4109 int iklass
= TREE_INT_CST_LOW (DECL_INITIAL (klass
));
4110 vec
<constructor_elt
, va_gc
> *v
;
4112 /* Convert POINTER to the pointer-to-array type. */
4113 gnu_expr32
= convert (p_array_type
, gnu_expr32
);
/* The case labels below belong to a switch on IKLASS whose header line
   is missing from this extract.  */
4117 case 1: /* Class S */
4118 case 15: /* Class SB */
4119 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
4121 t
= TYPE_FIELDS (desc_type
);
4122 t
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
4123 CONSTRUCTOR_APPEND_ELT (v
, min_field
,
4124 convert (TREE_TYPE (min_field
),
4126 CONSTRUCTOR_APPEND_ELT (v
, max_field
,
4127 convert (TREE_TYPE (max_field
), t
));
4128 template_tree
= gnat_build_constructor (template_type
, v
);
4129 template_addr
= build_unary_op (ADDR_EXPR
, NULL_TREE
, template_tree
);
4131 /* For class S, we are done. */
4135 /* Test that we really have a SB descriptor, like DEC Ada. */
4136 t
= build3 (COMPONENT_REF
, TREE_TYPE (klass
), desc
, klass
, NULL
);
4137 u
= convert (TREE_TYPE (klass
), DECL_INITIAL (klass
));
4138 u
= build_binary_op (EQ_EXPR
, boolean_type_node
, t
, u
);
4139 /* If so, there is already a template in the descriptor and
4140 it is located right after the POINTER field. */
4141 t
= DECL_CHAIN (pointer
);
4143 = build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
4144 /* Otherwise use the {1, LENGTH} template we build above. */
4145 template_addr
= build3 (COND_EXPR
, p_bounds_type
, u
,
4146 build_unary_op (ADDR_EXPR
, p_bounds_type
,
4151 case 4: /* Class A */
4152 /* The AFLAGS field is the 7th field in the descriptor. */
4153 t
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer
)));
4154 aflags
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
4155 /* The DIMCT field is the 8th field in the descriptor. */
4157 dimct
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
4158 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
4159 or FL_COEFF or FL_BOUNDS not set. */
/* 192 == 0xC0: mask of the two flag bits tested below -- presumably
   FL_COEFF | FL_BOUNDS per the comment above; confirm against the
   descriptor layout in build_vms_descriptor.  */
4160 u
= build_int_cst (TREE_TYPE (aflags
), 192);
4161 u
= build_binary_op (TRUTH_OR_EXPR
, boolean_type_node
,
4162 build_binary_op (NE_EXPR
, boolean_type_node
,
4164 convert (TREE_TYPE (dimct
),
4166 build_binary_op (NE_EXPR
, boolean_type_node
,
4167 build2 (BIT_AND_EXPR
,
4171 /* There is already a template in the descriptor and it is
4172 located at the start of block 3 (12th field). */
4173 t
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (t
))));
4175 = build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
4176 template_tree
= build3 (COND_EXPR
, TREE_TYPE (t
), u
,
4177 build_call_raise (CE_Length_Check_Failed
, Empty
,
4178 N_Raise_Constraint_Error
),
4181 = build_unary_op (ADDR_EXPR
, p_bounds_type
, template_tree
);
4184 case 10: /* Class NCA */
/* Unsupported descriptor classes are reported on the subprogram and a
   zero template address is used as a placeholder.  */
4186 post_error ("unsupported descriptor type for &", gnat_subprog
);
4187 template_addr
= integer_zero_node
;
4191 /* Build the fat pointer in the form of a constructor. */
4193 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (gnu_type
), gnu_expr32
);
4194 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (gnu_type
)),
4197 return gnat_build_constructor (gnu_type
, v
);
4204 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
4205 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
4206 pointer type of GNU_EXPR. GNAT_SUBPROG is the subprogram to which the
4207 descriptor is passed. */
/* NOTE (review): text damaged by extraction -- the return type, braces and
   the left-hand side of the IS64BIT assignment (original line 4232) are
   missing from this extract.  Only comments were added.  */
4210 convert_vms_descriptor (tree gnu_type
, tree gnu_expr
, tree gnu_expr_alt_type
,
4211 Entity_Id gnat_subprog
)
4213 tree desc_type
= TREE_TYPE (TREE_TYPE (gnu_expr
));
4214 tree desc
= build1 (INDIRECT_REF
, desc_type
, gnu_expr
);
/* MBO is the first field of the descriptor; its name discriminates the
   64-bit layout (named "MBO") from the plain 32-bit one.  */
4215 tree mbo
= TYPE_FIELDS (desc_type
);
4216 const char *mbostr
= IDENTIFIER_POINTER (DECL_NAME (mbo
));
/* MBMO is the 4th field of the descriptor.  */
4217 tree mbmo
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (mbo
)));
4218 tree is64bit
, gnu_expr32
, gnu_expr64
;
4220 /* If the field name is not MBO, it must be 32-bit and no alternate.
4221 Otherwise primary must be 64-bit and alternate 32-bit. */
4222 if (strcmp (mbostr
, "MBO") != 0)
4224 tree ret
= convert_vms_descriptor32 (gnu_type
, gnu_expr
, gnat_subprog
);
4228 /* Build the test for 64-bit descriptor. */
4229 mbo
= build3 (COMPONENT_REF
, TREE_TYPE (mbo
), desc
, mbo
, NULL_TREE
);
4230 mbmo
= build3 (COMPONENT_REF
, TREE_TYPE (mbmo
), desc
, mbmo
, NULL_TREE
);
/* A 64-bit descriptor has MBO == 1 (first comparison, whose right-hand
   operand fell outside this extract) and MBMO == -1.  */
4232 = build_binary_op (TRUTH_ANDIF_EXPR
, boolean_type_node
,
4233 build_binary_op (EQ_EXPR
, boolean_type_node
,
4234 convert (integer_type_node
, mbo
),
4236 build_binary_op (EQ_EXPR
, boolean_type_node
,
4237 convert (integer_type_node
, mbmo
),
4238 integer_minus_one_node
));
4240 /* Build the 2 possible end results. */
4241 gnu_expr64
= convert_vms_descriptor64 (gnu_type
, gnu_expr
, gnat_subprog
);
4242 gnu_expr
= fold_convert (gnu_expr_alt_type
, gnu_expr
);
4243 gnu_expr32
= convert_vms_descriptor32 (gnu_type
, gnu_expr
, gnat_subprog
);
/* Select between the two conversions at run time on the IS64BIT test.  */
4244 return build3 (COND_EXPR
, gnu_type
, is64bit
, gnu_expr64
, gnu_expr32
);
4247 /* Build a type to be used to represent an aliased object whose nominal type
4248 is an unconstrained array. This consists of a RECORD_TYPE containing a
4249 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
4250 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
4251 an arbitrary unconstrained object. Use NAME as the name of the record.
4252 DEBUG_INFO_P is true if we need to write debug information for the type. */
/* NOTE (review): extraction dropped the return type, the DEBUG_INFO_P
   parameter line, the braces, the TEMPLATE_FIELD/ARRAY_FIELD declarations
   (left-hand sides of lines 4260/4263) and the final return.  Only
   comments were added.  */
4255 build_unc_object_type (tree template_type
, tree object_type
, tree name
,
4258 tree type
= make_node (RECORD_TYPE
);
/* First field: the BOUNDS template.  */
4260 = create_field_decl (get_identifier ("BOUNDS"), template_type
, type
,
4261 NULL_TREE
, NULL_TREE
, 0, 1);
/* Second field: the ARRAY data itself.  */
4263 = create_field_decl (get_identifier ("ARRAY"), object_type
, type
,
4264 NULL_TREE
, NULL_TREE
, 0, 1);
4266 TYPE_NAME (type
) = name
;
4267 TYPE_CONTAINS_TEMPLATE_P (type
) = 1;
4268 DECL_CHAIN (template_field
) = array_field
;
4269 finish_record_type (type
, template_field
, 0, true);
4271 /* Declare it now since it will never be declared otherwise. This is
4272 necessary to ensure that its subtrees are properly marked. */
4273 create_type_decl (name
, type
, true, debug_info_p
, Empty
);
4278 /* Same, taking a thin or fat pointer type instead of a template type. */
/* NOTE (review): extraction dropped the return type, braces, the
   TEMPLATE_TYPE declaration (left-hand side of line 4289) and the
   `return` keyword of line 4294.  Only comments were added.  */
4281 build_unc_object_type_from_ptr (tree thin_fat_ptr_type
, tree object_type
,
4282 tree name
, bool debug_info_p
)
4286 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type
));
/* Extract the template type: for a fat pointer it is the designated type
   of the second field; for a thin pointer, the type of the first field of
   the designated record.  */
4289 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type
)
4290 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type
))))
4291 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type
))));
4294 build_unc_object_type (template_type
, object_type
, name
, debug_info_p
);
4297 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
4298 In the normal case this is just two adjustments, but we have more to
4299 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
/* NOTE (review): text damaged by extraction -- return type, braces, the
   early-return guards (after lines 4313 and 4341), the left-hand side of
   the build_qualified_type assignment (line 4337) and several else-arms
   are missing from this extract.  Only comments were added.  */
4302 update_pointer_to (tree old_type
, tree new_type
)
4304 tree ptr
= TYPE_POINTER_TO (old_type
);
4305 tree ref
= TYPE_REFERENCE_TO (old_type
);
4308 /* If this is the main variant, process all the other variants first. */
4309 if (TYPE_MAIN_VARIANT (old_type
) == old_type
)
4310 for (t
= TYPE_NEXT_VARIANT (old_type
); t
; t
= TYPE_NEXT_VARIANT (t
))
4311 update_pointer_to (t
, new_type
);
4313 /* If no pointers and no references, we are done. */
4317 /* Merge the old type qualifiers in the new type.
4319 Each old variant has qualifiers for specific reasons, and the new
4320 designated type as well. Each set of qualifiers represents useful
4321 information grabbed at some point, and merging the two simply unifies
4322 these inputs into the final type description.
4324 Consider for instance a volatile type frozen after an access to constant
4325 type designating it; after the designated type's freeze, we get here with
4326 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
4327 when the access type was processed. We will make a volatile and readonly
4328 designated type, because that's what it really is.
4330 We might also get here for a non-dummy OLD_TYPE variant with different
4331 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
4332 to private record type elaboration (see the comments around the call to
4333 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
4334 the qualifiers in those cases too, to avoid accidentally discarding the
4335 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
4337 = build_qualified_type (new_type
,
4338 TYPE_QUALS (old_type
) | TYPE_QUALS (new_type
));
4340 /* If old type and new type are identical, there is nothing to do. */
4341 if (old_type
== new_type
)
4344 /* Otherwise, first handle the simple case. */
4345 if (TREE_CODE (new_type
) != UNCONSTRAINED_ARRAY_TYPE
)
4347 tree new_ptr
, new_ref
;
4349 /* If pointer or reference already points to new type, nothing to do.
4350 This can happen as update_pointer_to can be invoked multiple times
4351 on the same couple of types because of the type variants. */
4352 if ((ptr
&& TREE_TYPE (ptr
) == new_type
)
4353 || (ref
&& TREE_TYPE (ref
) == new_type
))
4356 /* Chain PTR and its variants at the end. */
4357 new_ptr
= TYPE_POINTER_TO (new_type
);
4360 while (TYPE_NEXT_PTR_TO (new_ptr
))
4361 new_ptr
= TYPE_NEXT_PTR_TO (new_ptr
);
4362 TYPE_NEXT_PTR_TO (new_ptr
) = ptr
;
4365 TYPE_POINTER_TO (new_type
) = ptr
;
4367 /* Now adjust them. */
4368 for (; ptr
; ptr
= TYPE_NEXT_PTR_TO (ptr
))
4369 for (t
= TYPE_MAIN_VARIANT (ptr
); t
; t
= TYPE_NEXT_VARIANT (t
))
4371 TREE_TYPE (t
) = new_type
;
/* Also retarget the pointer stored in the cached null bounds, if any
   (the operand of the ADDR_EXPR recorded in TYPE_NULL_BOUNDS).  */
4372 if (TYPE_NULL_BOUNDS (t
))
4373 TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t
), 0)) = new_type
;
4376 /* Chain REF and its variants at the end. */
4377 new_ref
= TYPE_REFERENCE_TO (new_type
);
4380 while (TYPE_NEXT_REF_TO (new_ref
))
4381 new_ref
= TYPE_NEXT_REF_TO (new_ref
);
4382 TYPE_NEXT_REF_TO (new_ref
) = ref
;
4385 TYPE_REFERENCE_TO (new_type
) = ref
;
4387 /* Now adjust them. */
4388 for (; ref
; ref
= TYPE_NEXT_REF_TO (ref
))
4389 for (t
= TYPE_MAIN_VARIANT (ref
); t
; t
= TYPE_NEXT_VARIANT (t
))
4390 TREE_TYPE (t
) = new_type
;
/* OLD_TYPE no longer owns the pointer/reference chains.  */
4392 TYPE_POINTER_TO (old_type
) = NULL_TREE
;
4393 TYPE_REFERENCE_TO (old_type
) = NULL_TREE
;
4396 /* Now deal with the unconstrained array case. In this case the pointer
4397 is actually a record where both fields are pointers to dummy nodes.
4398 Turn them into pointers to the correct types using update_pointer_to.
4399 Likewise for the pointer to the object record (thin pointer). */
4402 tree new_ptr
= TYPE_POINTER_TO (new_type
);
4404 gcc_assert (TYPE_IS_FAT_POINTER_P (ptr
));
4406 /* If PTR already points to NEW_TYPE, nothing to do. This can happen
4407 since update_pointer_to can be invoked multiple times on the same
4408 couple of types because of the type variants. */
4409 if (TYPE_UNCONSTRAINED_ARRAY (ptr
) == new_type
)
/* Recurse on the designated types of both fat-pointer fields; the
   update_pointer_to call heads (lines 4412/4416) are missing from this
   extract.  */
4413 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr
))),
4414 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr
))));
4417 (TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr
)))),
4418 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr
)))));
4420 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type
),
4421 TYPE_OBJECT_RECORD_TYPE (new_type
));
4423 TYPE_POINTER_TO (old_type
) = NULL_TREE
;
4427 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
4428 unconstrained one. This involves making or finding a template. */
/* NOTE (review): text damaged by extraction -- the return type, braces,
   the TEMPLATE_ADDR declaration and several assignment left-hand sides
   (e.g. lines 4485, 4490, 4499, 4512) are missing from this extract.
   Only comments were added.  */
4431 convert_to_fat_pointer (tree type
, tree expr
)
4433 tree template_type
= TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type
))));
4434 tree p_array_type
= TREE_TYPE (TYPE_FIELDS (type
));
4435 tree etype
= TREE_TYPE (expr
);
4437 vec
<constructor_elt
, va_gc
> *v
;
4440 /* If EXPR is null, make a fat pointer that contains a null pointer to the
4441 array (compare_fat_pointers ensures that this is the full discriminant)
4442 and a valid pointer to the bounds. This latter property is necessary
4443 since the compiler can hoist the load of the bounds done through it. */
4444 if (integer_zerop (expr
))
4446 tree ptr_template_type
= TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type
)));
4447 tree null_bounds
, t
;
/* Reuse the cached null bounds for this pointer-to-template type if a
   previous call already built them.  */
4449 if (TYPE_NULL_BOUNDS (ptr_template_type
))
4450 null_bounds
= TYPE_NULL_BOUNDS (ptr_template_type
);
4453 /* The template type can still be dummy at this point so we build an
4454 empty constructor. The middle-end will fill it in with zeros. */
4455 t
= build_constructor (template_type
,
4457 TREE_CONSTANT (t
) = TREE_STATIC (t
) = 1;
4458 null_bounds
= build_unary_op (ADDR_EXPR
, NULL_TREE
, t
);
/* Cache the freshly built null bounds for subsequent conversions.  */
4459 SET_TYPE_NULL_BOUNDS (ptr_template_type
, null_bounds
);
4462 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (type
),
4463 fold_convert (p_array_type
, null_pointer_node
));
4464 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (type
)), null_bounds
);
4465 t
= build_constructor (type
, v
);
4466 /* Do not set TREE_CONSTANT so as to force T to static memory. */
4467 TREE_CONSTANT (t
) = 0;
4468 TREE_STATIC (t
) = 1;
4473 /* If EXPR is a thin pointer, make template and data from the record. */
4474 if (TYPE_IS_THIN_POINTER_P (etype
))
4476 tree field
= TYPE_FIELDS (TREE_TYPE (etype
));
/* Protect EXPR against multiple evaluation: it is used for both the
   template address and the array address below.  */
4478 expr
= gnat_protect_expr (expr
);
4480 /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
4481 the thin pointer value has been shifted so we shift it back to get
4482 the template address. */
4483 if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype
)))
4486 = build_binary_op (POINTER_PLUS_EXPR
, etype
, expr
,
4487 fold_build1 (NEGATE_EXPR
, sizetype
,
4489 (DECL_CHAIN (field
))));
4491 = fold_convert (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type
))),
4495 /* Otherwise we explicitly take the address of the fields. */
4498 expr
= build_unary_op (INDIRECT_REF
, NULL_TREE
, expr
);
4500 = build_unary_op (ADDR_EXPR
, NULL_TREE
,
4501 build_component_ref (expr
, NULL_TREE
, field
,
4503 expr
= build_unary_op (ADDR_EXPR
, NULL_TREE
,
4504 build_component_ref (expr
, NULL_TREE
,
4510 /* Otherwise, build the constructor for the template. */
4513 = build_unary_op (ADDR_EXPR
, NULL_TREE
,
4514 build_template (template_type
, TREE_TYPE (etype
),
4517 /* The final result is a constructor for the fat pointer.
4519 If EXPR is an argument of a foreign convention subprogram, the type it
4520 points to is directly the component type. In this case, the expression
4521 type may not match the corresponding FIELD_DECL type at this point, so we
4522 call "convert" here to fix that up if necessary. This type consistency is
4523 required, for instance because it ensures that possible later folding of
4524 COMPONENT_REFs against this constructor always yields something of the
4525 same type as the initial reference.
4527 Note that the call to "build_template" above is still fine because it
4528 will only refer to the provided TEMPLATE_TYPE in this case. */
4529 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (type
), convert (p_array_type
, expr
));
4530 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (type
)), template_addr
);
4531 return gnat_build_constructor (type
, v
);
4534 /* Create an expression whose value is that of EXPR,
4535 converted to type TYPE. The TREE_TYPE of the value
4536 is always TYPE. This function implements all reasonable
4537 conversions; callers should filter out those that are
4538 not permitted by the language being compiled. */
4541 convert (tree type
, tree expr
)
4543 tree etype
= TREE_TYPE (expr
);
4544 enum tree_code ecode
= TREE_CODE (etype
);
4545 enum tree_code code
= TREE_CODE (type
);
4547 /* If the expression is already of the right type, we are done. */
4551 /* If both input and output have padding and are of variable size, do this
4552 as an unchecked conversion. Likewise if one is a mere variant of the
4553 other, so we avoid a pointless unpad/repad sequence. */
4554 else if (code
== RECORD_TYPE
&& ecode
== RECORD_TYPE
4555 && TYPE_PADDING_P (type
) && TYPE_PADDING_P (etype
)
4556 && (!TREE_CONSTANT (TYPE_SIZE (type
))
4557 || !TREE_CONSTANT (TYPE_SIZE (etype
))
4558 || TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (etype
)
4559 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type
)))
4560 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype
)))))
4563 /* If the output type has padding, convert to the inner type and make a
4564 constructor to build the record, unless a variable size is involved. */
4565 else if (code
== RECORD_TYPE
&& TYPE_PADDING_P (type
))
4567 vec
<constructor_elt
, va_gc
> *v
;
4569 /* If we previously converted from another type and our type is
4570 of variable size, remove the conversion to avoid the need for
4571 variable-sized temporaries. Likewise for a conversion between
4572 original and packable version. */
4573 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
4574 && (!TREE_CONSTANT (TYPE_SIZE (type
))
4575 || (ecode
== RECORD_TYPE
4576 && TYPE_NAME (etype
)
4577 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr
, 0))))))
4578 expr
= TREE_OPERAND (expr
, 0);
4580 /* If we are just removing the padding from expr, convert the original
4581 object if we have variable size in order to avoid the need for some
4582 variable-sized temporaries. Likewise if the padding is a variant
4583 of the other, so we avoid a pointless unpad/repad sequence. */
4584 if (TREE_CODE (expr
) == COMPONENT_REF
4585 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr
, 0)))
4586 && (!TREE_CONSTANT (TYPE_SIZE (type
))
4587 || TYPE_MAIN_VARIANT (type
)
4588 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr
, 0)))
4589 || (ecode
== RECORD_TYPE
4590 && TYPE_NAME (etype
)
4591 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type
))))))
4592 return convert (type
, TREE_OPERAND (expr
, 0));
4594 /* If the inner type is of self-referential size and the expression type
4595 is a record, do this as an unchecked conversion. But first pad the
4596 expression if possible to have the same size on both sides. */
4597 if (ecode
== RECORD_TYPE
4598 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type
))))
4600 if (TREE_CODE (TYPE_SIZE (etype
)) == INTEGER_CST
)
4601 expr
= convert (maybe_pad_type (etype
, TYPE_SIZE (type
), 0, Empty
,
4602 false, false, false, true),
4604 return unchecked_convert (type
, expr
, false);
4607 /* If we are converting between array types with variable size, do the
4608 final conversion as an unchecked conversion, again to avoid the need
4609 for some variable-sized temporaries. If valid, this conversion is
4610 very likely purely technical and without real effects. */
4611 if (ecode
== ARRAY_TYPE
4612 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type
))) == ARRAY_TYPE
4613 && !TREE_CONSTANT (TYPE_SIZE (etype
))
4614 && !TREE_CONSTANT (TYPE_SIZE (type
)))
4615 return unchecked_convert (type
,
4616 convert (TREE_TYPE (TYPE_FIELDS (type
)),
4621 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (type
),
4622 convert (TREE_TYPE (TYPE_FIELDS (type
)), expr
));
4623 return gnat_build_constructor (type
, v
);
4626 /* If the input type has padding, remove it and convert to the output type.
4627 The conditions ordering is arranged to ensure that the output type is not
4628 a padding type here, as it is not clear whether the conversion would
4629 always be correct if this was to happen. */
4630 else if (ecode
== RECORD_TYPE
&& TYPE_PADDING_P (etype
))
4634 /* If we have just converted to this padded type, just get the
4635 inner expression. */
4636 if (TREE_CODE (expr
) == CONSTRUCTOR
4637 && !vec_safe_is_empty (CONSTRUCTOR_ELTS (expr
))
4638 && (*CONSTRUCTOR_ELTS (expr
))[0].index
== TYPE_FIELDS (etype
))
4639 unpadded
= (*CONSTRUCTOR_ELTS (expr
))[0].value
;
4641 /* Otherwise, build an explicit component reference. */
4644 = build_component_ref (expr
, NULL_TREE
, TYPE_FIELDS (etype
), false);
4646 return convert (type
, unpadded
);
4649 /* If the input is a biased type, adjust first. */
4650 if (ecode
== INTEGER_TYPE
&& TYPE_BIASED_REPRESENTATION_P (etype
))
4651 return convert (type
, fold_build2 (PLUS_EXPR
, TREE_TYPE (etype
),
4652 fold_convert (TREE_TYPE (etype
),
4654 TYPE_MIN_VALUE (etype
)));
4656 /* If the input is a justified modular type, we need to extract the actual
4657 object before converting it to any other type with the exceptions of an
4658 unconstrained array or of a mere type variant. It is useful to avoid the
4659 extraction and conversion in the type variant case because it could end
4660 up replacing a VAR_DECL expr by a constructor and we might be about the
4661 take the address of the result. */
4662 if (ecode
== RECORD_TYPE
&& TYPE_JUSTIFIED_MODULAR_P (etype
)
4663 && code
!= UNCONSTRAINED_ARRAY_TYPE
4664 && TYPE_MAIN_VARIANT (type
) != TYPE_MAIN_VARIANT (etype
))
4665 return convert (type
, build_component_ref (expr
, NULL_TREE
,
4666 TYPE_FIELDS (etype
), false));
4668 /* If converting to a type that contains a template, convert to the data
4669 type and then build the template. */
4670 if (code
== RECORD_TYPE
&& TYPE_CONTAINS_TEMPLATE_P (type
))
4672 tree obj_type
= TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type
)));
4673 vec
<constructor_elt
, va_gc
> *v
;
4676 /* If the source already has a template, get a reference to the
4677 associated array only, as we are going to rebuild a template
4678 for the target type anyway. */
4679 expr
= maybe_unconstrained_array (expr
);
4681 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (type
),
4682 build_template (TREE_TYPE (TYPE_FIELDS (type
)),
4683 obj_type
, NULL_TREE
));
4684 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (type
)),
4685 convert (obj_type
, expr
));
4686 return gnat_build_constructor (type
, v
);
4689 /* There are some cases of expressions that we process specially. */
4690 switch (TREE_CODE (expr
))
4696 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4697 conversion in gnat_expand_expr. NULL_EXPR does not represent
4698 and actual value, so no conversion is needed. */
4699 expr
= copy_node (expr
);
4700 TREE_TYPE (expr
) = type
;
4704 /* If we are converting a STRING_CST to another constrained array type,
4705 just make a new one in the proper type. */
4706 if (code
== ecode
&& AGGREGATE_TYPE_P (etype
)
4707 && !(TREE_CODE (TYPE_SIZE (etype
)) == INTEGER_CST
4708 && TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
))
4710 expr
= copy_node (expr
);
4711 TREE_TYPE (expr
) = type
;
4717 /* If we are converting a VECTOR_CST to a mere type variant, just make
4718 a new one in the proper type. */
4719 if (code
== ecode
&& gnat_types_compatible_p (type
, etype
))
4721 expr
= copy_node (expr
);
4722 TREE_TYPE (expr
) = type
;
4727 /* If we are converting a CONSTRUCTOR to a mere type variant, or to
4728 another padding type around the same type, just make a new one in
4731 && (gnat_types_compatible_p (type
, etype
)
4732 || (code
== RECORD_TYPE
4733 && TYPE_PADDING_P (type
) && TYPE_PADDING_P (etype
)
4734 && TREE_TYPE (TYPE_FIELDS (type
))
4735 == TREE_TYPE (TYPE_FIELDS (etype
)))))
4737 expr
= copy_node (expr
);
4738 TREE_TYPE (expr
) = type
;
4739 CONSTRUCTOR_ELTS (expr
) = vec_safe_copy (CONSTRUCTOR_ELTS (expr
));
4743 /* Likewise for a conversion between original and packable version, or
4744 conversion between types of the same size and with the same list of
4745 fields, but we have to work harder to preserve type consistency. */
4747 && code
== RECORD_TYPE
4748 && (TYPE_NAME (type
) == TYPE_NAME (etype
)
4749 || tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (etype
))))
4752 vec
<constructor_elt
, va_gc
> *e
= CONSTRUCTOR_ELTS (expr
);
4753 unsigned HOST_WIDE_INT len
= vec_safe_length (e
);
4754 vec
<constructor_elt
, va_gc
> *v
;
4756 tree efield
= TYPE_FIELDS (etype
), field
= TYPE_FIELDS (type
);
4757 unsigned HOST_WIDE_INT idx
;
4760 /* Whether we need to clear TREE_CONSTANT et al. on the output
4761 constructor when we convert in place. */
4762 bool clear_constant
= false;
4764 FOR_EACH_CONSTRUCTOR_ELT(e
, idx
, index
, value
)
4766 /* Skip the missing fields in the CONSTRUCTOR. */
4767 while (efield
&& field
&& !SAME_FIELD_P (efield
, index
))
4769 efield
= DECL_CHAIN (efield
);
4770 field
= DECL_CHAIN (field
);
4772 /* The field must be the same. */
4773 if (!(efield
&& field
&& SAME_FIELD_P (efield
, field
)))
4776 = {field
, convert (TREE_TYPE (field
), value
)};
4777 v
->quick_push (elt
);
4779 /* If packing has made this field a bitfield and the input
4780 value couldn't be emitted statically any more, we need to
4781 clear TREE_CONSTANT on our output. */
4783 && TREE_CONSTANT (expr
)
4784 && !CONSTRUCTOR_BITFIELD_P (efield
)
4785 && CONSTRUCTOR_BITFIELD_P (field
)
4786 && !initializer_constant_valid_for_bitfield_p (value
))
4787 clear_constant
= true;
4789 efield
= DECL_CHAIN (efield
);
4790 field
= DECL_CHAIN (field
);
4793 /* If we have been able to match and convert all the input fields
4794 to their output type, convert in place now. We'll fallback to a
4795 view conversion downstream otherwise. */
4798 expr
= copy_node (expr
);
4799 TREE_TYPE (expr
) = type
;
4800 CONSTRUCTOR_ELTS (expr
) = v
;
4802 TREE_CONSTANT (expr
) = TREE_STATIC (expr
) = 0;
4807 /* Likewise for a conversion between array type and vector type with a
4808 compatible representative array. */
4809 else if (code
== VECTOR_TYPE
4810 && ecode
== ARRAY_TYPE
4811 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type
),
4814 vec
<constructor_elt
, va_gc
> *e
= CONSTRUCTOR_ELTS (expr
);
4815 unsigned HOST_WIDE_INT len
= vec_safe_length (e
);
4816 vec
<constructor_elt
, va_gc
> *v
;
4817 unsigned HOST_WIDE_INT ix
;
4820 /* Build a VECTOR_CST from a *constant* array constructor. */
4821 if (TREE_CONSTANT (expr
))
4823 bool constant_p
= true;
4825 /* Iterate through elements and check if all constructor
4826 elements are *_CSTs. */
4827 FOR_EACH_CONSTRUCTOR_VALUE (e
, ix
, value
)
4828 if (!CONSTANT_CLASS_P (value
))
4835 return build_vector_from_ctor (type
,
4836 CONSTRUCTOR_ELTS (expr
));
4839 /* Otherwise, build a regular vector constructor. */
4841 FOR_EACH_CONSTRUCTOR_VALUE (e
, ix
, value
)
4843 constructor_elt elt
= {NULL_TREE
, value
};
4844 v
->quick_push (elt
);
4846 expr
= copy_node (expr
);
4847 TREE_TYPE (expr
) = type
;
4848 CONSTRUCTOR_ELTS (expr
) = v
;
4853 case UNCONSTRAINED_ARRAY_REF
:
4854 /* First retrieve the underlying array. */
4855 expr
= maybe_unconstrained_array (expr
);
4856 etype
= TREE_TYPE (expr
);
4857 ecode
= TREE_CODE (etype
);
4860 case VIEW_CONVERT_EXPR
:
4862 /* GCC 4.x is very sensitive to type consistency overall, and view
4863 conversions thus are very frequent. Even though just "convert"ing
4864 the inner operand to the output type is fine in most cases, it
4865 might expose unexpected input/output type mismatches in special
4866 circumstances so we avoid such recursive calls when we can. */
4867 tree op0
= TREE_OPERAND (expr
, 0);
4869 /* If we are converting back to the original type, we can just
4870 lift the input conversion. This is a common occurrence with
4871 switches back-and-forth amongst type variants. */
4872 if (type
== TREE_TYPE (op0
))
4875 /* Otherwise, if we're converting between two aggregate or vector
4876 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4877 target type in place or to just convert the inner expression. */
4878 if ((AGGREGATE_TYPE_P (type
) && AGGREGATE_TYPE_P (etype
))
4879 || (VECTOR_TYPE_P (type
) && VECTOR_TYPE_P (etype
)))
4881 /* If we are converting between mere variants, we can just
4882 substitute the VIEW_CONVERT_EXPR in place. */
4883 if (gnat_types_compatible_p (type
, etype
))
4884 return build1 (VIEW_CONVERT_EXPR
, type
, op0
);
4886 /* Otherwise, we may just bypass the input view conversion unless
4887 one of the types is a fat pointer, which is handled by
4888 specialized code below which relies on exact type matching. */
4889 else if (!TYPE_IS_FAT_POINTER_P (type
)
4890 && !TYPE_IS_FAT_POINTER_P (etype
))
4891 return convert (type
, op0
);
4901 /* Check for converting to a pointer to an unconstrained array. */
4902 if (TYPE_IS_FAT_POINTER_P (type
) && !TYPE_IS_FAT_POINTER_P (etype
))
4903 return convert_to_fat_pointer (type
, expr
);
4905 /* If we are converting between two aggregate or vector types that are mere
4906 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4907 to a vector type from its representative array type. */
4908 else if ((code
== ecode
4909 && (AGGREGATE_TYPE_P (type
) || VECTOR_TYPE_P (type
))
4910 && gnat_types_compatible_p (type
, etype
))
4911 || (code
== VECTOR_TYPE
4912 && ecode
== ARRAY_TYPE
4913 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type
),
4915 return build1 (VIEW_CONVERT_EXPR
, type
, expr
);
4917 /* If we are converting between tagged types, try to upcast properly. */
4918 else if (ecode
== RECORD_TYPE
&& code
== RECORD_TYPE
4919 && TYPE_ALIGN_OK (etype
) && TYPE_ALIGN_OK (type
))
4921 tree child_etype
= etype
;
4923 tree field
= TYPE_FIELDS (child_etype
);
4924 if (DECL_NAME (field
) == parent_name_id
&& TREE_TYPE (field
) == type
)
4925 return build_component_ref (expr
, NULL_TREE
, field
, false);
4926 child_etype
= TREE_TYPE (field
);
4927 } while (TREE_CODE (child_etype
) == RECORD_TYPE
);
4930 /* If we are converting from a smaller form of record type back to it, just
4931 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4932 size on both sides. */
4933 else if (ecode
== RECORD_TYPE
&& code
== RECORD_TYPE
4934 && smaller_form_type_p (etype
, type
))
4936 expr
= convert (maybe_pad_type (etype
, TYPE_SIZE (type
), 0, Empty
,
4937 false, false, false, true),
4939 return build1 (VIEW_CONVERT_EXPR
, type
, expr
);
4942 /* In all other cases of related types, make a NOP_EXPR. */
4943 else if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (etype
))
4944 return fold_convert (type
, expr
);
4949 return fold_build1 (CONVERT_EXPR
, type
, expr
);
4952 if (TYPE_HAS_ACTUAL_BOUNDS_P (type
)
4953 && (ecode
== ARRAY_TYPE
|| ecode
== UNCONSTRAINED_ARRAY_TYPE
4954 || (ecode
== RECORD_TYPE
&& TYPE_CONTAINS_TEMPLATE_P (etype
))))
4955 return unchecked_convert (type
, expr
, false);
4956 else if (TYPE_BIASED_REPRESENTATION_P (type
))
4957 return fold_convert (type
,
4958 fold_build2 (MINUS_EXPR
, TREE_TYPE (type
),
4959 convert (TREE_TYPE (type
), expr
),
4960 TYPE_MIN_VALUE (type
)));
4962 /* ... fall through ... */
4966 /* If we are converting an additive expression to an integer type
4967 with lower precision, be wary of the optimization that can be
4968 applied by convert_to_integer. There are 2 problematic cases:
4969 - if the first operand was originally of a biased type,
4970 because we could be recursively called to convert it
4971 to an intermediate type and thus rematerialize the
4972 additive operator endlessly,
4973 - if the expression contains a placeholder, because an
4974 intermediate conversion that changes the sign could
4975 be inserted and thus introduce an artificial overflow
4976 at compile time when the placeholder is substituted. */
4977 if (code
== INTEGER_TYPE
4978 && ecode
== INTEGER_TYPE
4979 && TYPE_PRECISION (type
) < TYPE_PRECISION (etype
)
4980 && (TREE_CODE (expr
) == PLUS_EXPR
|| TREE_CODE (expr
) == MINUS_EXPR
))
4982 tree op0
= get_unwidened (TREE_OPERAND (expr
, 0), type
);
4984 if ((TREE_CODE (TREE_TYPE (op0
)) == INTEGER_TYPE
4985 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0
)))
4986 || CONTAINS_PLACEHOLDER_P (expr
))
4987 return build1 (NOP_EXPR
, type
, expr
);
4990 return fold (convert_to_integer (type
, expr
));
4993 case REFERENCE_TYPE
:
4994 /* If converting between two thin pointers, adjust if needed to account
4995 for differing offsets from the base pointer, depending on whether
4996 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
4997 if (TYPE_IS_THIN_POINTER_P (etype
) && TYPE_IS_THIN_POINTER_P (type
))
5000 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype
)) != NULL_TREE
5001 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype
))))
5004 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type
)) != NULL_TREE
5005 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type
))))
5007 tree byte_diff
= size_diffop (type_pos
, etype_pos
);
5009 expr
= build1 (NOP_EXPR
, type
, expr
);
5010 if (integer_zerop (byte_diff
))
5013 return build_binary_op (POINTER_PLUS_EXPR
, type
, expr
,
5014 fold_convert (sizetype
, byte_diff
));
5017 /* If converting fat pointer to normal or thin pointer, get the pointer
5018 to the array and then convert it. */
5019 if (TYPE_IS_FAT_POINTER_P (etype
))
5021 = build_component_ref (expr
, NULL_TREE
, TYPE_FIELDS (etype
), false);
5023 return fold (convert_to_pointer (type
, expr
));
5026 return fold (convert_to_real (type
, expr
));
5029 if (TYPE_JUSTIFIED_MODULAR_P (type
) && !AGGREGATE_TYPE_P (etype
))
5031 vec
<constructor_elt
, va_gc
> *v
;
5034 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (type
),
5035 convert (TREE_TYPE (TYPE_FIELDS (type
)),
5037 return gnat_build_constructor (type
, v
);
5040 /* ... fall through ... */
5043 /* In these cases, assume the front-end has validated the conversion.
5044 If the conversion is valid, it will be a bit-wise conversion, so
5045 it can be viewed as an unchecked conversion. */
5046 return unchecked_convert (type
, expr
, false);
5049 /* This is a either a conversion between a tagged type and some
5050 subtype, which we have to mark as a UNION_TYPE because of
5051 overlapping fields or a conversion of an Unchecked_Union. */
5052 return unchecked_convert (type
, expr
, false);
5054 case UNCONSTRAINED_ARRAY_TYPE
:
5055 /* If the input is a VECTOR_TYPE, convert to the representative
5056 array type first. */
5057 if (ecode
== VECTOR_TYPE
)
5059 expr
= convert (TYPE_REPRESENTATIVE_ARRAY (etype
), expr
);
5060 etype
= TREE_TYPE (expr
);
5061 ecode
= TREE_CODE (etype
);
5064 /* If EXPR is a constrained array, take its address, convert it to a
5065 fat pointer, and then dereference it. Likewise if EXPR is a
5066 record containing both a template and a constrained array.
5067 Note that a record representing a justified modular type
5068 always represents a packed constrained array. */
5069 if (ecode
== ARRAY_TYPE
5070 || (ecode
== INTEGER_TYPE
&& TYPE_HAS_ACTUAL_BOUNDS_P (etype
))
5071 || (ecode
== RECORD_TYPE
&& TYPE_CONTAINS_TEMPLATE_P (etype
))
5072 || (ecode
== RECORD_TYPE
&& TYPE_JUSTIFIED_MODULAR_P (etype
)))
5075 (INDIRECT_REF
, NULL_TREE
,
5076 convert_to_fat_pointer (TREE_TYPE (type
),
5077 build_unary_op (ADDR_EXPR
,
5080 /* Do something very similar for converting one unconstrained
5081 array to another. */
5082 else if (ecode
== UNCONSTRAINED_ARRAY_TYPE
)
5084 build_unary_op (INDIRECT_REF
, NULL_TREE
,
5085 convert (TREE_TYPE (type
),
5086 build_unary_op (ADDR_EXPR
,
5092 return fold (convert_to_complex (type
, expr
));
5099 /* Create an expression whose value is that of EXPR converted to the common
5100 index type, which is sizetype. EXPR is supposed to be in the base type
5101 of the GNAT index type. Calling it is equivalent to doing
5103 convert (sizetype, expr)
5105 but we try to distribute the type conversion with the knowledge that EXPR
5106 cannot overflow in its type. This is a best-effort approach and we fall
5107 back to the above expression as soon as difficulties are encountered.
5109 This is necessary to overcome issues that arise when the GNAT base index
5110 type and the GCC common index type (sizetype) don't have the same size,
5111 which is quite frequent on 64-bit architectures. In this case, and if
5112 the GNAT base index type is signed but the iteration type of the loop has
5113 been forced to unsigned, the loop scalar evolution engine cannot compute
5114 a simple evolution for the general induction variables associated with the
5115 array indices, because it will preserve the wrap-around semantics in the
5116 unsigned type of their "inner" part. As a result, many loop optimizations
5119 The solution is to use a special (basic) induction variable that is at
5120 least as large as sizetype, and to express the aforementioned general
5121 induction variables in terms of this induction variable, eliminating
5122 the problematic intermediate truncation to the GNAT base index type.
5123 This is possible as long as the original expression doesn't overflow
5124 and if the middle-end hasn't introduced artificial overflows in the
5125 course of the various simplification it can make to the expression. */
5128 convert_to_index_type (tree expr
)
5130 enum tree_code code
= TREE_CODE (expr
);
5131 tree type
= TREE_TYPE (expr
);
5133 /* If the type is unsigned, overflow is allowed so we cannot be sure that
5134 EXPR doesn't overflow. Keep it simple if optimization is disabled. */
5135 if (TYPE_UNSIGNED (type
) || !optimize
)
5136 return convert (sizetype
, expr
);
5141 /* The main effect of the function: replace a loop parameter with its
5142 associated special induction variable. */
5143 if (DECL_LOOP_PARM_P (expr
) && DECL_INDUCTION_VAR (expr
))
5144 expr
= DECL_INDUCTION_VAR (expr
);
5149 tree otype
= TREE_TYPE (TREE_OPERAND (expr
, 0));
5150 /* Bail out as soon as we suspect some sort of type frobbing. */
5151 if (TYPE_PRECISION (type
) != TYPE_PRECISION (otype
)
5152 || TYPE_UNSIGNED (type
) != TYPE_UNSIGNED (otype
))
5156 /* ... fall through ... */
5158 case NON_LVALUE_EXPR
:
5159 return fold_build1 (code
, sizetype
,
5160 convert_to_index_type (TREE_OPERAND (expr
, 0)));
5165 return fold_build2 (code
, sizetype
,
5166 convert_to_index_type (TREE_OPERAND (expr
, 0)),
5167 convert_to_index_type (TREE_OPERAND (expr
, 1)));
5170 return fold_build2 (code
, sizetype
, TREE_OPERAND (expr
, 0),
5171 convert_to_index_type (TREE_OPERAND (expr
, 1)));
5174 return fold_build3 (code
, sizetype
, TREE_OPERAND (expr
, 0),
5175 convert_to_index_type (TREE_OPERAND (expr
, 1)),
5176 convert_to_index_type (TREE_OPERAND (expr
, 2)));
5182 return convert (sizetype
, expr
);
5185 /* Remove all conversions that are done in EXP. This includes converting
5186 from a padded type or to a justified modular type. If TRUE_ADDRESS
5187 is true, always return the address of the containing object even if
5188 the address is not bit-aligned. */
5191 remove_conversions (tree exp
, bool true_address
)
5193 switch (TREE_CODE (exp
))
5197 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
5198 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp
)))
5200 remove_conversions ((*CONSTRUCTOR_ELTS (exp
))[0].value
, true);
5204 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp
, 0))))
5205 return remove_conversions (TREE_OPERAND (exp
, 0), true_address
);
5209 case VIEW_CONVERT_EXPR
:
5210 case NON_LVALUE_EXPR
:
5211 return remove_conversions (TREE_OPERAND (exp
, 0), true_address
);
5220 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
5221 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
5222 likewise return an expression pointing to the underlying array. */
5225 maybe_unconstrained_array (tree exp
)
5227 enum tree_code code
= TREE_CODE (exp
);
5228 tree type
= TREE_TYPE (exp
);
5230 switch (TREE_CODE (type
))
5232 case UNCONSTRAINED_ARRAY_TYPE
:
5233 if (code
== UNCONSTRAINED_ARRAY_REF
)
5235 const bool read_only
= TREE_READONLY (exp
);
5236 const bool no_trap
= TREE_THIS_NOTRAP (exp
);
5238 exp
= TREE_OPERAND (exp
, 0);
5239 type
= TREE_TYPE (exp
);
5241 if (TREE_CODE (exp
) == COND_EXPR
)
5244 = build_unary_op (INDIRECT_REF
, NULL_TREE
,
5245 build_component_ref (TREE_OPERAND (exp
, 1),
5250 = build_unary_op (INDIRECT_REF
, NULL_TREE
,
5251 build_component_ref (TREE_OPERAND (exp
, 2),
5256 exp
= build3 (COND_EXPR
,
5257 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type
))),
5258 TREE_OPERAND (exp
, 0), op1
, op2
);
5262 exp
= build_unary_op (INDIRECT_REF
, NULL_TREE
,
5263 build_component_ref (exp
, NULL_TREE
,
5266 TREE_READONLY (exp
) = read_only
;
5267 TREE_THIS_NOTRAP (exp
) = no_trap
;
5271 else if (code
== NULL_EXPR
)
5272 exp
= build1 (NULL_EXPR
,
5273 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type
)))),
5274 TREE_OPERAND (exp
, 0));
5278 /* If this is a padded type and it contains a template, convert to the
5279 unpadded type first. */
5280 if (TYPE_PADDING_P (type
)
5281 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type
))) == RECORD_TYPE
5282 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type
))))
5284 exp
= convert (TREE_TYPE (TYPE_FIELDS (type
)), exp
);
5285 type
= TREE_TYPE (exp
);
5288 if (TYPE_CONTAINS_TEMPLATE_P (type
))
5290 exp
= build_component_ref (exp
, NULL_TREE
,
5291 DECL_CHAIN (TYPE_FIELDS (type
)),
5293 type
= TREE_TYPE (exp
);
5295 /* If the array type is padded, convert to the unpadded type. */
5296 if (TYPE_IS_PADDING_P (type
))
5297 exp
= convert (TREE_TYPE (TYPE_FIELDS (type
)), exp
);
5308 /* Return true if EXPR is an expression that can be folded as an operand
5309 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
5312 can_fold_for_view_convert_p (tree expr
)
5316 /* The folder will fold NOP_EXPRs between integral types with the same
5317 precision (in the middle-end's sense). We cannot allow it if the
5318 types don't have the same precision in the Ada sense as well. */
5319 if (TREE_CODE (expr
) != NOP_EXPR
)
5322 t1
= TREE_TYPE (expr
);
5323 t2
= TREE_TYPE (TREE_OPERAND (expr
, 0));
5325 /* Defer to the folder for non-integral conversions. */
5326 if (!(INTEGRAL_TYPE_P (t1
) && INTEGRAL_TYPE_P (t2
)))
5329 /* Only fold conversions that preserve both precisions. */
5330 if (TYPE_PRECISION (t1
) == TYPE_PRECISION (t2
)
5331 && operand_equal_p (rm_size (t1
), rm_size (t2
), 0))
5337 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
5338 If NOTRUNC_P is true, truncation operations should be suppressed.
5340 Special care is required with (source or target) integral types whose
5341 precision is not equal to their size, to make sure we fetch or assign
5342 the value bits whose location might depend on the endianness, e.g.
5344 Rmsize : constant := 8;
5345 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
5347 type Bit_Array is array (1 .. Rmsize) of Boolean;
5348 pragma Pack (Bit_Array);
5350 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
5352 Value : Int := 2#1000_0001#;
5353 Vbits : Bit_Array := To_Bit_Array (Value);
5355 we expect the 8 bits at Vbits'Address to always contain Value, while
5356 their original location depends on the endianness, at Value'Address
5357 on a little-endian architecture but not on a big-endian one. */
5360 unchecked_convert (tree type
, tree expr
, bool notrunc_p
)
5362 tree etype
= TREE_TYPE (expr
);
5363 enum tree_code ecode
= TREE_CODE (etype
);
5364 enum tree_code code
= TREE_CODE (type
);
5368 /* If the expression is already of the right type, we are done. */
5372 /* If both types types are integral just do a normal conversion.
5373 Likewise for a conversion to an unconstrained array. */
5374 if ((((INTEGRAL_TYPE_P (type
)
5375 && !(code
== INTEGER_TYPE
&& TYPE_VAX_FLOATING_POINT_P (type
)))
5376 || (POINTER_TYPE_P (type
) && !TYPE_IS_THIN_POINTER_P (type
))
5377 || (code
== RECORD_TYPE
&& TYPE_JUSTIFIED_MODULAR_P (type
)))
5378 && ((INTEGRAL_TYPE_P (etype
)
5379 && !(ecode
== INTEGER_TYPE
&& TYPE_VAX_FLOATING_POINT_P (etype
)))
5380 || (POINTER_TYPE_P (etype
) && !TYPE_IS_THIN_POINTER_P (etype
))
5381 || (ecode
== RECORD_TYPE
&& TYPE_JUSTIFIED_MODULAR_P (etype
))))
5382 || code
== UNCONSTRAINED_ARRAY_TYPE
)
5384 if (ecode
== INTEGER_TYPE
&& TYPE_BIASED_REPRESENTATION_P (etype
))
5386 tree ntype
= copy_type (etype
);
5387 TYPE_BIASED_REPRESENTATION_P (ntype
) = 0;
5388 TYPE_MAIN_VARIANT (ntype
) = ntype
;
5389 expr
= build1 (NOP_EXPR
, ntype
, expr
);
5392 if (code
== INTEGER_TYPE
&& TYPE_BIASED_REPRESENTATION_P (type
))
5394 tree rtype
= copy_type (type
);
5395 TYPE_BIASED_REPRESENTATION_P (rtype
) = 0;
5396 TYPE_MAIN_VARIANT (rtype
) = rtype
;
5397 expr
= convert (rtype
, expr
);
5398 expr
= build1 (NOP_EXPR
, type
, expr
);
5401 expr
= convert (type
, expr
);
5404 /* If we are converting to an integral type whose precision is not equal
5405 to its size, first unchecked convert to a record type that contains an
5406 field of the given precision. Then extract the field. */
5407 else if (INTEGRAL_TYPE_P (type
)
5408 && TYPE_RM_SIZE (type
)
5409 && 0 != compare_tree_int (TYPE_RM_SIZE (type
),
5410 GET_MODE_BITSIZE (TYPE_MODE (type
))))
5412 tree rec_type
= make_node (RECORD_TYPE
);
5413 unsigned HOST_WIDE_INT prec
= TREE_INT_CST_LOW (TYPE_RM_SIZE (type
));
5414 tree field_type
, field
;
5416 if (TYPE_UNSIGNED (type
))
5417 field_type
= make_unsigned_type (prec
);
5419 field_type
= make_signed_type (prec
);
5420 SET_TYPE_RM_SIZE (field_type
, TYPE_RM_SIZE (type
));
5422 field
= create_field_decl (get_identifier ("OBJ"), field_type
, rec_type
,
5423 NULL_TREE
, bitsize_zero_node
, 1, 0);
5425 finish_record_type (rec_type
, field
, 1, false);
5427 expr
= unchecked_convert (rec_type
, expr
, notrunc_p
);
5428 expr
= build_component_ref (expr
, NULL_TREE
, field
, false);
5429 expr
= fold_build1 (NOP_EXPR
, type
, expr
);
5432 /* Similarly if we are converting from an integral type whose precision is
5433 not equal to its size, first copy into a field of the given precision
5434 and unchecked convert the record type. */
5435 else if (INTEGRAL_TYPE_P (etype
)
5436 && TYPE_RM_SIZE (etype
)
5437 && 0 != compare_tree_int (TYPE_RM_SIZE (etype
),
5438 GET_MODE_BITSIZE (TYPE_MODE (etype
))))
5440 tree rec_type
= make_node (RECORD_TYPE
);
5441 unsigned HOST_WIDE_INT prec
= TREE_INT_CST_LOW (TYPE_RM_SIZE (etype
));
5442 vec
<constructor_elt
, va_gc
> *v
;
5444 tree field_type
, field
;
5446 if (TYPE_UNSIGNED (etype
))
5447 field_type
= make_unsigned_type (prec
);
5449 field_type
= make_signed_type (prec
);
5450 SET_TYPE_RM_SIZE (field_type
, TYPE_RM_SIZE (etype
));
5452 field
= create_field_decl (get_identifier ("OBJ"), field_type
, rec_type
,
5453 NULL_TREE
, bitsize_zero_node
, 1, 0);
5455 finish_record_type (rec_type
, field
, 1, false);
5457 expr
= fold_build1 (NOP_EXPR
, field_type
, expr
);
5458 CONSTRUCTOR_APPEND_ELT (v
, field
, expr
);
5459 expr
= gnat_build_constructor (rec_type
, v
);
5460 expr
= unchecked_convert (type
, expr
, notrunc_p
);
5463 /* If we are converting from a scalar type to a type with a different size,
5464 we need to pad to have the same size on both sides.
5466 ??? We cannot do it unconditionally because unchecked conversions are
5467 used liberally by the front-end to implement polymorphism, e.g. in:
5469 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
5470 return p___size__4 (p__object!(S191s.all));
5472 so we skip all expressions that are references. */
5473 else if (!REFERENCE_CLASS_P (expr
)
5474 && !AGGREGATE_TYPE_P (etype
)
5475 && TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
5476 && (c
= tree_int_cst_compare (TYPE_SIZE (etype
), TYPE_SIZE (type
))))
5480 expr
= convert (maybe_pad_type (etype
, TYPE_SIZE (type
), 0, Empty
,
5481 false, false, false, true),
5483 expr
= unchecked_convert (type
, expr
, notrunc_p
);
5487 tree rec_type
= maybe_pad_type (type
, TYPE_SIZE (etype
), 0, Empty
,
5488 false, false, false, true);
5489 expr
= unchecked_convert (rec_type
, expr
, notrunc_p
);
5490 expr
= build_component_ref (expr
, NULL_TREE
, TYPE_FIELDS (rec_type
),
5495 /* We have a special case when we are converting between two unconstrained
5496 array types. In that case, take the address, convert the fat pointer
5497 types, and dereference. */
5498 else if (ecode
== code
&& code
== UNCONSTRAINED_ARRAY_TYPE
)
5499 expr
= build_unary_op (INDIRECT_REF
, NULL_TREE
,
5500 build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (type
),
5501 build_unary_op (ADDR_EXPR
, NULL_TREE
,
5504 /* Another special case is when we are converting to a vector type from its
5505 representative array type; this a regular conversion. */
5506 else if (code
== VECTOR_TYPE
5507 && ecode
== ARRAY_TYPE
5508 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type
),
5510 expr
= convert (type
, expr
);
5512 /* And, if the array type is not the representative, we try to build an
5513 intermediate vector type of which the array type is the representative
5514 and to do the unchecked conversion between the vector types, in order
5515 to enable further simplifications in the middle-end. */
5516 else if (code
== VECTOR_TYPE
5517 && ecode
== ARRAY_TYPE
5518 && (tem
= build_vector_type_for_array (etype
, NULL_TREE
)))
5520 expr
= convert (tem
, expr
);
5521 return unchecked_convert (type
, expr
, notrunc_p
);
5524 /* If we are converting a CONSTRUCTOR to a more aligned RECORD_TYPE, bump
5525 the alignment of the CONSTRUCTOR to speed up the copy operation. */
5526 else if (TREE_CODE (expr
) == CONSTRUCTOR
5527 && code
== RECORD_TYPE
5528 && TYPE_ALIGN (etype
) < TYPE_ALIGN (type
))
5530 expr
= convert (maybe_pad_type (etype
, NULL_TREE
, TYPE_ALIGN (type
),
5531 Empty
, false, false, false, true),
5533 return unchecked_convert (type
, expr
, notrunc_p
);
5536 /* Otherwise, just build a VIEW_CONVERT_EXPR of the expression. */
5539 expr
= maybe_unconstrained_array (expr
);
5540 etype
= TREE_TYPE (expr
);
5541 ecode
= TREE_CODE (etype
);
5542 if (can_fold_for_view_convert_p (expr
))
5543 expr
= fold_build1 (VIEW_CONVERT_EXPR
, type
, expr
);
5545 expr
= build1 (VIEW_CONVERT_EXPR
, type
, expr
);
5548 /* If the result is an integral type whose precision is not equal to its
5549 size, sign- or zero-extend the result. We need not do this if the input
5550 is an integral type of the same precision and signedness or if the output
5551 is a biased type or if both the input and output are unsigned. */
5553 && INTEGRAL_TYPE_P (type
) && TYPE_RM_SIZE (type
)
5554 && !(code
== INTEGER_TYPE
&& TYPE_BIASED_REPRESENTATION_P (type
))
5555 && 0 != compare_tree_int (TYPE_RM_SIZE (type
),
5556 GET_MODE_BITSIZE (TYPE_MODE (type
)))
5557 && !(INTEGRAL_TYPE_P (etype
)
5558 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (etype
)
5559 && operand_equal_p (TYPE_RM_SIZE (type
),
5560 (TYPE_RM_SIZE (etype
) != 0
5561 ? TYPE_RM_SIZE (etype
) : TYPE_SIZE (etype
)),
5563 && !(TYPE_UNSIGNED (type
) && TYPE_UNSIGNED (etype
)))
5566 = gnat_type_for_mode (TYPE_MODE (type
), TYPE_UNSIGNED (type
));
5568 = convert (base_type
,
5569 size_binop (MINUS_EXPR
,
5571 (GET_MODE_BITSIZE (TYPE_MODE (type
))),
5572 TYPE_RM_SIZE (type
)));
5575 build_binary_op (RSHIFT_EXPR
, base_type
,
5576 build_binary_op (LSHIFT_EXPR
, base_type
,
5577 convert (base_type
, expr
),
5582 /* An unchecked conversion should never raise Constraint_Error. The code
5583 below assumes that GCC's conversion routines overflow the same way that
5584 the underlying hardware does. This is probably true. In the rare case
5585 when it is false, we can rely on the fact that such conversions are
5586 erroneous anyway. */
5587 if (TREE_CODE (expr
) == INTEGER_CST
)
5588 TREE_OVERFLOW (expr
) = 0;
5590 /* If the sizes of the types differ and this is an VIEW_CONVERT_EXPR,
5591 show no longer constant. */
5592 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
5593 && !operand_equal_p (TYPE_SIZE_UNIT (type
), TYPE_SIZE_UNIT (etype
),
5595 TREE_CONSTANT (expr
) = 0;
5600 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
5601 the latter being a record type as predicated by Is_Record_Type. */
5604 tree_code_for_record_type (Entity_Id gnat_type
)
5606 Node_Id component_list
, component
;
5608 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5609 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
5610 if (!Is_Unchecked_Union (gnat_type
))
5613 gnat_type
= Implementation_Base_Type (gnat_type
);
5615 = Component_List (Type_Definition (Declaration_Node (gnat_type
)));
5617 for (component
= First_Non_Pragma (Component_Items (component_list
));
5618 Present (component
);
5619 component
= Next_Non_Pragma (component
))
5620 if (Ekind (Defining_Entity (component
)) == E_Component
)
5626 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5627 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5628 according to the presence of an alignment clause on the type or, if it
5629 is an array, on the component type. */
5632 is_double_float_or_array (Entity_Id gnat_type
, bool *align_clause
)
5634 gnat_type
= Underlying_Type (gnat_type
);
5636 *align_clause
= Present (Alignment_Clause (gnat_type
));
5638 if (Is_Array_Type (gnat_type
))
5640 gnat_type
= Underlying_Type (Component_Type (gnat_type
));
5641 if (Present (Alignment_Clause (gnat_type
)))
5642 *align_clause
= true;
5645 if (!Is_Floating_Point_Type (gnat_type
))
5648 if (UI_To_Int (Esize (gnat_type
)) != 64)
5654 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5655 size is greater or equal to 64 bits, or an array of such a type. Set
5656 ALIGN_CLAUSE according to the presence of an alignment clause on the
5657 type or, if it is an array, on the component type. */
5660 is_double_scalar_or_array (Entity_Id gnat_type
, bool *align_clause
)
5662 gnat_type
= Underlying_Type (gnat_type
);
5664 *align_clause
= Present (Alignment_Clause (gnat_type
));
5666 if (Is_Array_Type (gnat_type
))
5668 gnat_type
= Underlying_Type (Component_Type (gnat_type
));
5669 if (Present (Alignment_Clause (gnat_type
)))
5670 *align_clause
= true;
5673 if (!Is_Scalar_Type (gnat_type
))
5676 if (UI_To_Int (Esize (gnat_type
)) < 64)
5682 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
5683 component of an aggregate type. */
5686 type_for_nonaliased_component_p (tree gnu_type
)
5688 /* If the type is passed by reference, we may have pointers to the
5689 component so it cannot be made non-aliased. */
5690 if (must_pass_by_ref (gnu_type
) || default_pass_by_ref (gnu_type
))
5693 /* We used to say that any component of aggregate type is aliased
5694 because the front-end may take 'Reference of it. The front-end
5695 has been enhanced in the meantime so as to use a renaming instead
5696 in most cases, but the back-end can probably take the address of
5697 such a component too so we go for the conservative stance.
5699 For instance, we might need the address of any array type, even
5700 if normally passed by copy, to construct a fat pointer if the
5701 component is used as an actual for an unconstrained formal.
5703 Likewise for record types: even if a specific record subtype is
5704 passed by copy, the parent type might be passed by ref (e.g. if
5705 it's of variable size) and we might take the address of a child
5706 component to pass to a parent formal. We have no way to check
5707 for such conditions here. */
5708 if (AGGREGATE_TYPE_P (gnu_type
))
5714 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
5717 smaller_form_type_p (tree type
, tree orig_type
)
5721 /* We're not interested in variants here. */
5722 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig_type
))
5725 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5726 if (TYPE_NAME (type
) != TYPE_NAME (orig_type
))
5729 size
= TYPE_SIZE (type
);
5730 osize
= TYPE_SIZE (orig_type
);
5732 if (!(TREE_CODE (size
) == INTEGER_CST
&& TREE_CODE (osize
) == INTEGER_CST
))
5735 return tree_int_cst_lt (size
, osize
) != 0;
5738 /* Perform final processing on global variables. */
5740 static GTY (()) tree dummy_global
;
5743 gnat_write_global_declarations (void)
5748 /* If we have declared types as used at the global level, insert them in
5749 the global hash table. We use a dummy variable for this purpose. */
5750 if (types_used_by_cur_var_decl
&& !types_used_by_cur_var_decl
->is_empty ())
5752 struct varpool_node
*node
;
5755 ASM_FORMAT_PRIVATE_NAME (label
, first_global_object_name
, 0);
5757 = build_decl (BUILTINS_LOCATION
, VAR_DECL
, get_identifier (label
),
5759 TREE_STATIC (dummy_global
) = 1;
5760 TREE_ASM_WRITTEN (dummy_global
) = 1;
5761 node
= varpool_node_for_decl (dummy_global
);
5762 node
->force_output
= 1;
5764 while (!types_used_by_cur_var_decl
->is_empty ())
5766 tree t
= types_used_by_cur_var_decl
->pop ();
5767 types_used_by_var_decl_insert (t
, dummy_global
);
5771 /* Output debug information for all global type declarations first. This
5772 ensures that global types whose compilation hasn't been finalized yet,
5773 for example pointers to Taft amendment types, have their compilation
5774 finalized in the right context. */
5775 FOR_EACH_VEC_SAFE_ELT (global_decls
, i
, iter
)
5776 if (TREE_CODE (iter
) == TYPE_DECL
)
5777 debug_hooks
->global_decl (iter
);
5779 /* Proceed to optimize and emit assembly. */
5780 finalize_compilation_unit ();
5782 /* After cgraph has had a chance to emit everything that's going to
5783 be emitted, output debug information for the rest of globals. */
5786 timevar_push (TV_SYMOUT
);
5787 FOR_EACH_VEC_SAFE_ELT (global_decls
, i
, iter
)
5788 if (TREE_CODE (iter
) != TYPE_DECL
)
5789 debug_hooks
->global_decl (iter
);
5790 timevar_pop (TV_SYMOUT
);
5794 /* ************************************************************************
5795 * * GCC builtins support *
5796 * ************************************************************************ */
5798 /* The general scheme is fairly simple:
5800 For each builtin function/type to be declared, gnat_install_builtins calls
5801 internal facilities which eventually get to gnat_push_decl, which in turn
5802 tracks the so declared builtin function decls in the 'builtin_decls' global
5803 datastructure. When an Intrinsic subprogram declaration is processed, we
5804 search this global datastructure to retrieve the associated BUILT_IN DECL
5807 /* Search the chain of currently available builtin declarations for a node
5808 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5809 found, if any, or NULL_TREE otherwise. */
5811 builtin_decl_for (tree name
)
5816 FOR_EACH_VEC_SAFE_ELT (builtin_decls
, i
, decl
)
5817 if (DECL_NAME (decl
) == name
)
5823 /* The code below eventually exposes gnat_install_builtins, which declares
5824 the builtin types and functions we might need, either internally or as
5825 user accessible facilities.
5827 ??? This is a first implementation shot, still in rough shape. It is
5828 heavily inspired from the "C" family implementation, with chunks copied
5829 verbatim from there.
5831 Two obvious TODO candidates are
5832 o Use a more efficient name/decl mapping scheme
5833 o Devise a middle-end infrastructure to avoid having to copy
5834 pieces between front-ends. */
5836 /* ----------------------------------------------------------------------- *
5837 * BUILTIN ELEMENTARY TYPES *
5838 * ----------------------------------------------------------------------- */
5840 /* Standard data types to be used in builtin argument declarations. */
5844 CTI_SIGNED_SIZE_TYPE
, /* For format checking only. */
5846 CTI_CONST_STRING_TYPE
,
5851 static tree c_global_trees
[CTI_MAX
];
5853 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
5854 #define string_type_node c_global_trees[CTI_STRING_TYPE]
5855 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
5857 /* ??? In addition some attribute handlers, we currently don't support a
5858 (small) number of builtin-types, which in turns inhibits support for a
5859 number of builtin functions. */
5860 #define wint_type_node void_type_node
5861 #define intmax_type_node void_type_node
5862 #define uintmax_type_node void_type_node
5864 /* Build the void_list_node (void_type_node having been created). */
5867 build_void_list_node (void)
5869 tree t
= build_tree_list (NULL_TREE
, void_type_node
);
5873 /* Used to help initialize the builtin-types.def table. When a type of
5874 the correct size doesn't exist, use error_mark_node instead of NULL.
5875 The later results in segfaults even when a decl using the type doesn't
5879 builtin_type_for_size (int size
, bool unsignedp
)
5881 tree type
= gnat_type_for_size (size
, unsignedp
);
5882 return type
? type
: error_mark_node
;
5885 /* Build/push the elementary type decls that builtin functions/types
5889 install_builtin_elementary_types (void)
5891 signed_size_type_node
= gnat_signed_type (size_type_node
);
5892 pid_type_node
= integer_type_node
;
5893 void_list_node
= build_void_list_node ();
5895 string_type_node
= build_pointer_type (char_type_node
);
5896 const_string_type_node
5897 = build_pointer_type (build_qualified_type
5898 (char_type_node
, TYPE_QUAL_CONST
));
5901 /* ----------------------------------------------------------------------- *
5902 * BUILTIN FUNCTION TYPES *
5903 * ----------------------------------------------------------------------- */
5905 /* Now, builtin function types per se. */
5909 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
5910 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
5911 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
5912 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
5913 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5914 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5915 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
5916 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
5917 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
5918 #define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7, ARG8) NAME,
5919 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
5920 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
5921 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
5922 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5923 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5924 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG6) \
5926 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
5927 #include "builtin-types.def"
5928 #undef DEF_PRIMITIVE_TYPE
5929 #undef DEF_FUNCTION_TYPE_0
5930 #undef DEF_FUNCTION_TYPE_1
5931 #undef DEF_FUNCTION_TYPE_2
5932 #undef DEF_FUNCTION_TYPE_3
5933 #undef DEF_FUNCTION_TYPE_4
5934 #undef DEF_FUNCTION_TYPE_5
5935 #undef DEF_FUNCTION_TYPE_6
5936 #undef DEF_FUNCTION_TYPE_7
5937 #undef DEF_FUNCTION_TYPE_8
5938 #undef DEF_FUNCTION_TYPE_VAR_0
5939 #undef DEF_FUNCTION_TYPE_VAR_1
5940 #undef DEF_FUNCTION_TYPE_VAR_2
5941 #undef DEF_FUNCTION_TYPE_VAR_3
5942 #undef DEF_FUNCTION_TYPE_VAR_4
5943 #undef DEF_FUNCTION_TYPE_VAR_5
5944 #undef DEF_POINTER_TYPE
5948 typedef enum c_builtin_type builtin_type
;
5950 /* A temporary array used in communication with def_fn_type. */
5951 static GTY(()) tree builtin_types
[(int) BT_LAST
+ 1];
5953 /* A helper function for install_builtin_types. Build function type
5954 for DEF with return type RET and N arguments. If VAR is true, then the
5955 function should be variadic after those N arguments.
5957 Takes special care not to ICE if any of the types involved are
5958 error_mark_node, which indicates that said type is not in fact available
5959 (see builtin_type_for_size). In which case the function type as a whole
5960 should be error_mark_node. */
5963 def_fn_type (builtin_type def
, builtin_type ret
, bool var
, int n
, ...)
5966 tree
*args
= XALLOCAVEC (tree
, n
);
5971 for (i
= 0; i
< n
; ++i
)
5973 builtin_type a
= (builtin_type
) va_arg (list
, int);
5974 t
= builtin_types
[a
];
5975 if (t
== error_mark_node
)
5980 t
= builtin_types
[ret
];
5981 if (t
== error_mark_node
)
5984 t
= build_varargs_function_type_array (t
, n
, args
);
5986 t
= build_function_type_array (t
, n
, args
);
5989 builtin_types
[def
] = t
;
5993 /* Build the builtin function types and install them in the builtin_types
5994 array for later use in builtin function decls. */
5997 install_builtin_function_types (void)
5999 tree va_list_ref_type_node
;
6000 tree va_list_arg_type_node
;
/* When va_list is an array type it decays to a pointer in parameter
   position, so both the argument and reference forms are that pointer;
   otherwise pass va_list by value and reference it explicitly.  */
6002 if (TREE_CODE (va_list_type_node
) == ARRAY_TYPE
)
6004 va_list_arg_type_node
= va_list_ref_type_node
=
6005 build_pointer_type (TREE_TYPE (va_list_type_node
));
6009 va_list_arg_type_node
= va_list_type_node
;
6010 va_list_ref_type_node
= build_reference_type (va_list_type_node
);
/* Expand builtin-types.def: each DEF_* macro fills one builtin_types[]
   slot, delegating to def_fn_type for the function-type entries.  */
6013 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
6014 builtin_types[ENUM] = VALUE;
6015 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
6016 def_fn_type (ENUM, RETURN, 0, 0);
6017 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
6018 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
6019 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
6020 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
6021 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
6022 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
6023 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
6024 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
6025 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
6026 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
6027 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6029 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
6030 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6032 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
6033 #define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
6035 def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
/* The VAR_* variants pass var==1 so the resulting type is variadic
   after the listed fixed arguments.  */
6037 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
6038 def_fn_type (ENUM, RETURN, 1, 0);
6039 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
6040 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
6041 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
6042 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
6043 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
6044 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
6045 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
6046 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
6047 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
6048 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
6049 #define DEF_POINTER_TYPE(ENUM, TYPE) \
6050 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
6052 #include "builtin-types.def"
/* Undo the temporary macro bindings.  NOTE(review): the undefs for
   DEF_FUNCTION_TYPE_0/_7/_8 are not visible here — presumably dropped
   by extraction; verify against the upstream file.  */
6054 #undef DEF_PRIMITIVE_TYPE
6055 #undef DEF_FUNCTION_TYPE_1
6056 #undef DEF_FUNCTION_TYPE_2
6057 #undef DEF_FUNCTION_TYPE_3
6058 #undef DEF_FUNCTION_TYPE_4
6059 #undef DEF_FUNCTION_TYPE_5
6060 #undef DEF_FUNCTION_TYPE_6
6061 #undef DEF_FUNCTION_TYPE_VAR_0
6062 #undef DEF_FUNCTION_TYPE_VAR_1
6063 #undef DEF_FUNCTION_TYPE_VAR_2
6064 #undef DEF_FUNCTION_TYPE_VAR_3
6065 #undef DEF_FUNCTION_TYPE_VAR_4
6066 #undef DEF_FUNCTION_TYPE_VAR_5
6067 #undef DEF_POINTER_TYPE
/* Sentinel slot: BT_LAST itself holds no type.  */
6068 builtin_types
[(int) BT_LAST
] = NULL_TREE
;
6071 /* ----------------------------------------------------------------------- *
6072 * BUILTIN ATTRIBUTES *
6073 * ----------------------------------------------------------------------- */
/* Enumeration of the attribute nodes declared in builtin-attrs.def;
   each DEF_ATTR_* entry contributes one enumerator.  */
6075 enum built_in_attribute
6077 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
6078 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
6079 #define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
6080 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
6081 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
6082 #include "builtin-attrs.def"
/* NOTE(review): `#undef DEF_ATTR_INT` is not visible at this point in the
   extraction — confirm against the upstream file.  */
6083 #undef DEF_ATTR_NULL_TREE
6085 #undef DEF_ATTR_STRING
6086 #undef DEF_ATTR_IDENT
6087 #undef DEF_ATTR_TREE_LIST
/* Trees for each built_in_attribute value, filled in by
   install_builtin_attributes below.  */
6091 static GTY(()) tree built_in_attributes
[(int) ATTR_LAST
];
/* Populate built_in_attributes[] by expanding builtin-attrs.def once,
   turning each DEF_ATTR_* entry into the corresponding tree node.  */
6094 install_builtin_attributes (void)
6096 /* Fill in the built_in_attributes array. */
6097 #define DEF_ATTR_NULL_TREE(ENUM) \
6098 built_in_attributes[(int) ENUM] = NULL_TREE;
6099 #define DEF_ATTR_INT(ENUM, VALUE) \
6100 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
6101 #define DEF_ATTR_STRING(ENUM, VALUE) \
6102 built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
6103 #define DEF_ATTR_IDENT(ENUM, STRING) \
6104 built_in_attributes[(int) ENUM] = get_identifier (STRING);
/* TREE_LIST entries reference previously-built slots, so ordering in
   builtin-attrs.def matters.  */
6105 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
6106 built_in_attributes[(int) ENUM] \
6107 = tree_cons (built_in_attributes[(int) PURPOSE], \
6108 built_in_attributes[(int) VALUE], \
6109 built_in_attributes[(int) CHAIN]);
6110 #include "builtin-attrs.def"
/* NOTE(review): `#undef DEF_ATTR_INT` not visible here — likely lost in
   extraction.  */
6111 #undef DEF_ATTR_NULL_TREE
6113 #undef DEF_ATTR_STRING
6114 #undef DEF_ATTR_IDENT
6115 #undef DEF_ATTR_TREE_LIST
6118 /* Handle a "const" attribute; arguments as in
6119 struct attribute_spec.handler. */
6122 handle_const_attribute (tree
*node
, tree
ARG_UNUSED (name
),
6123 tree
ARG_UNUSED (args
), int ARG_UNUSED (flags
),
/* On a FUNCTION_DECL, "const" is recorded by setting TREE_READONLY.  */
6126 if (TREE_CODE (*node
) == FUNCTION_DECL
)
6127 TREE_READONLY (*node
) = 1;
/* Otherwise the attribute is dropped (the diagnostic branch, if any, is
   not visible in this extraction).  */
6129 *no_add_attrs
= true;
6134 /* Handle a "nothrow" attribute; arguments as in
6135 struct attribute_spec.handler. */
6138 handle_nothrow_attribute (tree
*node
, tree
ARG_UNUSED (name
),
6139 tree
ARG_UNUSED (args
), int ARG_UNUSED (flags
),
/* On a FUNCTION_DECL, record the no-exceptions guarantee via
   TREE_NOTHROW.  */
6142 if (TREE_CODE (*node
) == FUNCTION_DECL
)
6143 TREE_NOTHROW (*node
) = 1;
/* In all visible paths the attribute itself is not kept on the decl.  */
6145 *no_add_attrs
= true;
6150 /* Handle a "pure" attribute; arguments as in
6151 struct attribute_spec.handler. */
6154 handle_pure_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
6155 int ARG_UNUSED (flags
), bool *no_add_attrs
)
/* On a FUNCTION_DECL, mark the function pure (no side effects beyond
   its return value) via DECL_PURE_P.  */
6157 if (TREE_CODE (*node
) == FUNCTION_DECL
)
6158 DECL_PURE_P (*node
) = 1;
6159 /* ??? TODO: Support types. */
/* Anything other than a FUNCTION_DECL: warn and drop the attribute.  */
6162 warning (OPT_Wattributes
, "%qs attribute ignored",
6163 IDENTIFIER_POINTER (name
));
6164 *no_add_attrs
= true;
6170 /* Handle a "no vops" attribute; arguments as in
6171 struct attribute_spec.handler. */
6174 handle_novops_attribute (tree
*node
, tree
ARG_UNUSED (name
),
6175 tree
ARG_UNUSED (args
), int ARG_UNUSED (flags
),
6176 bool *ARG_UNUSED (no_add_attrs
))
/* "no vops" is internal-only, so it must land on a FUNCTION_DECL; assert
   rather than diagnose.  */
6178 gcc_assert (TREE_CODE (*node
) == FUNCTION_DECL
);
6179 DECL_IS_NOVOPS (*node
) = 1;
6183 /* Helper for nonnull attribute handling; fetch the operand number
6184 from the attribute argument list. */
/* Returns false (not visible in this extraction) when ARG_NUM_EXPR does
   not fit an unsigned HOST_WIDE_INT constant; otherwise stores the value
   through VALP.  */
6187 get_nonnull_operand (tree arg_num_expr
, unsigned HOST_WIDE_INT
*valp
)
6189 /* Verify the arg number is a constant. */
6190 if (!tree_fits_uhwi_p (arg_num_expr
))
6193 *valp
= TREE_INT_CST_LOW (arg_num_expr
);
6197 /* Handle the "nonnull" attribute. */
6199 handle_nonnull_attribute (tree
*node
, tree
ARG_UNUSED (name
),
6200 tree args
, int ARG_UNUSED (flags
),
6204 unsigned HOST_WIDE_INT attr_arg_num
;
6206 /* If no arguments are specified, all pointer arguments should be
6207 non-null. Verify a full prototype is given so that the arguments
6208 will have the correct types when we actually check them later. */
6211 if (!prototype_p (type
))
6213 error ("nonnull attribute without arguments on a non-prototype");
6214 *no_add_attrs
= true;
6219 /* Argument list specified. Verify that each argument number references
6220 a pointer argument. */
6221 for (attr_arg_num
= 1; args
; args
= TREE_CHAIN (args
))
6223 unsigned HOST_WIDE_INT arg_num
= 0, ck_num
;
/* Each attribute operand must be an integer constant argument index.  */
6225 if (!get_nonnull_operand (TREE_VALUE (args
), &arg_num
))
6227 error ("nonnull argument has invalid operand number (argument %lu)",
6228 (unsigned long) attr_arg_num
);
6229 *no_add_attrs
= true;
/* With a prototype available, walk the parameter types to locate the
   ARG_NUM-th argument and validate it.  */
6233 if (prototype_p (type
))
6235 function_args_iterator iter
;
6238 function_args_iter_init (&iter
, type
);
6239 for (ck_num
= 1; ; ck_num
++, function_args_iter_next (&iter
))
6241 argument
= function_args_iter_cond (&iter
);
6242 if (!argument
|| ck_num
== arg_num
)
/* Ran off the end of the parameter list (or hit the trailing void):
   the operand number is out of range.  */
6247 || TREE_CODE (argument
) == VOID_TYPE
)
6249 error ("nonnull argument with out-of-range operand number "
6250 "(argument %lu, operand %lu)",
6251 (unsigned long) attr_arg_num
, (unsigned long) arg_num
);
6252 *no_add_attrs
= true;
/* The referenced parameter must actually be a pointer.  */
6256 if (TREE_CODE (argument
) != POINTER_TYPE
)
6258 error ("nonnull argument references non-pointer operand "
6259 "(argument %lu, operand %lu)",
6260 (unsigned long) attr_arg_num
, (unsigned long) arg_num
);
6261 *no_add_attrs
= true;
6270 /* Handle a "sentinel" attribute. */
6273 handle_sentinel_attribute (tree
*node
, tree name
, tree args
,
6274 int ARG_UNUSED (flags
), bool *no_add_attrs
)
/* "sentinel" needs to know the named parameters, so a full prototype is
   required.  */
6276 if (!prototype_p (*node
))
6278 warning (OPT_Wattributes
,
6279 "%qs attribute requires prototypes with named arguments",
6280 IDENTIFIER_POINTER (name
));
6281 *no_add_attrs
= true;
/* The sentinel sits among variadic arguments, so the function must be
   variadic.  */
6285 if (!stdarg_p (*node
))
6287 warning (OPT_Wattributes
,
6288 "%qs attribute only applies to variadic functions",
6289 IDENTIFIER_POINTER (name
));
6290 *no_add_attrs
= true;
/* Optional attribute argument: the sentinel position, which must be a
   non-negative integer constant.  */
6296 tree position
= TREE_VALUE (args
);
6298 if (TREE_CODE (position
) != INTEGER_CST
)
6300 warning (0, "requested position is not an integer constant");
6301 *no_add_attrs
= true;
6305 if (tree_int_cst_lt (position
, integer_zero_node
))
6307 warning (0, "requested position is less than zero");
6308 *no_add_attrs
= true;
6316 /* Handle a "noreturn" attribute; arguments as in
6317 struct attribute_spec.handler. */
6320 handle_noreturn_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
6321 int ARG_UNUSED (flags
), bool *no_add_attrs
)
6323 tree type
= TREE_TYPE (*node
);
6325 /* See FIXME comment in c_common_attribute_table. */
/* On a FUNCTION_DECL, "noreturn" is recorded as TREE_THIS_VOLATILE.  */
6326 if (TREE_CODE (*node
) == FUNCTION_DECL
)
6327 TREE_THIS_VOLATILE (*node
) = 1;
/* On a pointer-to-function, rebuild the pointer type with the volatile
   (noreturn) qualifier applied to the pointed-to function type.  */
6328 else if (TREE_CODE (type
) == POINTER_TYPE
6329 && TREE_CODE (TREE_TYPE (type
)) == FUNCTION_TYPE
)
6331 = build_pointer_type
6332 (build_type_variant (TREE_TYPE (type
),
6333 TYPE_READONLY (TREE_TYPE (type
)), 1));
/* Anything else: warn and drop the attribute.  */
6336 warning (OPT_Wattributes
, "%qs attribute ignored",
6337 IDENTIFIER_POINTER (name
));
6338 *no_add_attrs
= true;
6344 /* Handle a "leaf" attribute; arguments as in
6345 struct attribute_spec.handler. */
6348 handle_leaf_attribute (tree
*node
, tree name
,
6349 tree
ARG_UNUSED (args
),
6350 int ARG_UNUSED (flags
), bool *no_add_attrs
)
/* "leaf" only makes sense on a function declaration.  */
6352 if (TREE_CODE (*node
) != FUNCTION_DECL
)
6354 warning (OPT_Wattributes
, "%qE attribute ignored", name
);
6355 *no_add_attrs
= true;
/* A non-public (local) function cannot be called back from another
   translation unit, so "leaf" adds nothing.  */
6357 if (!TREE_PUBLIC (*node
))
6359 warning (OPT_Wattributes
, "%qE attribute has no effect", name
);
6360 *no_add_attrs
= true;
6366 /* Handle a "malloc" attribute; arguments as in
6367 struct attribute_spec.handler. */
6370 handle_malloc_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
6371 int ARG_UNUSED (flags
), bool *no_add_attrs
)
/* Only meaningful on a function declaration returning a pointer; then
   mark the decl so alias analysis can assume fresh memory.  */
6373 if (TREE_CODE (*node
) == FUNCTION_DECL
6374 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node
))))
6375 DECL_IS_MALLOC (*node
) = 1;
/* Otherwise warn and drop the attribute.  */
6378 warning (OPT_Wattributes
, "%qs attribute ignored",
6379 IDENTIFIER_POINTER (name
));
6380 *no_add_attrs
= true;
6386 /* Fake handler for attributes we don't properly support. */
/* Accepts the attribute without acting on it: every parameter is unused
   and no tree flag is set.  */
6389 fake_attribute_handler (tree
* ARG_UNUSED (node
),
6390 tree
ARG_UNUSED (name
),
6391 tree
ARG_UNUSED (args
),
6392 int ARG_UNUSED (flags
),
6393 bool * ARG_UNUSED (no_add_attrs
))
6398 /* Handle a "type_generic" attribute. */
6401 handle_type_generic_attribute (tree
*node
, tree
ARG_UNUSED (name
),
6402 tree
ARG_UNUSED (args
), int ARG_UNUSED (flags
),
6403 bool * ARG_UNUSED (no_add_attrs
))
/* Internal-only attribute: assert the invariants instead of emitting
   user diagnostics.  */
6405 /* Ensure we have a function type. */
6406 gcc_assert (TREE_CODE (*node
) == FUNCTION_TYPE
);
6408 /* Ensure we have a variadic function. */
6409 gcc_assert (!prototype_p (*node
) || stdarg_p (*node
));
6414 /* Handle a "vector_size" attribute; arguments as in
6415 struct attribute_spec.handler. */
6418 handle_vector_size_attribute (tree
*node
, tree name
, tree args
,
6419 int ARG_UNUSED (flags
), bool *no_add_attrs
)
/* The attribute itself is never kept; the type is rewritten instead.  */
6424 *no_add_attrs
= true;
6426 /* We need to provide for vector pointers, vector arrays, and
6427 functions returning vectors. For example:
6429 __attribute__((vector_size(16))) short *foo;
6431 In this case, the mode is SI, but the type being modified is
6432 HI, so we need to look further. */
/* Strip pointer/function/array layers to reach the scalar element type
   the vector is built from.  */
6433 while (POINTER_TYPE_P (type
)
6434 || TREE_CODE (type
) == FUNCTION_TYPE
6435 || TREE_CODE (type
) == ARRAY_TYPE
)
6436 type
= TREE_TYPE (type
);
/* Build the vector type of the requested byte size.  */
6438 vector_type
= build_vector_type_for_size (type
, TREE_VALUE (args
), name
);
6442 /* Build back pointers if needed. */
6443 *node
= reconstruct_complex_type (*node
, vector_type
);
6448 /* Handle a "vector_type" attribute; arguments as in
6449 struct attribute_spec.handler. */
6452 handle_vector_type_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
6453 int ARG_UNUSED (flags
), bool *no_add_attrs
)
/* The attribute is consumed here rather than attached to the type.  */
6458 *no_add_attrs
= true;
/* Only array types can be turned into vector types by this attribute.  */
6460 if (TREE_CODE (type
) != ARRAY_TYPE
)
6462 error ("attribute %qs applies to array types only",
6463 IDENTIFIER_POINTER (name
));
/* Derive the vector type from the array's element type and bounds.  */
6467 vector_type
= build_vector_type_for_array (type
, name
);
/* Remember the original array as the debug/representative form, then
   replace the type in place.  */
6471 TYPE_REPRESENTATIVE_ARRAY (vector_type
) = type
;
6472 *node
= vector_type
;
6477 /* ----------------------------------------------------------------------- *
6478 * BUILTIN FUNCTIONS *
6479 * ----------------------------------------------------------------------- */
6481 /* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
6482 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
6483 if nonansi_p and flag_no_nonansi_builtin. */
6486 def_builtin_1 (enum built_in_function fncode
,
6488 enum built_in_class fnclass
,
6489 tree fntype
, tree libtype
,
6490 bool both_p
, bool fallback_p
,
6491 bool nonansi_p ATTRIBUTE_UNUSED
,
6492 tree fnattrs
, bool implicit_p
)
6495 const char *libname
;
6497 /* Preserve an already installed decl. It most likely was setup in advance
6498 (e.g. as part of the internal builtins) for specific reasons. */
6499 if (builtin_decl_explicit (fncode
) != NULL_TREE
)
/* Every name handled here must carry the "__builtin_" prefix, except
   pure-internal entries (neither both_p nor fallback_p).  */
6502 gcc_assert ((!both_p
&& !fallback_p
)
6503 || !strncmp (name
, "__builtin_",
6504 strlen ("__builtin_")));
/* LIBNAME is NAME with the "__builtin_" prefix stripped, i.e. the plain
   library-function spelling.  */
6506 libname
= name
+ strlen ("__builtin_");
/* Declare the __builtin_ entry, optionally falling back to the library
   name at expansion time.  */
6507 decl
= add_builtin_function (name
, fntype
, fncode
, fnclass
,
6508 (fallback_p
? libname
: NULL
),
6511 /* ??? This is normally further controlled by command-line options
6512 like -fno-builtin, but we don't have them for Ada. */
/* Also declare the bare library name when both_p is requested.  */
6513 add_builtin_function (libname
, libtype
, fncode
, fnclass
,
/* Register the decl in the global builtin tables.  */
6516 set_builtin_decl (fncode
, decl
, implicit_p
);
/* C-standard-level flags expected by builtins.def's COND expressions;
   Ada has no -std= notion, so all are pinned to 0.  */
6519 static int flag_isoc94
= 0;
6520 static int flag_isoc99
= 0;
6521 static int flag_isoc11
= 0;
6523 /* Install what the common builtins.def offers. */
6526 install_builtin_functions (void)
/* Expand builtins.def: each DEF_BUILTIN row becomes one def_builtin_1
   call, resolving its TYPE/LIBTYPE/ATTRS indices through the
   builtin_types[] and built_in_attributes[] tables built earlier.  */
6528 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
6529 NONANSI_P, ATTRS, IMPLICIT, COND) \
6531 def_builtin_1 (ENUM, NAME, CLASS, \
6532 builtin_types[(int) TYPE], \
6533 builtin_types[(int) LIBTYPE], \
6534 BOTH_P, FALLBACK_P, NONANSI_P, \
6535 built_in_attributes[(int) ATTRS], IMPLICIT);
6536 #include "builtins.def"
6540 /* ----------------------------------------------------------------------- *
6541 * BUILTIN FUNCTIONS *
6542 * ----------------------------------------------------------------------- */
6544 /* Install the builtin functions we might need. */
/* Top-level driver: sets up the type and attribute tables, then the
   middle-end, target, and common builtins, in dependency order.  */
6547 gnat_install_builtins (void)
6549 install_builtin_elementary_types ();
6550 install_builtin_function_types ();
6551 install_builtin_attributes ();
6553 /* Install builtins used by generic middle-end pieces first. Some of these
6554 know about internal specificities and control attributes accordingly, for
6555 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
6556 the generic definition from builtins.def. */
6557 build_common_builtin_nodes ();
6559 /* Now, install the target specific builtins, such as the AltiVec family on
6560 ppc, and the common set as exposed by builtins.def. */
6561 targetm
.init_builtins ();
6562 install_builtin_functions ();
6565 #include "gt-ada-utils.h"
6566 #include "gtype-ada.h"