1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2012, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 ****************************************************************************/
28 #include "coretypes.h"
33 #include "diagnostic-core.h"
39 #include "common/common-target.h"
40 #include "langhooks.h"
42 #include "diagnostic.h"
44 #include "tree-dump.h"
45 #include "tree-inline.h"
46 #include "tree-iterator.h"
62 /* If nonzero, pretend we are allocating at global level. */
65 /* The default alignment of "double" floating-point types, i.e. floating
66 point types whose size is equal to 64 bits, or 0 if this alignment is
67 not specifically capped. */
68 int double_float_alignment
;
70 /* The default alignment of "double" or larger scalar types, i.e. scalar
71 types whose size is greater or equal to 64 bits, or 0 if this alignment
72 is not specifically capped. */
73 int double_scalar_alignment
;
75 /* Tree nodes for the various types and decls we create. */
76 tree gnat_std_decls
[(int) ADT_LAST
];
78 /* Functions to call for each of the possible raise reasons. */
79 tree gnat_raise_decls
[(int) LAST_REASON_CODE
+ 1];
81 /* Likewise, but with extra info for each of the possible raise reasons. */
82 tree gnat_raise_decls_ext
[(int) LAST_REASON_CODE
+ 1];
84 /* Forward declarations for handlers of attributes. */
85 static tree
handle_const_attribute (tree
*, tree
, tree
, int, bool *);
86 static tree
handle_nothrow_attribute (tree
*, tree
, tree
, int, bool *);
87 static tree
handle_pure_attribute (tree
*, tree
, tree
, int, bool *);
88 static tree
handle_novops_attribute (tree
*, tree
, tree
, int, bool *);
89 static tree
handle_nonnull_attribute (tree
*, tree
, tree
, int, bool *);
90 static tree
handle_sentinel_attribute (tree
*, tree
, tree
, int, bool *);
91 static tree
handle_noreturn_attribute (tree
*, tree
, tree
, int, bool *);
92 static tree
handle_leaf_attribute (tree
*, tree
, tree
, int, bool *);
93 static tree
handle_malloc_attribute (tree
*, tree
, tree
, int, bool *);
94 static tree
handle_type_generic_attribute (tree
*, tree
, tree
, int, bool *);
95 static tree
handle_vector_size_attribute (tree
*, tree
, tree
, int, bool *);
96 static tree
handle_vector_type_attribute (tree
*, tree
, tree
, int, bool *);
98 /* Fake handler for attributes we don't properly support, typically because
99 they'd require dragging a lot of the common-c front-end circuitry. */
100 static tree
fake_attribute_handler (tree
*, tree
, tree
, int, bool *);
102 /* Table of machine-independent internal attributes for Ada. We support
103 this minimal set of attributes to accommodate the needs of builtins. */
104 const struct attribute_spec gnat_internal_attribute_table
[] =
106 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
107 affects_type_identity } */
108 { "const", 0, 0, true, false, false, handle_const_attribute
,
110 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute
,
112 { "pure", 0, 0, true, false, false, handle_pure_attribute
,
114 { "no vops", 0, 0, true, false, false, handle_novops_attribute
,
116 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute
,
118 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute
,
120 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute
,
122 { "leaf", 0, 0, true, false, false, handle_leaf_attribute
,
124 { "malloc", 0, 0, true, false, false, handle_malloc_attribute
,
126 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute
,
129 { "vector_size", 1, 1, false, true, false, handle_vector_size_attribute
,
131 { "vector_type", 0, 0, false, true, false, handle_vector_type_attribute
,
133 { "may_alias", 0, 0, false, true, false, NULL
, false },
135 /* ??? format and format_arg are heavy and not supported, which actually
136 prevents support for stdio builtins, which we however declare as part
137 of the common builtins.def contents. */
138 { "format", 3, 3, false, true, true, fake_attribute_handler
, false },
139 { "format_arg", 1, 1, false, true, true, fake_attribute_handler
, false },
141 { NULL
, 0, 0, false, false, false, NULL
, false }
144 /* Associates a GNAT tree node to a GCC tree node. It is used in
145 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
146 of `save_gnu_tree' for more info. */
147 static GTY((length ("max_gnat_nodes"))) tree
*associate_gnat_to_gnu
;
149 #define GET_GNU_TREE(GNAT_ENTITY) \
150 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
152 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
153 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
155 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
156 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
158 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
159 static GTY((length ("max_gnat_nodes"))) tree
*dummy_node_table
;
161 #define GET_DUMMY_NODE(GNAT_ENTITY) \
162 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
164 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
165 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
167 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
168 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
170 /* This variable keeps a table for types for each precision so that we only
171 allocate each of them once. Signed and unsigned types are kept separate.
173 Note that these types are only used when fold-const requests something
174 special. Perhaps we should NOT share these types; we'll see how it
176 static GTY(()) tree signed_and_unsigned_types
[2 * MAX_BITS_PER_WORD
+ 1][2];
178 /* Likewise for float types, but record these by mode. */
179 static GTY(()) tree float_types
[NUM_MACHINE_MODES
];
181 /* For each binding contour we allocate a binding_level structure to indicate
182 the binding depth. */
184 struct GTY((chain_next ("%h.chain"))) gnat_binding_level
{
185 /* The binding level containing this one (the enclosing binding level). */
186 struct gnat_binding_level
*chain
;
187 /* The BLOCK node for this level. */
189 /* If nonzero, the setjmp buffer that needs to be updated for any
190 variable-sized definition within this context. */
194 /* The binding level currently in effect. */
195 static GTY(()) struct gnat_binding_level
*current_binding_level
;
197 /* A chain of gnat_binding_level structures awaiting reuse. */
198 static GTY((deletable
)) struct gnat_binding_level
*free_binding_level
;
200 /* The context to be used for global declarations. */
201 static GTY(()) tree global_context
;
203 /* An array of global declarations. */
204 static GTY(()) VEC(tree
,gc
) *global_decls
;
206 /* An array of builtin function declarations. */
207 static GTY(()) VEC(tree
,gc
) *builtin_decls
;
209 /* An array of global renaming pointers. */
210 static GTY(()) VEC(tree
,gc
) *global_renaming_pointers
;
212 /* A chain of unused BLOCK nodes. */
213 static GTY((deletable
)) tree free_block_chain
;
215 static int pad_type_hash_marked_p (const void *p
);
216 static hashval_t
pad_type_hash_hash (const void *p
);
217 static int pad_type_hash_eq (const void *p1
, const void *p2
);
219 /* A hash table of padded types. It is modelled on the generic type
220 hash table in tree.c, which must thus be used as a reference. */
221 struct GTY(()) pad_type_hash
{
226 static GTY ((if_marked ("pad_type_hash_marked_p"),
227 param_is (struct pad_type_hash
)))
228 htab_t pad_type_hash_table
;
230 static tree
merge_sizes (tree
, tree
, tree
, bool, bool);
231 static tree
compute_related_constant (tree
, tree
);
232 static tree
split_plus (tree
, tree
*);
233 static tree
float_type_for_precision (int, enum machine_mode
);
234 static tree
convert_to_fat_pointer (tree
, tree
);
235 static bool potential_alignment_gap (tree
, tree
, tree
);
236 static void process_attributes (tree
, struct attrib
*);
238 /* Initialize data structures of the utils.c module. */
241 init_gnat_utils (void)
243 /* Initialize the association of GNAT nodes to GCC trees. */
244 associate_gnat_to_gnu
= ggc_alloc_cleared_vec_tree (max_gnat_nodes
);
246 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
247 dummy_node_table
= ggc_alloc_cleared_vec_tree (max_gnat_nodes
);
249 /* Initialize the hash table of padded types. */
250 pad_type_hash_table
= htab_create_ggc (512, pad_type_hash_hash
,
251 pad_type_hash_eq
, 0);
254 /* Destroy data structures of the utils.c module. */
257 destroy_gnat_utils (void)
259 /* Destroy the association of GNAT nodes to GCC trees. */
260 ggc_free (associate_gnat_to_gnu
);
261 associate_gnat_to_gnu
= NULL
;
263 /* Destroy the association of GNAT nodes to GCC trees as dummies. */
264 ggc_free (dummy_node_table
);
265 dummy_node_table
= NULL
;
267 /* Destroy the hash table of padded types. */
268 htab_delete (pad_type_hash_table
);
269 pad_type_hash_table
= NULL
;
271 /* Invalidate the global renaming pointers. */
272 invalidate_global_renaming_pointers ();
275 /* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
276 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
277 If NO_CHECK is true, the latter check is suppressed.
279 If GNU_DECL is zero, reset a previous association. */
282 save_gnu_tree (Entity_Id gnat_entity
, tree gnu_decl
, bool no_check
)
284 /* Check that GNAT_ENTITY is not already defined and that it is being set
285 to something which is a decl. If that is not the case, this usually
286 means GNAT_ENTITY is defined twice, but occasionally is due to some
288 gcc_assert (!(gnu_decl
289 && (PRESENT_GNU_TREE (gnat_entity
)
290 || (!no_check
&& !DECL_P (gnu_decl
)))));
292 SET_GNU_TREE (gnat_entity
, gnu_decl
);
295 /* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
296 that was associated with it. If there is no such tree node, abort.
298 In some cases, such as delayed elaboration or expressions that need to
299 be elaborated only once, GNAT_ENTITY is really not an entity. */
302 get_gnu_tree (Entity_Id gnat_entity
)
304 gcc_assert (PRESENT_GNU_TREE (gnat_entity
));
305 return GET_GNU_TREE (gnat_entity
);
308 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
311 present_gnu_tree (Entity_Id gnat_entity
)
313 return PRESENT_GNU_TREE (gnat_entity
);
316 /* Make a dummy type corresponding to GNAT_TYPE. */
319 make_dummy_type (Entity_Id gnat_type
)
321 Entity_Id gnat_underlying
= Gigi_Equivalent_Type (gnat_type
);
324 /* If there is an equivalent type, get its underlying type. */
325 if (Present (gnat_underlying
))
326 gnat_underlying
= Gigi_Equivalent_Type (Underlying_Type (gnat_underlying
));
328 /* If there was no equivalent type (can only happen when just annotating
329 types) or underlying type, go back to the original type. */
330 if (No (gnat_underlying
))
331 gnat_underlying
= gnat_type
;
333 /* If it there already a dummy type, use that one. Else make one. */
334 if (PRESENT_DUMMY_NODE (gnat_underlying
))
335 return GET_DUMMY_NODE (gnat_underlying
);
337 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
339 gnu_type
= make_node (Is_Record_Type (gnat_underlying
)
340 ? tree_code_for_record_type (gnat_underlying
)
342 TYPE_NAME (gnu_type
) = get_entity_name (gnat_type
);
343 TYPE_DUMMY_P (gnu_type
) = 1;
344 TYPE_STUB_DECL (gnu_type
)
345 = create_type_stub_decl (TYPE_NAME (gnu_type
), gnu_type
);
346 if (Is_By_Reference_Type (gnat_underlying
))
347 TYPE_BY_REFERENCE_P (gnu_type
) = 1;
349 SET_DUMMY_NODE (gnat_underlying
, gnu_type
);
354 /* Return the dummy type that was made for GNAT_TYPE, if any. */
357 get_dummy_type (Entity_Id gnat_type
)
359 return GET_DUMMY_NODE (gnat_type
);
362 /* Build dummy fat and thin pointer types whose designated type is specified
363 by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter. */
366 build_dummy_unc_pointer_types (Entity_Id gnat_desig_type
, tree gnu_desig_type
)
368 tree gnu_template_type
, gnu_ptr_template
, gnu_array_type
, gnu_ptr_array
;
369 tree gnu_fat_type
, fields
, gnu_object_type
;
371 gnu_template_type
= make_node (RECORD_TYPE
);
372 TYPE_NAME (gnu_template_type
) = create_concat_name (gnat_desig_type
, "XUB");
373 TYPE_DUMMY_P (gnu_template_type
) = 1;
374 gnu_ptr_template
= build_pointer_type (gnu_template_type
);
376 gnu_array_type
= make_node (ENUMERAL_TYPE
);
377 TYPE_NAME (gnu_array_type
) = create_concat_name (gnat_desig_type
, "XUA");
378 TYPE_DUMMY_P (gnu_array_type
) = 1;
379 gnu_ptr_array
= build_pointer_type (gnu_array_type
);
381 gnu_fat_type
= make_node (RECORD_TYPE
);
382 /* Build a stub DECL to trigger the special processing for fat pointer types
384 TYPE_NAME (gnu_fat_type
)
385 = create_type_stub_decl (create_concat_name (gnat_desig_type
, "XUP"),
387 fields
= create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array
,
388 gnu_fat_type
, NULL_TREE
, NULL_TREE
, 0, 0);
390 = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template
,
391 gnu_fat_type
, NULL_TREE
, NULL_TREE
, 0, 0);
392 finish_fat_pointer_type (gnu_fat_type
, fields
);
393 SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type
, gnu_desig_type
);
394 /* Suppress debug info until after the type is completed. */
395 TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type
)) = 1;
397 gnu_object_type
= make_node (RECORD_TYPE
);
398 TYPE_NAME (gnu_object_type
) = create_concat_name (gnat_desig_type
, "XUT");
399 TYPE_DUMMY_P (gnu_object_type
) = 1;
401 TYPE_POINTER_TO (gnu_desig_type
) = gnu_fat_type
;
402 TYPE_OBJECT_RECORD_TYPE (gnu_desig_type
) = gnu_object_type
;
405 /* Return true if we are in the global binding level. */
408 global_bindings_p (void)
410 return force_global
|| current_function_decl
== NULL_TREE
;
413 /* Enter a new binding level. */
416 gnat_pushlevel (void)
418 struct gnat_binding_level
*newlevel
= NULL
;
420 /* Reuse a struct for this binding level, if there is one. */
421 if (free_binding_level
)
423 newlevel
= free_binding_level
;
424 free_binding_level
= free_binding_level
->chain
;
427 newlevel
= ggc_alloc_gnat_binding_level ();
429 /* Use a free BLOCK, if any; otherwise, allocate one. */
430 if (free_block_chain
)
432 newlevel
->block
= free_block_chain
;
433 free_block_chain
= BLOCK_CHAIN (free_block_chain
);
434 BLOCK_CHAIN (newlevel
->block
) = NULL_TREE
;
437 newlevel
->block
= make_node (BLOCK
);
439 /* Point the BLOCK we just made to its parent. */
440 if (current_binding_level
)
441 BLOCK_SUPERCONTEXT (newlevel
->block
) = current_binding_level
->block
;
443 BLOCK_VARS (newlevel
->block
) = NULL_TREE
;
444 BLOCK_SUBBLOCKS (newlevel
->block
) = NULL_TREE
;
445 TREE_USED (newlevel
->block
) = 1;
447 /* Add this level to the front of the chain (stack) of active levels. */
448 newlevel
->chain
= current_binding_level
;
449 newlevel
->jmpbuf_decl
= NULL_TREE
;
450 current_binding_level
= newlevel
;
453 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
454 and point FNDECL to this BLOCK. */
457 set_current_block_context (tree fndecl
)
459 BLOCK_SUPERCONTEXT (current_binding_level
->block
) = fndecl
;
460 DECL_INITIAL (fndecl
) = current_binding_level
->block
;
461 set_block_for_group (current_binding_level
->block
);
464 /* Set the jmpbuf_decl for the current binding level to DECL. */
467 set_block_jmpbuf_decl (tree decl
)
469 current_binding_level
->jmpbuf_decl
= decl
;
472 /* Get the jmpbuf_decl, if any, for the current binding level. */
475 get_block_jmpbuf_decl (void)
477 return current_binding_level
->jmpbuf_decl
;
480 /* Exit a binding level. Set any BLOCK into the current code group. */
485 struct gnat_binding_level
*level
= current_binding_level
;
486 tree block
= level
->block
;
488 BLOCK_VARS (block
) = nreverse (BLOCK_VARS (block
));
489 BLOCK_SUBBLOCKS (block
) = blocks_nreverse (BLOCK_SUBBLOCKS (block
));
491 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
492 are no variables free the block and merge its subblocks into those of its
493 parent block. Otherwise, add it to the list of its parent. */
494 if (TREE_CODE (BLOCK_SUPERCONTEXT (block
)) == FUNCTION_DECL
)
496 else if (BLOCK_VARS (block
) == NULL_TREE
)
498 BLOCK_SUBBLOCKS (level
->chain
->block
)
499 = block_chainon (BLOCK_SUBBLOCKS (block
),
500 BLOCK_SUBBLOCKS (level
->chain
->block
));
501 BLOCK_CHAIN (block
) = free_block_chain
;
502 free_block_chain
= block
;
506 BLOCK_CHAIN (block
) = BLOCK_SUBBLOCKS (level
->chain
->block
);
507 BLOCK_SUBBLOCKS (level
->chain
->block
) = block
;
508 TREE_USED (block
) = 1;
509 set_block_for_group (block
);
512 /* Free this binding structure. */
513 current_binding_level
= level
->chain
;
514 level
->chain
= free_binding_level
;
515 free_binding_level
= level
;
518 /* Exit a binding level and discard the associated BLOCK. */
523 struct gnat_binding_level
*level
= current_binding_level
;
524 tree block
= level
->block
;
526 BLOCK_CHAIN (block
) = free_block_chain
;
527 free_block_chain
= block
;
529 /* Free this binding structure. */
530 current_binding_level
= level
->chain
;
531 level
->chain
= free_binding_level
;
532 free_binding_level
= level
;
535 /* Record DECL as belonging to the current lexical scope and use GNAT_NODE
536 for location information and flag propagation. */
539 gnat_pushdecl (tree decl
, Node_Id gnat_node
)
541 /* If DECL is public external or at top level, it has global context. */
542 if ((TREE_PUBLIC (decl
) && DECL_EXTERNAL (decl
)) || global_bindings_p ())
545 global_context
= build_translation_unit_decl (NULL_TREE
);
546 DECL_CONTEXT (decl
) = global_context
;
550 DECL_CONTEXT (decl
) = current_function_decl
;
552 /* Functions imported in another function are not really nested.
553 For really nested functions mark them initially as needing
554 a static chain for uses of that flag before unnesting;
555 lower_nested_functions will then recompute it. */
556 if (TREE_CODE (decl
) == FUNCTION_DECL
&& !TREE_PUBLIC (decl
))
557 DECL_STATIC_CHAIN (decl
) = 1;
560 TREE_NO_WARNING (decl
) = (No (gnat_node
) || Warnings_Off (gnat_node
));
562 /* Set the location of DECL and emit a declaration for it. */
563 if (Present (gnat_node
))
564 Sloc_to_locus (Sloc (gnat_node
), &DECL_SOURCE_LOCATION (decl
));
566 add_decl_expr (decl
, gnat_node
);
568 /* Put the declaration on the list. The list of declarations is in reverse
569 order. The list will be reversed later. Put global declarations in the
570 globals list and local ones in the current block. But skip TYPE_DECLs
571 for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
572 with the debugger and aren't needed anyway. */
573 if (!(TREE_CODE (decl
) == TYPE_DECL
574 && TREE_CODE (TREE_TYPE (decl
)) == UNCONSTRAINED_ARRAY_TYPE
))
576 if (DECL_EXTERNAL (decl
))
578 if (TREE_CODE (decl
) == FUNCTION_DECL
&& DECL_BUILT_IN (decl
))
579 VEC_safe_push (tree
, gc
, builtin_decls
, decl
);
581 else if (global_bindings_p ())
582 VEC_safe_push (tree
, gc
, global_decls
, decl
);
585 DECL_CHAIN (decl
) = BLOCK_VARS (current_binding_level
->block
);
586 BLOCK_VARS (current_binding_level
->block
) = decl
;
590 /* For the declaration of a type, set its name if it either is not already
591 set or if the previous type name was not derived from a source name.
592 We'd rather have the type named with a real name and all the pointer
593 types to the same object have the same POINTER_TYPE node. Code in the
594 equivalent function of c-decl.c makes a copy of the type node here, but
595 that may cause us trouble with incomplete types. We make an exception
596 for fat pointer types because the compiler automatically builds them
597 for unconstrained array types and the debugger uses them to represent
598 both these and pointers to these. */
599 if (TREE_CODE (decl
) == TYPE_DECL
&& DECL_NAME (decl
))
601 tree t
= TREE_TYPE (decl
);
603 if (!(TYPE_NAME (t
) && TREE_CODE (TYPE_NAME (t
)) == TYPE_DECL
))
605 /* Array and pointer types aren't "tagged" types so we force the
606 type to be associated with its typedef in the DWARF back-end,
607 in order to make sure that the latter is always preserved. */
608 if (!DECL_ARTIFICIAL (decl
)
609 && (TREE_CODE (t
) == ARRAY_TYPE
610 || TREE_CODE (t
) == POINTER_TYPE
))
612 tree tt
= build_distinct_type_copy (t
);
613 if (TREE_CODE (t
) == POINTER_TYPE
)
614 TYPE_NEXT_PTR_TO (t
) = tt
;
615 TYPE_NAME (tt
) = DECL_NAME (decl
);
616 TYPE_CONTEXT (tt
) = DECL_CONTEXT (decl
);
617 TYPE_STUB_DECL (tt
) = TYPE_STUB_DECL (t
);
618 DECL_ORIGINAL_TYPE (decl
) = tt
;
621 else if (TYPE_IS_FAT_POINTER_P (t
))
623 /* We need a variant for the placeholder machinery to work. */
624 tree tt
= build_variant_type_copy (t
);
625 TYPE_NAME (tt
) = decl
;
626 TYPE_CONTEXT (tt
) = DECL_CONTEXT (decl
);
627 TREE_USED (tt
) = TREE_USED (t
);
628 TREE_TYPE (decl
) = tt
;
629 if (DECL_ORIGINAL_TYPE (TYPE_NAME (t
)))
630 DECL_ORIGINAL_TYPE (decl
) = DECL_ORIGINAL_TYPE (TYPE_NAME (t
));
632 DECL_ORIGINAL_TYPE (decl
) = t
;
633 DECL_ARTIFICIAL (decl
) = 0;
636 else if (DECL_ARTIFICIAL (TYPE_NAME (t
)) && !DECL_ARTIFICIAL (decl
))
641 /* Propagate the name to all the anonymous variants. This is needed
642 for the type qualifiers machinery to work properly. */
644 for (t
= TYPE_MAIN_VARIANT (t
); t
; t
= TYPE_NEXT_VARIANT (t
))
645 if (!(TYPE_NAME (t
) && TREE_CODE (TYPE_NAME (t
)) == TYPE_DECL
))
647 TYPE_NAME (t
) = decl
;
648 TYPE_CONTEXT (t
) = DECL_CONTEXT (decl
);
653 /* Create a record type that contains a SIZE bytes long field of TYPE with a
654 starting bit position so that it is aligned to ALIGN bits, and leaving at
655 least ROOM bytes free before the field. BASE_ALIGN is the alignment the
656 record is guaranteed to get. */
659 make_aligning_type (tree type
, unsigned int align
, tree size
,
660 unsigned int base_align
, int room
)
662 /* We will be crafting a record type with one field at a position set to be
663 the next multiple of ALIGN past record'address + room bytes. We use a
664 record placeholder to express record'address. */
665 tree record_type
= make_node (RECORD_TYPE
);
666 tree record
= build0 (PLACEHOLDER_EXPR
, record_type
);
669 = convert (sizetype
, build_unary_op (ADDR_EXPR
, NULL_TREE
, record
));
671 /* The diagram below summarizes the shape of what we manipulate:
673 <--------- pos ---------->
674 { +------------+-------------+-----------------+
675 record =>{ |############| ... | field (type) |
676 { +------------+-------------+-----------------+
677 |<-- room -->|<- voffset ->|<---- size ----->|
680 record_addr vblock_addr
682 Every length is in sizetype bytes there, except "pos" which has to be
683 set as a bit position in the GCC tree for the record. */
684 tree room_st
= size_int (room
);
685 tree vblock_addr_st
= size_binop (PLUS_EXPR
, record_addr_st
, room_st
);
686 tree voffset_st
, pos
, field
;
688 tree name
= TYPE_NAME (type
);
690 if (TREE_CODE (name
) == TYPE_DECL
)
691 name
= DECL_NAME (name
);
692 name
= concat_name (name
, "ALIGN");
693 TYPE_NAME (record_type
) = name
;
695 /* Compute VOFFSET and then POS. The next byte position multiple of some
696 alignment after some address is obtained by "and"ing the alignment minus
697 1 with the two's complement of the address. */
698 voffset_st
= size_binop (BIT_AND_EXPR
,
699 fold_build1 (NEGATE_EXPR
, sizetype
, vblock_addr_st
),
700 size_int ((align
/ BITS_PER_UNIT
) - 1));
702 /* POS = (ROOM + VOFFSET) * BIT_PER_UNIT, in bitsizetype. */
703 pos
= size_binop (MULT_EXPR
,
704 convert (bitsizetype
,
705 size_binop (PLUS_EXPR
, room_st
, voffset_st
)),
708 /* Craft the GCC record representation. We exceptionally do everything
709 manually here because 1) our generic circuitry is not quite ready to
710 handle the complex position/size expressions we are setting up, 2) we
711 have a strong simplifying factor at hand: we know the maximum possible
712 value of voffset, and 3) we have to set/reset at least the sizes in
713 accordance with this maximum value anyway, as we need them to convey
714 what should be "alloc"ated for this type.
716 Use -1 as the 'addressable' indication for the field to prevent the
717 creation of a bitfield. We don't need one, it would have damaging
718 consequences on the alignment computation, and create_field_decl would
719 make one without this special argument, for instance because of the
720 complex position expression. */
721 field
= create_field_decl (get_identifier ("F"), type
, record_type
, size
,
723 TYPE_FIELDS (record_type
) = field
;
725 TYPE_ALIGN (record_type
) = base_align
;
726 TYPE_USER_ALIGN (record_type
) = 1;
728 TYPE_SIZE (record_type
)
729 = size_binop (PLUS_EXPR
,
730 size_binop (MULT_EXPR
, convert (bitsizetype
, size
),
732 bitsize_int (align
+ room
* BITS_PER_UNIT
));
733 TYPE_SIZE_UNIT (record_type
)
734 = size_binop (PLUS_EXPR
, size
,
735 size_int (room
+ align
/ BITS_PER_UNIT
));
737 SET_TYPE_MODE (record_type
, BLKmode
);
738 relate_alias_sets (record_type
, type
, ALIAS_SET_COPY
);
740 /* Declare it now since it will never be declared otherwise. This is
741 necessary to ensure that its subtrees are properly marked. */
742 create_type_decl (name
, record_type
, NULL
, true, false, Empty
);
747 /* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
748 as the field type of a packed record if IN_RECORD is true, or as the
749 component type of a packed array if IN_RECORD is false. See if we can
750 rewrite it either as a type that has a non-BLKmode, which we can pack
751 tighter in the packed record case, or as a smaller type. If so, return
752 the new type. If not, return the original type. */
755 make_packable_type (tree type
, bool in_record
)
757 unsigned HOST_WIDE_INT size
= tree_low_cst (TYPE_SIZE (type
), 1);
758 unsigned HOST_WIDE_INT new_size
;
759 tree new_type
, old_field
, field_list
= NULL_TREE
;
762 /* No point in doing anything if the size is zero. */
766 new_type
= make_node (TREE_CODE (type
));
768 /* Copy the name and flags from the old type to that of the new.
769 Note that we rely on the pointer equality created here for
770 TYPE_NAME to look through conversions in various places. */
771 TYPE_NAME (new_type
) = TYPE_NAME (type
);
772 TYPE_JUSTIFIED_MODULAR_P (new_type
) = TYPE_JUSTIFIED_MODULAR_P (type
);
773 TYPE_CONTAINS_TEMPLATE_P (new_type
) = TYPE_CONTAINS_TEMPLATE_P (type
);
774 if (TREE_CODE (type
) == RECORD_TYPE
)
775 TYPE_PADDING_P (new_type
) = TYPE_PADDING_P (type
);
777 /* If we are in a record and have a small size, set the alignment to
778 try for an integral mode. Otherwise set it to try for a smaller
779 type with BLKmode. */
780 if (in_record
&& size
<= MAX_FIXED_MODE_SIZE
)
782 align
= ceil_pow2 (size
);
783 TYPE_ALIGN (new_type
) = align
;
784 new_size
= (size
+ align
- 1) & -align
;
788 unsigned HOST_WIDE_INT align
;
790 /* Do not try to shrink the size if the RM size is not constant. */
791 if (TYPE_CONTAINS_TEMPLATE_P (type
)
792 || !host_integerp (TYPE_ADA_SIZE (type
), 1))
795 /* Round the RM size up to a unit boundary to get the minimal size
796 for a BLKmode record. Give up if it's already the size. */
797 new_size
= TREE_INT_CST_LOW (TYPE_ADA_SIZE (type
));
798 new_size
= (new_size
+ BITS_PER_UNIT
- 1) & -BITS_PER_UNIT
;
799 if (new_size
== size
)
802 align
= new_size
& -new_size
;
803 TYPE_ALIGN (new_type
) = MIN (TYPE_ALIGN (type
), align
);
806 TYPE_USER_ALIGN (new_type
) = 1;
808 /* Now copy the fields, keeping the position and size as we don't want
809 to change the layout by propagating the packedness downwards. */
810 for (old_field
= TYPE_FIELDS (type
); old_field
;
811 old_field
= DECL_CHAIN (old_field
))
813 tree new_field_type
= TREE_TYPE (old_field
);
814 tree new_field
, new_size
;
816 if (RECORD_OR_UNION_TYPE_P (new_field_type
)
817 && !TYPE_FAT_POINTER_P (new_field_type
)
818 && host_integerp (TYPE_SIZE (new_field_type
), 1))
819 new_field_type
= make_packable_type (new_field_type
, true);
821 /* However, for the last field in a not already packed record type
822 that is of an aggregate type, we need to use the RM size in the
823 packable version of the record type, see finish_record_type. */
824 if (!DECL_CHAIN (old_field
)
825 && !TYPE_PACKED (type
)
826 && RECORD_OR_UNION_TYPE_P (new_field_type
)
827 && !TYPE_FAT_POINTER_P (new_field_type
)
828 && !TYPE_CONTAINS_TEMPLATE_P (new_field_type
)
829 && TYPE_ADA_SIZE (new_field_type
))
830 new_size
= TYPE_ADA_SIZE (new_field_type
);
832 new_size
= DECL_SIZE (old_field
);
835 = create_field_decl (DECL_NAME (old_field
), new_field_type
, new_type
,
836 new_size
, bit_position (old_field
),
838 !DECL_NONADDRESSABLE_P (old_field
));
840 DECL_INTERNAL_P (new_field
) = DECL_INTERNAL_P (old_field
);
841 SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field
, old_field
);
842 if (TREE_CODE (new_type
) == QUAL_UNION_TYPE
)
843 DECL_QUALIFIER (new_field
) = DECL_QUALIFIER (old_field
);
845 DECL_CHAIN (new_field
) = field_list
;
846 field_list
= new_field
;
849 finish_record_type (new_type
, nreverse (field_list
), 2, false);
850 relate_alias_sets (new_type
, type
, ALIAS_SET_COPY
);
851 SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type
),
852 DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type
)));
854 /* If this is a padding record, we never want to make the size smaller
855 than what was specified. For QUAL_UNION_TYPE, also copy the size. */
856 if (TYPE_IS_PADDING_P (type
) || TREE_CODE (type
) == QUAL_UNION_TYPE
)
858 TYPE_SIZE (new_type
) = TYPE_SIZE (type
);
859 TYPE_SIZE_UNIT (new_type
) = TYPE_SIZE_UNIT (type
);
864 TYPE_SIZE (new_type
) = bitsize_int (new_size
);
865 TYPE_SIZE_UNIT (new_type
)
866 = size_int ((new_size
+ BITS_PER_UNIT
- 1) / BITS_PER_UNIT
);
869 if (!TYPE_CONTAINS_TEMPLATE_P (type
))
870 SET_TYPE_ADA_SIZE (new_type
, TYPE_ADA_SIZE (type
));
872 compute_record_mode (new_type
);
874 /* Try harder to get a packable type if necessary, for example
875 in case the record itself contains a BLKmode field. */
876 if (in_record
&& TYPE_MODE (new_type
) == BLKmode
)
877 SET_TYPE_MODE (new_type
,
878 mode_for_size_tree (TYPE_SIZE (new_type
), MODE_INT
, 1));
880 /* If neither the mode nor the size has shrunk, return the old type. */
881 if (TYPE_MODE (new_type
) == BLKmode
&& new_size
>= size
)
887 /* Given a type TYPE, return a new type whose size is appropriate for SIZE.
888 If TYPE is the best type, return it. Otherwise, make a new type. We
889 only support new integral and pointer types. FOR_BIASED is true if
890 we are making a biased type. */
893 make_type_from_size (tree type
, tree size_tree
, bool for_biased
)
895 unsigned HOST_WIDE_INT size
;
899 /* If size indicates an error, just return TYPE to avoid propagating
900 the error. Likewise if it's too large to represent. */
901 if (!size_tree
|| !host_integerp (size_tree
, 1))
904 size
= tree_low_cst (size_tree
, 1);
906 switch (TREE_CODE (type
))
911 biased_p
= (TREE_CODE (type
) == INTEGER_TYPE
912 && TYPE_BIASED_REPRESENTATION_P (type
));
914 /* Integer types with precision 0 are forbidden. */
918 /* Only do something if the type isn't a packed array type and doesn't
919 already have the proper size and the size isn't too large. */
920 if (TYPE_IS_PACKED_ARRAY_TYPE_P (type
)
921 || (TYPE_PRECISION (type
) == size
&& biased_p
== for_biased
)
922 || size
> LONG_LONG_TYPE_SIZE
)
925 biased_p
|= for_biased
;
926 if (TYPE_UNSIGNED (type
) || biased_p
)
927 new_type
= make_unsigned_type (size
);
929 new_type
= make_signed_type (size
);
930 TREE_TYPE (new_type
) = TREE_TYPE (type
) ? TREE_TYPE (type
) : type
;
931 SET_TYPE_RM_MIN_VALUE (new_type
,
932 convert (TREE_TYPE (new_type
),
933 TYPE_MIN_VALUE (type
)));
934 SET_TYPE_RM_MAX_VALUE (new_type
,
935 convert (TREE_TYPE (new_type
),
936 TYPE_MAX_VALUE (type
)));
937 /* Copy the name to show that it's essentially the same type and
938 not a subrange type. */
939 TYPE_NAME (new_type
) = TYPE_NAME (type
);
940 TYPE_BIASED_REPRESENTATION_P (new_type
) = biased_p
;
941 SET_TYPE_RM_SIZE (new_type
, bitsize_int (size
));
945 /* Do something if this is a fat pointer, in which case we
946 may need to return the thin pointer. */
947 if (TYPE_FAT_POINTER_P (type
) && size
< POINTER_SIZE
* 2)
949 enum machine_mode p_mode
= mode_for_size (size
, MODE_INT
, 0);
950 if (!targetm
.valid_pointer_mode (p_mode
))
953 build_pointer_type_for_mode
954 (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type
)),
960 /* Only do something if this is a thin pointer, in which case we
961 may need to return the fat pointer. */
962 if (TYPE_IS_THIN_POINTER_P (type
) && size
>= POINTER_SIZE
* 2)
964 build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type
)));
974 /* See if the data pointed to by the hash table slot is marked. */
977 pad_type_hash_marked_p (const void *p
)
979 const_tree
const type
= ((const struct pad_type_hash
*) p
)->type
;
981 return ggc_marked_p (type
);
984 /* Return the cached hash value. */
987 pad_type_hash_hash (const void *p
)
989 return ((const struct pad_type_hash
*) p
)->hash
;
992 /* Return 1 iff the padded types are equivalent. */
995 pad_type_hash_eq (const void *p1
, const void *p2
)
997 const struct pad_type_hash
*const t1
= (const struct pad_type_hash
*) p1
;
998 const struct pad_type_hash
*const t2
= (const struct pad_type_hash
*) p2
;
1001 if (t1
->hash
!= t2
->hash
)
1007 /* We consider that the padded types are equivalent if they pad the same
1008 type and have the same size, alignment and RM size. Taking the mode
1009 into account is redundant since it is determined by the others. */
1011 TREE_TYPE (TYPE_FIELDS (type1
)) == TREE_TYPE (TYPE_FIELDS (type2
))
1012 && TYPE_SIZE (type1
) == TYPE_SIZE (type2
)
1013 && TYPE_ALIGN (type1
) == TYPE_ALIGN (type2
)
1014 && TYPE_ADA_SIZE (type1
) == TYPE_ADA_SIZE (type2
);
1017 /* Ensure that TYPE has SIZE and ALIGN.  Make and return a new padded type
1018 if needed.  We have already verified that SIZE and TYPE are large enough.
1019 GNAT_ENTITY is used to name the resulting record and to issue a warning.
1020 IS_COMPONENT_TYPE is true if this is being done for the component type of
1021 an array.  IS_USER_TYPE is true if the original type needs to be completed.
1022 DEFINITION is true if this type is being defined.  SET_RM_SIZE is true if
1023 the RM size of the resulting type is to be set to SIZE too.  */
/* NOTE(review): this dump is fragmentary — the embedded original line numbers
   jump (e.g. 1028 -> 1030, 1055 -> 1058), so braces, conditionals and return
   statements are missing here.  Code below is kept byte-identical; compare
   against the upstream gcc/ada/gcc-interface/utils.c before editing.  */
1026 maybe_pad_type (tree type
, tree size
, unsigned int align
,
1027 Entity_Id gnat_entity
, bool is_component_type
,
1028 bool is_user_type
, bool definition
, bool set_rm_size
)
1030 tree orig_size
= TYPE_SIZE (type
);
1033 /* If TYPE is a padded type, see if it agrees with any size and alignment
1034 we were given.  If so, return the original type.  Otherwise, strip
1035 off the padding, since we will either be returning the inner type
1036 or repadding it.  If no size or alignment is specified, use that of
1037 the original padded type.  */
1038 if (TYPE_IS_PADDING_P (type
))
1041 || operand_equal_p (round_up (size
,
1042 MAX (align
, TYPE_ALIGN (type
))),
1043 round_up (TYPE_SIZE (type
),
1044 MAX (align
, TYPE_ALIGN (type
))),
1046 && (align
== 0 || align
== TYPE_ALIGN (type
)))
1050 size
= TYPE_SIZE (type
);
1052 align
= TYPE_ALIGN (type
);
/* Strip the padding: continue with the inner type and its size.  */
1054 type
= TREE_TYPE (TYPE_FIELDS (type
));
1055 orig_size
= TYPE_SIZE (type
);
1058 /* If the size is either not being changed or is being made smaller (which
1059 is not done here and is only valid for bitfields anyway), show the size
1060 isn't changing.  Likewise, clear the alignment if it isn't being
1061 changed.  Then return if we aren't doing anything.  */
1063 && (operand_equal_p (size
, orig_size
, 0)
1064 || (TREE_CODE (orig_size
) == INTEGER_CST
1065 && tree_int_cst_lt (size
, orig_size
))))
1068 if (align
== TYPE_ALIGN (type
))
1071 if (align
== 0 && !size
)
1074 /* If requested, complete the original type and give it a name.  */
1076 create_type_decl (get_entity_name (gnat_entity
), type
,
1077 NULL
, !Comes_From_Source (gnat_entity
),
1079 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
1080 && DECL_IGNORED_P (TYPE_NAME (type
))),
1083 /* We used to modify the record in place in some cases, but that could
1084 generate incorrect debugging information.  So make a new record
1086 record
= make_node (RECORD_TYPE
);
1087 TYPE_PADDING_P (record
) = 1;
1089 if (Present (gnat_entity
))
1090 TYPE_NAME (record
) = create_concat_name (gnat_entity
, "PAD");
1092 TYPE_ALIGN (record
) = align
;
1093 TYPE_SIZE (record
) = size
? size
: orig_size
;
/* Byte size = ceil (bit size / bits-per-unit), converted to sizetype.  */
1094 TYPE_SIZE_UNIT (record
)
1095 = convert (sizetype
,
1096 size_binop (CEIL_DIV_EXPR
, TYPE_SIZE (record
),
1097 bitsize_unit_node
));
1099 /* If we are changing the alignment and the input type is a record with
1100 BLKmode and a small constant size, try to make a form that has an
1101 integral mode.  This might allow the padding record to also have an
1102 integral mode, which will be much more efficient.  There is no point
1103 in doing so if a size is specified unless it is also a small constant
1104 size and it is incorrect to do so if we cannot guarantee that the mode
1105 will be naturally aligned since the field must always be addressable.
1107 ??? This might not always be a win when done for a stand-alone object:
1108 since the nominal and the effective type of the object will now have
1109 different modes, a VIEW_CONVERT_EXPR will be required for converting
1110 between them and it might be hard to overcome afterwards, including
1111 at the RTL level when the stand-alone object is accessed as a whole.  */
1113 && RECORD_OR_UNION_TYPE_P (type
)
1114 && TYPE_MODE (type
) == BLKmode
1115 && !TYPE_BY_REFERENCE_P (type
)
1116 && TREE_CODE (orig_size
) == INTEGER_CST
1117 && !TREE_OVERFLOW (orig_size
)
1118 && compare_tree_int (orig_size
, MAX_FIXED_MODE_SIZE
) <= 0
1120 || (TREE_CODE (size
) == INTEGER_CST
1121 && compare_tree_int (size
, MAX_FIXED_MODE_SIZE
) <= 0)))
1123 tree packable_type
= make_packable_type (type
, true);
/* Only adopt the packable form if it actually got an integral mode and
   the requested alignment is compatible with it.  */
1124 if (TYPE_MODE (packable_type
) != BLKmode
1125 && align
>= TYPE_ALIGN (packable_type
))
1126 type
= packable_type
;
1129 /* Now create the field with the original size.  */
1130 field
= create_field_decl (get_identifier ("F"), type
, record
, orig_size
,
1131 bitsize_zero_node
, 0, 1);
1132 DECL_INTERNAL_P (field
) = 1;
1134 /* Do not emit debug info until after the auxiliary record is built.  */
1135 finish_record_type (record
, field
, 1, false);
1137 /* Set the RM size if requested.  */
1140 SET_TYPE_ADA_SIZE (record
, size
? size
: orig_size
);
1142 /* If the padded type is complete and has constant size, we canonicalize
1143 it by means of the hash table.  This is consistent with the language
1144 semantics and ensures that gigi and the middle-end have a common view
1145 of these padded types.  */
1146 if (TREE_CONSTANT (TYPE_SIZE (record
)))
1149 struct pad_type_hash in
, *h
;
/* The hash folds the inner type, size, alignment and RM size — the same
   quadruple pad_type_hash_eq compares.  */
1152 hashcode
= iterative_hash_object (TYPE_HASH (type
), 0);
1153 hashcode
= iterative_hash_expr (TYPE_SIZE (record
), hashcode
);
1154 hashcode
= iterative_hash_hashval_t (TYPE_ALIGN (record
), hashcode
);
1155 hashcode
= iterative_hash_expr (TYPE_ADA_SIZE (record
), hashcode
);
1159 h
= (struct pad_type_hash
*)
1160 htab_find_with_hash (pad_type_hash_table
, &in
, hashcode
);
/* Not found: allocate and insert a fresh entry for this padded type.  */
1167 h
= ggc_alloc_pad_type_hash ();
1170 loc
= htab_find_slot_with_hash (pad_type_hash_table
, h
, hashcode
,
1176 /* Unless debugging information isn't being written for the input type,
1177 write a record that shows what we are a subtype of and also make a
1178 variable that indicates our size, if still variable.  */
1179 if (TREE_CODE (orig_size
) != INTEGER_CST
1180 && TYPE_NAME (record
)
1182 && !(TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
1183 && DECL_IGNORED_P (TYPE_NAME (type
))))
/* Build the "XVS" parallel record the debugger uses to relate the padded
   type to its original type (see exp_dbug.ads naming conventions).  */
1185 tree marker
= make_node (RECORD_TYPE
);
1186 tree name
= TYPE_NAME (record
);
1187 tree orig_name
= TYPE_NAME (type
);
1189 if (TREE_CODE (name
) == TYPE_DECL
)
1190 name
= DECL_NAME (name
);
1192 if (TREE_CODE (orig_name
) == TYPE_DECL
)
1193 orig_name
= DECL_NAME (orig_name
);
1195 TYPE_NAME (marker
) = concat_name (name
, "XVS");
1196 finish_record_type (marker
,
1197 create_field_decl (orig_name
,
1198 build_reference_type (type
),
1199 marker
, NULL_TREE
, NULL_TREE
,
1203 add_parallel_type (record
, marker
);
/* Variable size in a definition: also emit an "XVZ" variable holding the
   byte size for the debugger.  */
1205 if (definition
&& size
&& TREE_CODE (size
) != INTEGER_CST
)
1206 TYPE_SIZE_UNIT (marker
)
1207 = create_var_decl (concat_name (name
, "XVZ"), NULL_TREE
, sizetype
,
1208 TYPE_SIZE_UNIT (record
), false, false, false,
1209 false, NULL
, gnat_entity
);
1212 rest_of_record_type_compilation (record
);
1215 /* If the size was widened explicitly, maybe give a warning.  Take the
1216 original size as the maximum size of the input if there was an
1217 unconstrained record involved and round it up to the specified alignment,
1218 if one was specified.  But don't do it if we are just annotating types
1219 and the type is tagged, since tagged types aren't fully laid out in this
1222 || TREE_CODE (size
) == COND_EXPR
1223 || TREE_CODE (size
) == MAX_EXPR
1225 || (type_annotate_only
&& Is_Tagged_Type (Etype (gnat_entity
))))
1228 if (CONTAINS_PLACEHOLDER_P (orig_size
))
1229 orig_size
= max_size (orig_size
, true);
1232 orig_size
= round_up (orig_size
, align
);
/* Warn only when SIZE is provably strictly larger than ORIG_SIZE.  */
1234 if (!operand_equal_p (size
, orig_size
, 0)
1235 && !(TREE_CODE (size
) == INTEGER_CST
1236 && TREE_CODE (orig_size
) == INTEGER_CST
1237 && (TREE_OVERFLOW (size
)
1238 || TREE_OVERFLOW (orig_size
)
1239 || tree_int_cst_lt (size
, orig_size
))))
1241 Node_Id gnat_error_node
= Empty
;
1243 if (Is_Packed_Array_Type (gnat_entity
))
1244 gnat_entity
= Original_Array_Type (gnat_entity
);
/* Point the diagnostic at the rep clause that caused the padding.  */
1246 if ((Ekind (gnat_entity
) == E_Component
1247 || Ekind (gnat_entity
) == E_Discriminant
)
1248 && Present (Component_Clause (gnat_entity
)))
1249 gnat_error_node
= Last_Bit (Component_Clause (gnat_entity
));
1250 else if (Present (Size_Clause (gnat_entity
)))
1251 gnat_error_node
= Expression (Size_Clause (gnat_entity
));
1253 /* Generate message only for entities that come from source, since
1254 if we have an entity created by expansion, the message will be
1255 generated for some other corresponding source entity.  */
1256 if (Comes_From_Source (gnat_entity
))
1258 if (Present (gnat_error_node
))
1259 post_error_ne_tree ("{^ }bits of & unused?",
1260 gnat_error_node
, gnat_entity
,
1261 size_diffop (size
, orig_size
));
1262 else if (is_component_type
)
1263 post_error_ne_tree ("component of& padded{ by ^ bits}?",
1264 gnat_entity
, gnat_entity
,
1265 size_diffop (size
, orig_size
));
1272 /* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
1273 If this is a multi-dimensional array type, do this recursively.
1276 - ALIAS_SET_COPY: the new set is made a copy of the old one.
1277 - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
1278 - ALIAS_SET_SUBSET: the new set is made a subset of the old one.  */
/* NOTE(review): fragmentary dump — the `switch (op)` scaffolding, `break`s
   and closing braces between the visible case labels are missing here;
   compare against the upstream file before editing.  */
1281 relate_alias_sets (tree gnu_new_type
, tree gnu_old_type
, enum alias_set_op op
)
1283 /* Remove any padding from GNU_OLD_TYPE.  It doesn't matter in the case
1284 of a one-dimensional array, since the padding has the same alias set
1285 as the field type, but if it's a multi-dimensional array, we need to
1286 see the inner types.  */
1287 while (TREE_CODE (gnu_old_type
) == RECORD_TYPE
1288 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type
)
1289 || TYPE_PADDING_P (gnu_old_type
)))
1290 gnu_old_type
= TREE_TYPE (TYPE_FIELDS (gnu_old_type
));
1292 /* Unconstrained array types are deemed incomplete and would thus be given
1293 alias set 0.  Retrieve the underlying array type.  */
1294 if (TREE_CODE (gnu_old_type
) == UNCONSTRAINED_ARRAY_TYPE
)
1296 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type
))));
1297 if (TREE_CODE (gnu_new_type
) == UNCONSTRAINED_ARRAY_TYPE
)
1299 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type
))));
/* Multi-dimensional arrays: recurse on the inner dimension first so that
   every level of the two array types gets related.  */
1301 if (TREE_CODE (gnu_new_type
) == ARRAY_TYPE
1302 && TREE_CODE (TREE_TYPE (gnu_new_type
)) == ARRAY_TYPE
1303 && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type
)))
1304 relate_alias_sets (TREE_TYPE (gnu_new_type
), TREE_TYPE (gnu_old_type
), op
);
1308 case ALIAS_SET_COPY
:
1309 /* The alias set shouldn't be copied between array types with different
1310 aliasing settings because this can break the aliasing relationship
1311 between the array type and its element type.  */
1312 #ifndef ENABLE_CHECKING
1313 if (flag_strict_aliasing
)
1315 gcc_assert (!(TREE_CODE (gnu_new_type
) == ARRAY_TYPE
1316 && TREE_CODE (gnu_old_type
) == ARRAY_TYPE
1317 && TYPE_NONALIASED_COMPONENT (gnu_new_type
)
1318 != TYPE_NONALIASED_COMPONENT (gnu_old_type
)));
1320 TYPE_ALIAS_SET (gnu_new_type
) = get_alias_set (gnu_old_type
);
1323 case ALIAS_SET_SUBSET
:
1324 case ALIAS_SET_SUPERSET
:
1326 alias_set_type old_set
= get_alias_set (gnu_old_type
);
1327 alias_set_type new_set
= get_alias_set (gnu_new_type
);
1329 /* Do nothing if the alias sets conflict.  This ensures that we
1330 never call record_alias_subset several times for the same pair
1331 or at all for alias set 0.  */
1332 if (!alias_sets_conflict_p (old_set
, new_set
))
1334 if (op
== ALIAS_SET_SUBSET
)
1335 record_alias_subset (old_set
, new_set
);
/* ALIAS_SET_SUPERSET: the subset relation goes the other way.  */
1337 record_alias_subset (new_set
, old_set
);
1346 record_component_aliases (gnu_new_type
);
1349 /* Record TYPE as a builtin type for Ada. NAME is the name of the type.
1350 ARTIFICIAL_P is true if it's a type that was generated by the compiler. */
1353 record_builtin_type (const char *name
, tree type
, bool artificial_p
)
1355 tree type_decl
= build_decl (input_location
,
1356 TYPE_DECL
, get_identifier (name
), type
);
1357 DECL_ARTIFICIAL (type_decl
) = artificial_p
;
1358 TYPE_ARTIFICIAL (type
) = artificial_p
;
1359 gnat_pushdecl (type_decl
, Empty
);
1361 if (debug_hooks
->type_decl
)
1362 debug_hooks
->type_decl (type_decl
, false);
1365 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1366 finish constructing the record type as a fat pointer type. */
1369 finish_fat_pointer_type (tree record_type
, tree field_list
)
1371 /* Make sure we can put it into a register. */
1372 if (STRICT_ALIGNMENT
)
1373 TYPE_ALIGN (record_type
) = MIN (BIGGEST_ALIGNMENT
, 2 * POINTER_SIZE
);
1375 /* Show what it really is. */
1376 TYPE_FAT_POINTER_P (record_type
) = 1;
1378 /* Do not emit debug info for it since the types of its fields may still be
1379 incomplete at this point. */
1380 finish_record_type (record_type
, field_list
, 0, false);
1382 /* Force type_contains_placeholder_p to return true on it. Although the
1383 PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
1384 type but the representation of the unconstrained array. */
1385 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type
) = 2;
1388 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1389 finish constructing the record or union type.  If REP_LEVEL is zero, this
1390 record has no representation clause and so will be entirely laid out here.
1391 If REP_LEVEL is one, this record has a representation clause and has been
1392 laid out already; only set the sizes and alignment.  If REP_LEVEL is two,
1393 this record is derived from a parent record and thus inherits its layout;
1394 only make a pass on the fields to finalize them.  DEBUG_INFO_P is true if
1395 we need to write debug information about this type.  */
/* NOTE(review): fragmentary dump — braces, several conditionals, local
   declarations and return paths are missing (gaps in the embedded original
   line numbers).  Code below is kept byte-identical; compare against the
   upstream gcc/ada/gcc-interface/utils.c before editing.  */
1398 finish_record_type (tree record_type
, tree field_list
, int rep_level
,
1401 enum tree_code code
= TREE_CODE (record_type
);
1402 tree name
= TYPE_NAME (record_type
);
1403 tree ada_size
= bitsize_zero_node
;
1404 tree size
= bitsize_zero_node
;
/* Remember what layout information was already present on entry, so the
   epilogue only overwrites what was actually computed here.  */
1405 bool had_size
= TYPE_SIZE (record_type
) != 0;
1406 bool had_size_unit
= TYPE_SIZE_UNIT (record_type
) != 0;
1407 bool had_align
= TYPE_ALIGN (record_type
) != 0;
1410 TYPE_FIELDS (record_type
) = field_list
;
1412 /* Always attach the TYPE_STUB_DECL for a record type.  It is required to
1413 generate debug info and have a parallel type.  */
1414 if (name
&& TREE_CODE (name
) == TYPE_DECL
)
1415 name
= DECL_NAME (name
);
1416 TYPE_STUB_DECL (record_type
) = create_type_stub_decl (name
, record_type
);
1418 /* Globally initialize the record first.  If this is a rep'ed record,
1419 that just means some initializations; otherwise, layout the record.  */
1422 TYPE_ALIGN (record_type
) = MAX (BITS_PER_UNIT
, TYPE_ALIGN (record_type
));
1425 TYPE_SIZE_UNIT (record_type
) = size_zero_node
;
1428 TYPE_SIZE (record_type
) = bitsize_zero_node
;
1430 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
1431 out just like a UNION_TYPE, since the size will be fixed.  */
1432 else if (code
== QUAL_UNION_TYPE
)
1437 /* Ensure there isn't a size already set.  There can be in an error
1438 case where there is a rep clause but all fields have errors and
1439 no longer have a position.  */
1440 TYPE_SIZE (record_type
) = 0;
1442 /* Ensure we use the traditional GCC layout for bitfields when we need
1443 to pack the record type or have a representation clause.  The other
1444 possible layout (Microsoft C compiler), if available, would prevent
1445 efficient packing in almost all cases.  */
1446 #ifdef TARGET_MS_BITFIELD_LAYOUT
1447 if (TARGET_MS_BITFIELD_LAYOUT
&& TYPE_PACKED (record_type
))
1448 decl_attributes (&record_type
,
1449 tree_cons (get_identifier ("gcc_struct"),
1450 NULL_TREE
, NULL_TREE
),
1451 ATTR_FLAG_TYPE_IN_PLACE
);
1454 layout_type (record_type
);
1457 /* At this point, the position and size of each field is known.  It was
1458 either set before entry by a rep clause, or by laying out the type above.
1460 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
1461 to compute the Ada size; the GCC size and alignment (for rep'ed records
1462 that are not padding types); and the mode (for rep'ed records).  We also
1463 clear the DECL_BIT_FIELD indication for the cases we know have not been
1464 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly.  */
1466 if (code
== QUAL_UNION_TYPE
)
1467 field_list
= nreverse (field_list
);
1469 for (field
= field_list
; field
; field
= DECL_CHAIN (field
))
1471 tree type
= TREE_TYPE (field
);
1472 tree pos
= bit_position (field
);
1473 tree this_size
= DECL_SIZE (field
);
/* Prefer the RM (Ada) size of a composite field when one is recorded.  */
1476 if (RECORD_OR_UNION_TYPE_P (type
)
1477 && !TYPE_FAT_POINTER_P (type
)
1478 && !TYPE_CONTAINS_TEMPLATE_P (type
)
1479 && TYPE_ADA_SIZE (type
))
1480 this_ada_size
= TYPE_ADA_SIZE (type
);
1482 this_ada_size
= this_size
;
1484 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle.  */
1485 if (DECL_BIT_FIELD (field
)
1486 && operand_equal_p (this_size
, TYPE_SIZE (type
), 0))
1488 unsigned int align
= TYPE_ALIGN (type
);
1490 /* In the general case, type alignment is required.  */
1491 if (value_factor_p (pos
, align
))
1493 /* The enclosing record type must be sufficiently aligned.
1494 Otherwise, if no alignment was specified for it and it
1495 has been laid out already, bump its alignment to the
1496 desired one if this is compatible with its size.  */
1497 if (TYPE_ALIGN (record_type
) >= align
)
1499 DECL_ALIGN (field
) = MAX (DECL_ALIGN (field
), align
);
1500 DECL_BIT_FIELD (field
) = 0;
1504 && value_factor_p (TYPE_SIZE (record_type
), align
))
1506 TYPE_ALIGN (record_type
) = align
;
1507 DECL_ALIGN (field
) = MAX (DECL_ALIGN (field
), align
);
1508 DECL_BIT_FIELD (field
) = 0;
1512 /* In the non-strict alignment case, only byte alignment is.  */
1513 if (!STRICT_ALIGNMENT
1514 && DECL_BIT_FIELD (field
)
1515 && value_factor_p (pos
, BITS_PER_UNIT
))
1516 DECL_BIT_FIELD (field
) = 0;
1519 /* If we still have DECL_BIT_FIELD set at this point, we know that the
1520 field is technically not addressable.  Except that it can actually
1521 be addressed if it is BLKmode and happens to be properly aligned.  */
1522 if (DECL_BIT_FIELD (field
)
1523 && !(DECL_MODE (field
) == BLKmode
1524 && value_factor_p (pos
, BITS_PER_UNIT
)))
1525 DECL_NONADDRESSABLE_P (field
) = 1;
1527 /* A type must be as aligned as its most aligned field that is not
1528 a bit-field.  But this is already enforced by layout_type.  */
1529 if (rep_level
> 0 && !DECL_BIT_FIELD (field
))
1530 TYPE_ALIGN (record_type
)
1531 = MAX (TYPE_ALIGN (record_type
), DECL_ALIGN (field
));
/* Plain UNION_TYPE: the size is the max over all fields.  */
1536 ada_size
= size_binop (MAX_EXPR
, ada_size
, this_ada_size
);
1537 size
= size_binop (MAX_EXPR
, size
, this_size
);
1540 case QUAL_UNION_TYPE
:
/* QUAL_UNION_TYPE: the size is selected by each field's qualifier.  */
1542 = fold_build3 (COND_EXPR
, bitsizetype
, DECL_QUALIFIER (field
),
1543 this_ada_size
, ada_size
);
1544 size
= fold_build3 (COND_EXPR
, bitsizetype
, DECL_QUALIFIER (field
),
1549 /* Since we know here that all fields are sorted in order of
1550 increasing bit position, the size of the record is one
1551 higher than the ending bit of the last field processed
1552 unless we have a rep clause, since in that case we might
1553 have a field outside a QUAL_UNION_TYPE that has a higher ending
1554 position.  So use a MAX in that case.  Also, if this field is a
1555 QUAL_UNION_TYPE, we need to take into account the previous size in
1556 the case of empty variants.  */
1558 = merge_sizes (ada_size
, pos
, this_ada_size
,
1559 TREE_CODE (type
) == QUAL_UNION_TYPE
, rep_level
> 0);
1561 = merge_sizes (size
, pos
, this_size
,
1562 TREE_CODE (type
) == QUAL_UNION_TYPE
, rep_level
> 0);
/* Restore the fields to their original order after the reverse pass.  */
1570 if (code
== QUAL_UNION_TYPE
)
1571 nreverse (field_list
);
1575 /* If this is a padding record, we never want to make the size smaller
1576 than what was specified in it, if any.  */
1577 if (TYPE_IS_PADDING_P (record_type
) && TYPE_SIZE (record_type
))
1578 size
= TYPE_SIZE (record_type
);
1580 /* Now set any of the values we've just computed that apply.  */
1581 if (!TYPE_FAT_POINTER_P (record_type
)
1582 && !TYPE_CONTAINS_TEMPLATE_P (record_type
))
1583 SET_TYPE_ADA_SIZE (record_type
, ada_size
);
1587 tree size_unit
= had_size_unit
1588 ? TYPE_SIZE_UNIT (record_type
)
1589 : convert (sizetype
,
1590 size_binop (CEIL_DIV_EXPR
, size
,
1591 bitsize_unit_node
));
1592 unsigned int align
= TYPE_ALIGN (record_type
);
1594 TYPE_SIZE (record_type
) = variable_size (round_up (size
, align
));
1595 TYPE_SIZE_UNIT (record_type
)
1596 = variable_size (round_up (size_unit
, align
/ BITS_PER_UNIT
));
1598 compute_record_mode (record_type
);
1603 rest_of_record_type_compilation (record_type
);
1606 /* Append PARALLEL_TYPE on the chain of parallel types of TYPE. */
1609 add_parallel_type (tree type
, tree parallel_type
)
1611 tree decl
= TYPE_STUB_DECL (type
);
1613 while (DECL_PARALLEL_TYPE (decl
))
1614 decl
= TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl
));
1616 SET_DECL_PARALLEL_TYPE (decl
, parallel_type
);
1619 /* Return true if TYPE has a parallel type. */
1622 has_parallel_type (tree type
)
1624 tree decl
= TYPE_STUB_DECL (type
);
1626 return DECL_PARALLEL_TYPE (decl
) != NULL_TREE
;
1629 /* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
1630 associated with it.  It need not be invoked directly in most cases since
1631 finish_record_type takes care of doing so, but this can be necessary if
1632 a parallel type is to be attached to the record type.  */
/* NOTE(review): fragmentary dump — early returns, braces, several local
   declarations (e.g. the `pos`, `var`, `pow`, `suffix` and `new_field`
   declarations) are missing (gaps in the embedded original line numbers).
   Code below is kept byte-identical; compare against the upstream file
   before editing.  */
1635 rest_of_record_type_compilation (tree record_type
)
1637 bool var_size
= false;
1640 /* If this is a padded type, the bulk of the debug info has already been
1641 generated for the field's type.  */
1642 if (TYPE_IS_PADDING_P (record_type
))
1645 /* If the type already has a parallel type (XVS type), then we're done.  */
1646 if (has_parallel_type (record_type
))
1649 for (field
= TYPE_FIELDS (record_type
); field
; field
= DECL_CHAIN (field
))
1651 /* We need to make an XVE/XVU record if any field has variable size,
1652 whether or not the record does.  For example, if we have a union,
1653 it may be that all fields, rounded up to the alignment, have the
1654 same size, in which case we'll use that size.  But the debug
1655 output routines (except Dwarf2) won't be able to output the fields,
1656 so we need to make the special record.  */
1657 if (TREE_CODE (DECL_SIZE (field
)) != INTEGER_CST
1658 /* If a field has a non-constant qualifier, the record will have
1659 variable size too.  */
1660 || (TREE_CODE (record_type
) == QUAL_UNION_TYPE
1661 && TREE_CODE (DECL_QUALIFIER (field
)) != INTEGER_CST
))
1668 /* If this record type is of variable size, make a parallel record type that
1669 will tell the debugger how the former is laid out (see exp_dbug.ads).  */
/* The parallel type is a plain UNION_TYPE for a QUAL_UNION_TYPE, since the
   qualifier encoding cannot be expressed to the debugger directly.  */
1672 tree new_record_type
1673 = make_node (TREE_CODE (record_type
) == QUAL_UNION_TYPE
1674 ? UNION_TYPE
: TREE_CODE (record_type
));
1675 tree orig_name
= TYPE_NAME (record_type
), new_name
;
1676 tree last_pos
= bitsize_zero_node
;
1677 tree old_field
, prev_old_field
= NULL_TREE
;
1679 if (TREE_CODE (orig_name
) == TYPE_DECL
)
1680 orig_name
= DECL_NAME (orig_name
);
1683 = concat_name (orig_name
, TREE_CODE (record_type
) == QUAL_UNION_TYPE
1685 TYPE_NAME (new_record_type
) = new_name
;
1686 TYPE_ALIGN (new_record_type
) = BIGGEST_ALIGNMENT
;
1687 TYPE_STUB_DECL (new_record_type
)
1688 = create_type_stub_decl (new_name
, new_record_type
);
1689 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type
))
1690 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type
));
1691 TYPE_SIZE (new_record_type
) = size_int (TYPE_ALIGN (record_type
));
1692 TYPE_SIZE_UNIT (new_record_type
)
1693 = size_int (TYPE_ALIGN (record_type
) / BITS_PER_UNIT
);
1695 /* Now scan all the fields, replacing each field with a new
1696 field corresponding to the new encoding.  */
1697 for (old_field
= TYPE_FIELDS (record_type
); old_field
;
1698 old_field
= DECL_CHAIN (old_field
))
1700 tree field_type
= TREE_TYPE (old_field
);
1701 tree field_name
= DECL_NAME (old_field
);
1703 tree curpos
= bit_position (old_field
);
1705 unsigned int align
= 0;
1708 /* See how the position was modified from the last position.
1710 There are two basic cases we support: a value was added
1711 to the last position or the last position was rounded to
1712 a boundary and they something was added.  Check for the
1713 first case first.  If not, see if there is any evidence
1714 of rounding.  If so, round the last position and try
1717 If this is a union, the position can be taken as zero.  */
1719 /* Some computations depend on the shape of the position expression,
1720 so strip conversions to make sure it's exposed.  */
1721 curpos
= remove_conversions (curpos
, true);
1723 if (TREE_CODE (new_record_type
) == UNION_TYPE
)
1724 pos
= bitsize_zero_node
, align
= 0;
1726 pos
= compute_related_constant (curpos
, last_pos
);
/* Position of the form "variable * constant": the constant factor is the
   alignment; look for an explicit mask to refine it.  */
1728 if (!pos
&& TREE_CODE (curpos
) == MULT_EXPR
1729 && host_integerp (TREE_OPERAND (curpos
, 1), 1))
1731 tree offset
= TREE_OPERAND (curpos
, 0);
1732 align
= tree_low_cst (TREE_OPERAND (curpos
, 1), 1);
1734 /* An offset which is a bitwise AND with a negative power of 2
1735 means an alignment corresponding to this power of 2.  Note
1736 that, as sizetype is sign-extended but nonetheless unsigned,
1737 we don't directly use tree_int_cst_sgn.  */
1738 offset
= remove_conversions (offset
, true);
1739 if (TREE_CODE (offset
) == BIT_AND_EXPR
1740 && host_integerp (TREE_OPERAND (offset
, 1), 0)
1741 && TREE_INT_CST_HIGH (TREE_OPERAND (offset
, 1)) < 0)
1744 = - tree_low_cst (TREE_OPERAND (offset
, 1), 0);
1745 if (exact_log2 (pow
) > 0)
1749 pos
= compute_related_constant (curpos
,
1750 round_up (last_pos
, align
));
/* Position of the form "variable * constant + constant": same alignment
   inference on the multiplication part.  */
1752 else if (!pos
&& TREE_CODE (curpos
) == PLUS_EXPR
1753 && TREE_CODE (TREE_OPERAND (curpos
, 1)) == INTEGER_CST
1754 && TREE_CODE (TREE_OPERAND (curpos
, 0)) == MULT_EXPR
1755 && host_integerp (TREE_OPERAND
1756 (TREE_OPERAND (curpos
, 0), 1),
1761 (TREE_OPERAND (TREE_OPERAND (curpos
, 0), 1), 1);
1762 pos
= compute_related_constant (curpos
,
1763 round_up (last_pos
, align
));
1765 else if (potential_alignment_gap (prev_old_field
, old_field
,
1768 align
= TYPE_ALIGN (field_type
);
1769 pos
= compute_related_constant (curpos
,
1770 round_up (last_pos
, align
));
1773 /* If we can't compute a position, set it to zero.
1775 ??? We really should abort here, but it's too much work
1776 to get this correct for all cases.  */
1779 pos
= bitsize_zero_node
;
1781 /* See if this type is variable-sized and make a pointer type
1782 and indicate the indirection if so.  Beware that the debug
1783 back-end may adjust the position computed above according
1784 to the alignment of the field type, i.e. the pointer type
1785 in this case, if we don't preventively counter that.  */
1786 if (TREE_CODE (DECL_SIZE (old_field
)) != INTEGER_CST
)
1788 field_type
= build_pointer_type (field_type
);
1789 if (align
!= 0 && TYPE_ALIGN (field_type
) > align
)
1791 field_type
= copy_node (field_type
);
1792 TYPE_ALIGN (field_type
) = align
;
1797 /* Make a new field name, if necessary.  */
/* Encode variability/alignment in the name per exp_dbug.ads: "XVL" for a
   variable-sized field, "XVA<bytes>" for an aligned one.  */
1798 if (var
|| align
!= 0)
1803 sprintf (suffix
, "XV%c%u", var
? 'L' : 'A',
1804 align
/ BITS_PER_UNIT
);
1806 strcpy (suffix
, "XVL");
1808 field_name
= concat_name (field_name
, suffix
);
1812 = create_field_decl (field_name
, field_type
, new_record_type
,
1813 DECL_SIZE (old_field
), pos
, 0, 0);
/* Prepend; the whole list is reversed once after the loop.  */
1814 DECL_CHAIN (new_field
) = TYPE_FIELDS (new_record_type
);
1815 TYPE_FIELDS (new_record_type
) = new_field
;
1817 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1818 zero.  The only time it's not the last field of the record
1819 is when there are other components at fixed positions after
1820 it (meaning there was a rep clause for every field) and we
1821 want to be able to encode them.  */
1822 last_pos
= size_binop (PLUS_EXPR
, bit_position (old_field
),
1823 (TREE_CODE (TREE_TYPE (old_field
))
1826 : DECL_SIZE (old_field
));
1827 prev_old_field
= old_field
;
1830 TYPE_FIELDS (new_record_type
) = nreverse (TYPE_FIELDS (new_record_type
));
1832 add_parallel_type (record_type
, new_record_type
);
1836 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1837 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
1838 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
1839 replace a value of zero with the old size. If HAS_REP is true, we take the
1840 MAX of the end position of this field with LAST_SIZE. In all other cases,
1841 we use FIRST_BIT plus SIZE. Return an expression for the size. */
1844 merge_sizes (tree last_size
, tree first_bit
, tree size
, bool special
,
1847 tree type
= TREE_TYPE (last_size
);
1850 if (!special
|| TREE_CODE (size
) != COND_EXPR
)
1852 new_size
= size_binop (PLUS_EXPR
, first_bit
, size
);
1854 new_size
= size_binop (MAX_EXPR
, last_size
, new_size
);
1858 new_size
= fold_build3 (COND_EXPR
, type
, TREE_OPERAND (size
, 0),
1859 integer_zerop (TREE_OPERAND (size
, 1))
1860 ? last_size
: merge_sizes (last_size
, first_bit
,
1861 TREE_OPERAND (size
, 1),
1863 integer_zerop (TREE_OPERAND (size
, 2))
1864 ? last_size
: merge_sizes (last_size
, first_bit
,
1865 TREE_OPERAND (size
, 2),
1868 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
1869 when fed through substitute_in_expr) into thinking that a constant
1870 size is not constant. */
1871 while (TREE_CODE (new_size
) == NON_LVALUE_EXPR
)
1872 new_size
= TREE_OPERAND (new_size
, 0);
1877 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1878 related by the addition of a constant. Return that constant if so. */
1881 compute_related_constant (tree op0
, tree op1
)
1883 tree op0_var
, op1_var
;
1884 tree op0_con
= split_plus (op0
, &op0_var
);
1885 tree op1_con
= split_plus (op1
, &op1_var
);
1886 tree result
= size_binop (MINUS_EXPR
, op0_con
, op1_con
);
1888 if (operand_equal_p (op0_var
, op1_var
, 0))
1890 else if (operand_equal_p (op0
, size_binop (PLUS_EXPR
, op1_var
, result
), 0))
1896 /* Utility function of above to split a tree OP which may be a sum, into a
1897 constant part, which is returned, and a variable part, which is stored
1898 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1902 split_plus (tree in
, tree
*pvar
)
1904 /* Strip conversions in order to ease the tree traversal and maximize the
1905 potential for constant or plus/minus discovery. We need to be careful
1906 to always return and set *pvar to bitsizetype trees, but it's worth
1908 in
= remove_conversions (in
, false);
1910 *pvar
= convert (bitsizetype
, in
);
1912 if (TREE_CODE (in
) == INTEGER_CST
)
1914 *pvar
= bitsize_zero_node
;
1915 return convert (bitsizetype
, in
);
1917 else if (TREE_CODE (in
) == PLUS_EXPR
|| TREE_CODE (in
) == MINUS_EXPR
)
1919 tree lhs_var
, rhs_var
;
1920 tree lhs_con
= split_plus (TREE_OPERAND (in
, 0), &lhs_var
);
1921 tree rhs_con
= split_plus (TREE_OPERAND (in
, 1), &rhs_var
);
1923 if (lhs_var
== TREE_OPERAND (in
, 0)
1924 && rhs_var
== TREE_OPERAND (in
, 1))
1925 return bitsize_zero_node
;
1927 *pvar
= size_binop (TREE_CODE (in
), lhs_var
, rhs_var
);
1928 return size_binop (TREE_CODE (in
), lhs_con
, rhs_con
);
1931 return bitsize_zero_node
;
1934 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1935 subprogram. If it is VOID_TYPE, then we are dealing with a procedure,
1936 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1937 PARM_DECL nodes that are the subprogram parameters. CICO_LIST is the
1938 copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
1939 RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
1940 object. RETURN_BY_DIRECT_REF_P is true if the function returns by direct
1941 reference. RETURN_BY_INVISI_REF_P is true if the function returns by
1942 invisible reference. */
1945 create_subprog_type (tree return_type
, tree param_decl_list
, tree cico_list
,
1946 bool return_unconstrained_p
, bool return_by_direct_ref_p
,
1947 bool return_by_invisi_ref_p
)
1949 /* A list of the data type nodes of the subprogram formal parameters.
1950 This list is generated by traversing the input list of PARM_DECL
1952 VEC(tree
,gc
) *param_type_list
= NULL
;
1955 for (t
= param_decl_list
; t
; t
= DECL_CHAIN (t
))
1956 VEC_safe_push (tree
, gc
, param_type_list
, TREE_TYPE (t
));
1958 type
= build_function_type_vec (return_type
, param_type_list
);
1960 /* TYPE may have been shared since GCC hashes types. If it has a different
1961 CICO_LIST, make a copy. Likewise for the various flags. */
1962 if (!fntype_same_flags_p (type
, cico_list
, return_unconstrained_p
,
1963 return_by_direct_ref_p
, return_by_invisi_ref_p
))
1965 type
= copy_type (type
);
1966 TYPE_CI_CO_LIST (type
) = cico_list
;
1967 TYPE_RETURN_UNCONSTRAINED_P (type
) = return_unconstrained_p
;
1968 TYPE_RETURN_BY_DIRECT_REF_P (type
) = return_by_direct_ref_p
;
1969 TREE_ADDRESSABLE (type
) = return_by_invisi_ref_p
;
1975 /* Return a copy of TYPE but safe to modify in any way. */
1978 copy_type (tree type
)
1980 tree new_type
= copy_node (type
);
1982 /* Unshare the language-specific data. */
1983 if (TYPE_LANG_SPECIFIC (type
))
1985 TYPE_LANG_SPECIFIC (new_type
) = NULL
;
1986 SET_TYPE_LANG_SPECIFIC (new_type
, GET_TYPE_LANG_SPECIFIC (type
));
1989 /* And the contents of the language-specific slot if needed. */
1990 if ((INTEGRAL_TYPE_P (type
) || TREE_CODE (type
) == REAL_TYPE
)
1991 && TYPE_RM_VALUES (type
))
1993 TYPE_RM_VALUES (new_type
) = NULL_TREE
;
1994 SET_TYPE_RM_SIZE (new_type
, TYPE_RM_SIZE (type
));
1995 SET_TYPE_RM_MIN_VALUE (new_type
, TYPE_RM_MIN_VALUE (type
));
1996 SET_TYPE_RM_MAX_VALUE (new_type
, TYPE_RM_MAX_VALUE (type
));
1999 /* copy_node clears this field instead of copying it, because it is
2000 aliased with TREE_CHAIN. */
2001 TYPE_STUB_DECL (new_type
) = TYPE_STUB_DECL (type
);
2003 TYPE_POINTER_TO (new_type
) = 0;
2004 TYPE_REFERENCE_TO (new_type
) = 0;
2005 TYPE_MAIN_VARIANT (new_type
) = new_type
;
2006 TYPE_NEXT_VARIANT (new_type
) = 0;
2011 /* Return a subtype of sizetype with range MIN to MAX and whose
2012 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2013 of the associated TYPE_DECL. */
2016 create_index_type (tree min
, tree max
, tree index
, Node_Id gnat_node
)
2018 /* First build a type for the desired range. */
2019 tree type
= build_nonshared_range_type (sizetype
, min
, max
);
2021 /* Then set the index type. */
2022 SET_TYPE_INDEX_TYPE (type
, index
);
2023 create_type_decl (NULL_TREE
, type
, NULL
, true, false, gnat_node
);
2028 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2029 sizetype is used. */
2032 create_range_type (tree type
, tree min
, tree max
)
2036 if (type
== NULL_TREE
)
2039 /* First build a type with the base range. */
2040 range_type
= build_nonshared_range_type (type
, TYPE_MIN_VALUE (type
),
2041 TYPE_MAX_VALUE (type
));
2043 /* Then set the actual range. */
2044 SET_TYPE_RM_MIN_VALUE (range_type
, convert (type
, min
));
2045 SET_TYPE_RM_MAX_VALUE (range_type
, convert (type
, max
));
2050 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
2051 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
2055 create_type_stub_decl (tree type_name
, tree type
)
2057 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
2058 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
2059 emitted in DWARF. */
2060 tree type_decl
= build_decl (input_location
,
2061 TYPE_DECL
, type_name
, type
);
2062 DECL_ARTIFICIAL (type_decl
) = 1;
2063 TYPE_ARTIFICIAL (type
) = 1;
2067 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
2068 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
2069 is a declaration that was generated by the compiler. DEBUG_INFO_P is
2070 true if we need to write debug information about this type. GNAT_NODE
2071 is used for the position of the decl. */
2074 create_type_decl (tree type_name
, tree type
, struct attrib
*attr_list
,
2075 bool artificial_p
, bool debug_info_p
, Node_Id gnat_node
)
2077 enum tree_code code
= TREE_CODE (type
);
2078 bool named
= TYPE_NAME (type
) && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
;
2081 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
2082 gcc_assert (!TYPE_IS_DUMMY_P (type
));
2084 /* If the type hasn't been named yet, we're naming it; preserve an existing
2085 TYPE_STUB_DECL that has been attached to it for some purpose. */
2086 if (!named
&& TYPE_STUB_DECL (type
))
2088 type_decl
= TYPE_STUB_DECL (type
);
2089 DECL_NAME (type_decl
) = type_name
;
2092 type_decl
= build_decl (input_location
,
2093 TYPE_DECL
, type_name
, type
);
2095 DECL_ARTIFICIAL (type_decl
) = artificial_p
;
2096 TYPE_ARTIFICIAL (type
) = artificial_p
;
2098 /* Add this decl to the current binding level. */
2099 gnat_pushdecl (type_decl
, gnat_node
);
2101 process_attributes (type_decl
, attr_list
);
2103 /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
2104 This causes the name to be also viewed as a "tag" by the debug
2105 back-end, with the advantage that no DW_TAG_typedef is emitted
2106 for artificial "tagged" types in DWARF. */
2108 TYPE_STUB_DECL (type
) = type_decl
;
2110 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2111 back-end doesn't support, and for others if we don't need to. */
2112 if (code
== UNCONSTRAINED_ARRAY_TYPE
|| !debug_info_p
)
2113 DECL_IGNORED_P (type_decl
) = 1;
2118 /* Return a VAR_DECL or CONST_DECL node.
2120 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
2121 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
2122 the GCC tree for an optional initial expression; NULL_TREE if none.
2124 CONST_FLAG is true if this variable is constant, in which case we might
2125 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2127 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2128 definition to be made visible outside of the current compilation unit, for
2129 instance variable definitions in a package specification.
2131 EXTERN_FLAG is true when processing an external variable declaration (as
2132 opposed to a definition: no storage is to be allocated for the variable).
2134 STATIC_FLAG is only relevant when not at top level. In that case
2135 it indicates whether to always allocate storage to the variable.
2137 GNAT_NODE is used for the position of the decl. */
2140 create_var_decl_1 (tree var_name
, tree asm_name
, tree type
, tree var_init
,
2141 bool const_flag
, bool public_flag
, bool extern_flag
,
2142 bool static_flag
, bool const_decl_allowed_p
,
2143 struct attrib
*attr_list
, Node_Id gnat_node
)
2145 /* Whether the initializer is a constant initializer. At the global level
2146 or for an external object or an object to be allocated in static memory,
2147 we check that it is a valid constant expression for use in initializing
2148 a static variable; otherwise, we only check that it is constant. */
2151 && gnat_types_compatible_p (type
, TREE_TYPE (var_init
))
2152 && (global_bindings_p () || extern_flag
|| static_flag
2153 ? initializer_constant_valid_p (var_init
, TREE_TYPE (var_init
)) != 0
2154 : TREE_CONSTANT (var_init
)));
2156 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
2157 case the initializer may be used in-lieu of the DECL node (as done in
2158 Identifier_to_gnu). This is useful to prevent the need of elaboration
2159 code when an identifier for which such a decl is made is in turn used as
2160 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
2161 but extra constraints apply to this choice (see below) and are not
2162 relevant to the distinction we wish to make. */
2163 bool constant_p
= const_flag
&& init_const
;
2165 /* The actual DECL node. CONST_DECL was initially intended for enumerals
2166 and may be used for scalars in general but not for aggregates. */
2168 = build_decl (input_location
,
2169 (constant_p
&& const_decl_allowed_p
2170 && !AGGREGATE_TYPE_P (type
)) ? CONST_DECL
: VAR_DECL
,
2173 /* If this is external, throw away any initializations (they will be done
2174 elsewhere) unless this is a constant for which we would like to remain
2175 able to get the initializer. If we are defining a global here, leave a
2176 constant initialization and save any variable elaborations for the
2177 elaboration routine. If we are just annotating types, throw away the
2178 initialization if it isn't a constant. */
2179 if ((extern_flag
&& !constant_p
)
2180 || (type_annotate_only
&& var_init
&& !TREE_CONSTANT (var_init
)))
2181 var_init
= NULL_TREE
;
2183 /* At the global level, an initializer requiring code to be generated
2184 produces elaboration statements. Check that such statements are allowed,
2185 that is, not violating a No_Elaboration_Code restriction. */
2186 if (global_bindings_p () && var_init
!= 0 && !init_const
)
2187 Check_Elaboration_Code_Allowed (gnat_node
);
2189 DECL_INITIAL (var_decl
) = var_init
;
2190 TREE_READONLY (var_decl
) = const_flag
;
2191 DECL_EXTERNAL (var_decl
) = extern_flag
;
2192 TREE_PUBLIC (var_decl
) = public_flag
|| extern_flag
;
2193 TREE_CONSTANT (var_decl
) = constant_p
;
2194 TREE_THIS_VOLATILE (var_decl
) = TREE_SIDE_EFFECTS (var_decl
)
2195 = TYPE_VOLATILE (type
);
2197 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
2198 try to fiddle with DECL_COMMON. However, on platforms that don't
2199 support global BSS sections, uninitialized global variables would
2200 go in DATA instead, thus increasing the size of the executable. */
2202 && TREE_CODE (var_decl
) == VAR_DECL
2203 && TREE_PUBLIC (var_decl
)
2204 && !have_global_bss_p ())
2205 DECL_COMMON (var_decl
) = 1;
2207 /* At the global binding level, we need to allocate static storage for the
2208 variable if it isn't external. Otherwise, we allocate automatic storage
2209 unless requested not to. */
2210 TREE_STATIC (var_decl
)
2211 = !extern_flag
&& (static_flag
|| global_bindings_p ());
2213 /* For an external constant whose initializer is not absolute, do not emit
2214 debug info. In DWARF this would mean a global relocation in a read-only
2215 section which runs afoul of the PE-COFF run-time relocation mechanism. */
2219 && initializer_constant_valid_p (var_init
, TREE_TYPE (var_init
))
2220 != null_pointer_node
)
2221 DECL_IGNORED_P (var_decl
) = 1;
2223 /* Add this decl to the current binding level. */
2224 gnat_pushdecl (var_decl
, gnat_node
);
2226 if (TREE_SIDE_EFFECTS (var_decl
))
2227 TREE_ADDRESSABLE (var_decl
) = 1;
2229 if (TREE_CODE (var_decl
) == VAR_DECL
)
2232 SET_DECL_ASSEMBLER_NAME (var_decl
, asm_name
);
2233 process_attributes (var_decl
, attr_list
);
2234 if (global_bindings_p ())
2235 rest_of_decl_compilation (var_decl
, true, 0);
2241 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
2244 aggregate_type_contains_array_p (tree type
)
2246 switch (TREE_CODE (type
))
2250 case QUAL_UNION_TYPE
:
2253 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
2254 if (AGGREGATE_TYPE_P (TREE_TYPE (field
))
2255 && aggregate_type_contains_array_p (TREE_TYPE (field
)))
2268 /* Return a FIELD_DECL node. FIELD_NAME is the field's name, FIELD_TYPE is
2269 its type and RECORD_TYPE is the type of the enclosing record. If SIZE is
2270 nonzero, it is the specified size of the field. If POS is nonzero, it is
2271 the bit position. PACKED is 1 if the enclosing record is packed, -1 if it
2272 has Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
2273 means we are allowed to take the address of the field; if it is negative,
2274 we should not make a bitfield, which is used by make_aligning_type. */
2277 create_field_decl (tree field_name
, tree field_type
, tree record_type
,
2278 tree size
, tree pos
, int packed
, int addressable
)
2280 tree field_decl
= build_decl (input_location
,
2281 FIELD_DECL
, field_name
, field_type
);
2283 DECL_CONTEXT (field_decl
) = record_type
;
2284 TREE_READONLY (field_decl
) = TYPE_READONLY (field_type
);
2286 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
2287 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
2288 Likewise for an aggregate without specified position that contains an
2289 array, because in this case slices of variable length of this array
2290 must be handled by GCC and variable-sized objects need to be aligned
2291 to at least a byte boundary. */
2292 if (packed
&& (TYPE_MODE (field_type
) == BLKmode
2294 && AGGREGATE_TYPE_P (field_type
)
2295 && aggregate_type_contains_array_p (field_type
))))
2296 DECL_ALIGN (field_decl
) = BITS_PER_UNIT
;
2298 /* If a size is specified, use it. Otherwise, if the record type is packed
2299 compute a size to use, which may differ from the object's natural size.
2300 We always set a size in this case to trigger the checks for bitfield
2301 creation below, which is typically required when no position has been
2304 size
= convert (bitsizetype
, size
);
2305 else if (packed
== 1)
2307 size
= rm_size (field_type
);
2308 if (TYPE_MODE (field_type
) == BLKmode
)
2309 size
= round_up (size
, BITS_PER_UNIT
);
2312 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
2313 specified for two reasons: first if the size differs from the natural
2314 size. Second, if the alignment is insufficient. There are a number of
2315 ways the latter can be true.
2317 We never make a bitfield if the type of the field has a nonconstant size,
2318 because no such entity requiring bitfield operations should reach here.
2320 We do *preventively* make a bitfield when there might be the need for it
2321 but we don't have all the necessary information to decide, as is the case
2322 of a field with no specified position in a packed record.
2324 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
2325 in layout_decl or finish_record_type to clear the bit_field indication if
2326 it is in fact not needed. */
2327 if (addressable
>= 0
2329 && TREE_CODE (size
) == INTEGER_CST
2330 && TREE_CODE (TYPE_SIZE (field_type
)) == INTEGER_CST
2331 && (!tree_int_cst_equal (size
, TYPE_SIZE (field_type
))
2332 || (pos
&& !value_factor_p (pos
, TYPE_ALIGN (field_type
)))
2334 || (TYPE_ALIGN (record_type
) != 0
2335 && TYPE_ALIGN (record_type
) < TYPE_ALIGN (field_type
))))
2337 DECL_BIT_FIELD (field_decl
) = 1;
2338 DECL_SIZE (field_decl
) = size
;
2339 if (!packed
&& !pos
)
2341 if (TYPE_ALIGN (record_type
) != 0
2342 && TYPE_ALIGN (record_type
) < TYPE_ALIGN (field_type
))
2343 DECL_ALIGN (field_decl
) = TYPE_ALIGN (record_type
);
2345 DECL_ALIGN (field_decl
) = TYPE_ALIGN (field_type
);
2349 DECL_PACKED (field_decl
) = pos
? DECL_BIT_FIELD (field_decl
) : packed
;
2351 /* Bump the alignment if need be, either for bitfield/packing purposes or
2352 to satisfy the type requirements if no such consideration applies. When
2353 we get the alignment from the type, indicate if this is from an explicit
2354 user request, which prevents stor-layout from lowering it later on. */
2356 unsigned int bit_align
2357 = (DECL_BIT_FIELD (field_decl
) ? 1
2358 : packed
&& TYPE_MODE (field_type
) != BLKmode
? BITS_PER_UNIT
: 0);
2360 if (bit_align
> DECL_ALIGN (field_decl
))
2361 DECL_ALIGN (field_decl
) = bit_align
;
2362 else if (!bit_align
&& TYPE_ALIGN (field_type
) > DECL_ALIGN (field_decl
))
2364 DECL_ALIGN (field_decl
) = TYPE_ALIGN (field_type
);
2365 DECL_USER_ALIGN (field_decl
) = TYPE_USER_ALIGN (field_type
);
2371 /* We need to pass in the alignment the DECL is known to have.
2372 This is the lowest-order bit set in POS, but no more than
2373 the alignment of the record, if one is specified. Note
2374 that an alignment of 0 is taken as infinite. */
2375 unsigned int known_align
;
2377 if (host_integerp (pos
, 1))
2378 known_align
= tree_low_cst (pos
, 1) & - tree_low_cst (pos
, 1);
2380 known_align
= BITS_PER_UNIT
;
2382 if (TYPE_ALIGN (record_type
)
2383 && (known_align
== 0 || known_align
> TYPE_ALIGN (record_type
)))
2384 known_align
= TYPE_ALIGN (record_type
);
2386 layout_decl (field_decl
, known_align
);
2387 SET_DECL_OFFSET_ALIGN (field_decl
,
2388 host_integerp (pos
, 1) ? BIGGEST_ALIGNMENT
2390 pos_from_bit (&DECL_FIELD_OFFSET (field_decl
),
2391 &DECL_FIELD_BIT_OFFSET (field_decl
),
2392 DECL_OFFSET_ALIGN (field_decl
), pos
);
2395 /* In addition to what our caller says, claim the field is addressable if we
2396 know that its type is not suitable.
2398 The field may also be "technically" nonaddressable, meaning that even if
2399 we attempt to take the field's address we will actually get the address
2400 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
2401 value we have at this point is not accurate enough, so we don't account
2402 for this here and let finish_record_type decide. */
2403 if (!addressable
&& !type_for_nonaliased_component_p (field_type
))
2406 DECL_NONADDRESSABLE_P (field_decl
) = !addressable
;
2411 /* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
2412 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
2413 (either an In parameter or an address of a pass-by-ref parameter). */
2416 create_param_decl (tree param_name
, tree param_type
, bool readonly
)
2418 tree param_decl
= build_decl (input_location
,
2419 PARM_DECL
, param_name
, param_type
);
2421 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2422 can lead to various ABI violations. */
2423 if (targetm
.calls
.promote_prototypes (NULL_TREE
)
2424 && INTEGRAL_TYPE_P (param_type
)
2425 && TYPE_PRECISION (param_type
) < TYPE_PRECISION (integer_type_node
))
2427 /* We have to be careful about biased types here. Make a subtype
2428 of integer_type_node with the proper biasing. */
2429 if (TREE_CODE (param_type
) == INTEGER_TYPE
2430 && TYPE_BIASED_REPRESENTATION_P (param_type
))
2433 = make_unsigned_type (TYPE_PRECISION (integer_type_node
));
2434 TREE_TYPE (subtype
) = integer_type_node
;
2435 TYPE_BIASED_REPRESENTATION_P (subtype
) = 1;
2436 SET_TYPE_RM_MIN_VALUE (subtype
, TYPE_MIN_VALUE (param_type
));
2437 SET_TYPE_RM_MAX_VALUE (subtype
, TYPE_MAX_VALUE (param_type
));
2438 param_type
= subtype
;
2441 param_type
= integer_type_node
;
2444 DECL_ARG_TYPE (param_decl
) = param_type
;
2445 TREE_READONLY (param_decl
) = readonly
;
2449 /* Given a DECL and ATTR_LIST, process the listed attributes. */
2452 process_attributes (tree decl
, struct attrib
*attr_list
)
2454 for (; attr_list
; attr_list
= attr_list
->next
)
2455 switch (attr_list
->type
)
2457 case ATTR_MACHINE_ATTRIBUTE
:
2458 input_location
= DECL_SOURCE_LOCATION (decl
);
2459 decl_attributes (&decl
, tree_cons (attr_list
->name
, attr_list
->args
,
2461 ATTR_FLAG_TYPE_IN_PLACE
);
2464 case ATTR_LINK_ALIAS
:
2465 if (! DECL_EXTERNAL (decl
))
2467 TREE_STATIC (decl
) = 1;
2468 assemble_alias (decl
, attr_list
->name
);
2472 case ATTR_WEAK_EXTERNAL
:
2474 declare_weak (decl
);
2476 post_error ("?weak declarations not supported on this target",
2477 attr_list
->error_point
);
2480 case ATTR_LINK_SECTION
:
2481 if (targetm_common
.have_named_sections
)
2483 DECL_SECTION_NAME (decl
)
2484 = build_string (IDENTIFIER_LENGTH (attr_list
->name
),
2485 IDENTIFIER_POINTER (attr_list
->name
));
2486 DECL_COMMON (decl
) = 0;
2489 post_error ("?section attributes are not supported for this target",
2490 attr_list
->error_point
);
2493 case ATTR_LINK_CONSTRUCTOR
:
2494 DECL_STATIC_CONSTRUCTOR (decl
) = 1;
2495 TREE_USED (decl
) = 1;
2498 case ATTR_LINK_DESTRUCTOR
:
2499 DECL_STATIC_DESTRUCTOR (decl
) = 1;
2500 TREE_USED (decl
) = 1;
2503 case ATTR_THREAD_LOCAL_STORAGE
:
2504 DECL_TLS_MODEL (decl
) = decl_default_tls_model (decl
);
2505 DECL_COMMON (decl
) = 0;
2510 /* Record DECL as a global renaming pointer. */
2513 record_global_renaming_pointer (tree decl
)
2515 gcc_assert (!DECL_LOOP_PARM_P (decl
) && DECL_RENAMED_OBJECT (decl
));
2516 VEC_safe_push (tree
, gc
, global_renaming_pointers
, decl
);
2519 /* Invalidate the global renaming pointers. */
2522 invalidate_global_renaming_pointers (void)
2527 FOR_EACH_VEC_ELT (tree
, global_renaming_pointers
, i
, iter
)
2528 SET_DECL_RENAMED_OBJECT (iter
, NULL_TREE
);
2530 VEC_free (tree
, gc
, global_renaming_pointers
);
2533 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
2537 value_factor_p (tree value
, HOST_WIDE_INT factor
)
2539 if (host_integerp (value
, 1))
2540 return tree_low_cst (value
, 1) % factor
== 0;
2542 if (TREE_CODE (value
) == MULT_EXPR
)
2543 return (value_factor_p (TREE_OPERAND (value
, 0), factor
)
2544 || value_factor_p (TREE_OPERAND (value
, 1), factor
));
2549 /* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
2550 unless we can prove these 2 fields are laid out in such a way that no gap
2551 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
2552 is the distance in bits between the end of PREV_FIELD and the starting
2553 position of CURR_FIELD. It is ignored if null. */
2556 potential_alignment_gap (tree prev_field
, tree curr_field
, tree offset
)
2558 /* If this is the first field of the record, there cannot be any gap */
2562 /* If the previous field is a union type, then return False: The only
2563 time when such a field is not the last field of the record is when
2564 there are other components at fixed positions after it (meaning there
2565 was a rep clause for every field), in which case we don't want the
2566 alignment constraint to override them. */
2567 if (TREE_CODE (TREE_TYPE (prev_field
)) == QUAL_UNION_TYPE
)
2570 /* If the distance between the end of prev_field and the beginning of
2571 curr_field is constant, then there is a gap if the value of this
2572 constant is not null. */
2573 if (offset
&& host_integerp (offset
, 1))
2574 return !integer_zerop (offset
);
2576 /* If the size and position of the previous field are constant,
2577 then check the sum of this size and position. There will be a gap
2578 iff it is not multiple of the current field alignment. */
2579 if (host_integerp (DECL_SIZE (prev_field
), 1)
2580 && host_integerp (bit_position (prev_field
), 1))
2581 return ((tree_low_cst (bit_position (prev_field
), 1)
2582 + tree_low_cst (DECL_SIZE (prev_field
), 1))
2583 % DECL_ALIGN (curr_field
) != 0);
2585 /* If both the position and size of the previous field are multiples
2586 of the current field alignment, there cannot be any gap. */
2587 if (value_factor_p (bit_position (prev_field
), DECL_ALIGN (curr_field
))
2588 && value_factor_p (DECL_SIZE (prev_field
), DECL_ALIGN (curr_field
)))
2591 /* Fallback, return that there may be a potential gap */
2595 /* Return a LABEL_DECL with LABEL_NAME. GNAT_NODE is used for the position
2599 create_label_decl (tree label_name
, Node_Id gnat_node
)
2602 = build_decl (input_location
, LABEL_DECL
, label_name
, void_type_node
);
2604 DECL_MODE (label_decl
) = VOIDmode
;
2606 /* Add this decl to the current binding level. */
2607 gnat_pushdecl (label_decl
, gnat_node
);
2612 /* Return a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
2613 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
2614 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
2615 PARM_DECL nodes chained through the DECL_CHAIN field).
2617 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, ARTIFICIAL_FLAG and ATTR_LIST are
2618 used to set the appropriate fields in the FUNCTION_DECL. GNAT_NODE is
2619 used for the position of the decl. */
2622 create_subprog_decl (tree subprog_name
, tree asm_name
, tree subprog_type
,
2623 tree param_decl_list
, bool inline_flag
, bool public_flag
,
2624 bool extern_flag
, bool artificial_flag
,
2625 struct attrib
*attr_list
, Node_Id gnat_node
)
2627 tree subprog_decl
= build_decl (input_location
, FUNCTION_DECL
, subprog_name
,
2629 tree result_decl
= build_decl (input_location
, RESULT_DECL
, NULL_TREE
,
2630 TREE_TYPE (subprog_type
));
2631 DECL_ARGUMENTS (subprog_decl
) = param_decl_list
;
2633 /* If this is a non-inline function nested inside an inlined external
2634 function, we cannot honor both requests without cloning the nested
2635 function in the current unit since it is private to the other unit.
2636 We could inline the nested function as well but it's probably better
2637 to err on the side of too little inlining. */
2640 && current_function_decl
2641 && DECL_DECLARED_INLINE_P (current_function_decl
)
2642 && DECL_EXTERNAL (current_function_decl
))
2643 DECL_DECLARED_INLINE_P (current_function_decl
) = 0;
2645 DECL_ARTIFICIAL (subprog_decl
) = artificial_flag
;
2646 DECL_EXTERNAL (subprog_decl
) = extern_flag
;
2647 DECL_DECLARED_INLINE_P (subprog_decl
) = inline_flag
;
2648 DECL_NO_INLINE_WARNING_P (subprog_decl
) = inline_flag
&& artificial_flag
;
2650 TREE_PUBLIC (subprog_decl
) = public_flag
;
2651 TREE_READONLY (subprog_decl
) = TYPE_READONLY (subprog_type
);
2652 TREE_THIS_VOLATILE (subprog_decl
) = TYPE_VOLATILE (subprog_type
);
2653 TREE_SIDE_EFFECTS (subprog_decl
) = TYPE_VOLATILE (subprog_type
);
2655 DECL_ARTIFICIAL (result_decl
) = 1;
2656 DECL_IGNORED_P (result_decl
) = 1;
2657 DECL_BY_REFERENCE (result_decl
) = TREE_ADDRESSABLE (subprog_type
);
2658 DECL_RESULT (subprog_decl
) = result_decl
;
2662 SET_DECL_ASSEMBLER_NAME (subprog_decl
, asm_name
);
2664 /* The expand_main_function circuitry expects "main_identifier_node" to
2665 designate the DECL_NAME of the 'main' entry point, in turn expected
2666 to be declared as the "main" function literally by default. Ada
2667 program entry points are typically declared with a different name
2668 within the binder generated file, exported as 'main' to satisfy the
2669 system expectations. Force main_identifier_node in this case. */
2670 if (asm_name
== main_identifier_node
)
2671 DECL_NAME (subprog_decl
) = main_identifier_node
;
2674 /* Add this decl to the current binding level. */
2675 gnat_pushdecl (subprog_decl
, gnat_node
);
2677 process_attributes (subprog_decl
, attr_list
);
2679 /* Output the assembler code and/or RTL for the declaration. */
2680 rest_of_decl_compilation (subprog_decl
, global_bindings_p (), 0);
2682 return subprog_decl
;
2685 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
2686 body. This routine needs to be invoked before processing the declarations
2687 appearing in the subprogram. */
2690 begin_subprog_body (tree subprog_decl
)
2694 announce_function (subprog_decl
);
2696 /* This function is being defined. */
2697 TREE_STATIC (subprog_decl
) = 1;
2699 current_function_decl
= subprog_decl
;
2701 /* Enter a new binding level and show that all the parameters belong to
2705 for (param_decl
= DECL_ARGUMENTS (subprog_decl
); param_decl
;
2706 param_decl
= DECL_CHAIN (param_decl
))
2707 DECL_CONTEXT (param_decl
) = subprog_decl
;
2709 make_decl_rtl (subprog_decl
);
2712 /* Finish translating the current subprogram and set its BODY. */
2715 end_subprog_body (tree body
)
2717 tree fndecl
= current_function_decl
;
2719 /* Attach the BLOCK for this level to the function and pop the level. */
2720 BLOCK_SUPERCONTEXT (current_binding_level
->block
) = fndecl
;
2721 DECL_INITIAL (fndecl
) = current_binding_level
->block
;
2724 /* Mark the RESULT_DECL as being in this subprogram. */
2725 DECL_CONTEXT (DECL_RESULT (fndecl
)) = fndecl
;
2727 /* The body should be a BIND_EXPR whose BLOCK is the top-level one. */
2728 if (TREE_CODE (body
) == BIND_EXPR
)
2730 BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body
)) = fndecl
;
2731 DECL_INITIAL (fndecl
) = BIND_EXPR_BLOCK (body
);
2734 DECL_SAVED_TREE (fndecl
) = body
;
2736 current_function_decl
= decl_function_context (fndecl
);
2739 /* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
2742 rest_of_subprog_body_compilation (tree subprog_decl
)
2744 /* We cannot track the location of errors past this point. */
2745 error_gnat_node
= Empty
;
2747 /* If we're only annotating types, don't actually compile this function. */
2748 if (type_annotate_only
)
2751 /* Dump functions before gimplification. */
2752 dump_function (TDI_original
, subprog_decl
);
2754 if (!decl_function_context (subprog_decl
))
2755 cgraph_finalize_function (subprog_decl
, false);
2757 /* Register this function with cgraph just far enough to get it
2758 added to our parent's nested function list. */
2759 (void) cgraph_get_create_node (subprog_decl
);
2763 gnat_builtin_function (tree decl
)
2765 gnat_pushdecl (decl
, Empty
);
2769 /* Return an integer type with the number of bits of precision given by
2770 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2771 it is a signed type. */
2774 gnat_type_for_size (unsigned precision
, int unsignedp
)
2779 if (precision
<= 2 * MAX_BITS_PER_WORD
2780 && signed_and_unsigned_types
[precision
][unsignedp
])
2781 return signed_and_unsigned_types
[precision
][unsignedp
];
2784 t
= make_unsigned_type (precision
);
2786 t
= make_signed_type (precision
);
2788 if (precision
<= 2 * MAX_BITS_PER_WORD
)
2789 signed_and_unsigned_types
[precision
][unsignedp
] = t
;
2793 sprintf (type_name
, "%sSIGNED_%d", unsignedp
? "UN" : "", precision
);
2794 TYPE_NAME (t
) = get_identifier (type_name
);
2800 /* Likewise for floating-point types. */
2803 float_type_for_precision (int precision
, enum machine_mode mode
)
2808 if (float_types
[(int) mode
])
2809 return float_types
[(int) mode
];
2811 float_types
[(int) mode
] = t
= make_node (REAL_TYPE
);
2812 TYPE_PRECISION (t
) = precision
;
2815 gcc_assert (TYPE_MODE (t
) == mode
);
2818 sprintf (type_name
, "FLOAT_%d", precision
);
2819 TYPE_NAME (t
) = get_identifier (type_name
);
2825 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2826 an unsigned type; otherwise a signed type is returned. */
2829 gnat_type_for_mode (enum machine_mode mode
, int unsignedp
)
2831 if (mode
== BLKmode
)
2834 if (mode
== VOIDmode
)
2835 return void_type_node
;
2837 if (COMPLEX_MODE_P (mode
))
2840 if (SCALAR_FLOAT_MODE_P (mode
))
2841 return float_type_for_precision (GET_MODE_PRECISION (mode
), mode
);
2843 if (SCALAR_INT_MODE_P (mode
))
2844 return gnat_type_for_size (GET_MODE_BITSIZE (mode
), unsignedp
);
2846 if (VECTOR_MODE_P (mode
))
2848 enum machine_mode inner_mode
= GET_MODE_INNER (mode
);
2849 tree inner_type
= gnat_type_for_mode (inner_mode
, unsignedp
);
2851 return build_vector_type_for_mode (inner_type
, mode
);
2857 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2860 gnat_unsigned_type (tree type_node
)
2862 tree type
= gnat_type_for_size (TYPE_PRECISION (type_node
), 1);
2864 if (TREE_CODE (type_node
) == INTEGER_TYPE
&& TYPE_MODULAR_P (type_node
))
2866 type
= copy_node (type
);
2867 TREE_TYPE (type
) = type_node
;
2869 else if (TREE_TYPE (type_node
)
2870 && TREE_CODE (TREE_TYPE (type_node
)) == INTEGER_TYPE
2871 && TYPE_MODULAR_P (TREE_TYPE (type_node
)))
2873 type
= copy_node (type
);
2874 TREE_TYPE (type
) = TREE_TYPE (type_node
);
2880 /* Return the signed version of a TYPE_NODE, a scalar type. */
2883 gnat_signed_type (tree type_node
)
2885 tree type
= gnat_type_for_size (TYPE_PRECISION (type_node
), 0);
2887 if (TREE_CODE (type_node
) == INTEGER_TYPE
&& TYPE_MODULAR_P (type_node
))
2889 type
= copy_node (type
);
2890 TREE_TYPE (type
) = type_node
;
2892 else if (TREE_TYPE (type_node
)
2893 && TREE_CODE (TREE_TYPE (type_node
)) == INTEGER_TYPE
2894 && TYPE_MODULAR_P (TREE_TYPE (type_node
)))
2896 type
= copy_node (type
);
2897 TREE_TYPE (type
) = TREE_TYPE (type_node
);
2903 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2904 transparently converted to each other. */
2907 gnat_types_compatible_p (tree t1
, tree t2
)
2909 enum tree_code code
;
2911 /* This is the default criterion. */
2912 if (TYPE_MAIN_VARIANT (t1
) == TYPE_MAIN_VARIANT (t2
))
2915 /* We only check structural equivalence here. */
2916 if ((code
= TREE_CODE (t1
)) != TREE_CODE (t2
))
2919 /* Vector types are also compatible if they have the same number of subparts
2920 and the same form of (scalar) element type. */
2921 if (code
== VECTOR_TYPE
2922 && TYPE_VECTOR_SUBPARTS (t1
) == TYPE_VECTOR_SUBPARTS (t2
)
2923 && TREE_CODE (TREE_TYPE (t1
)) == TREE_CODE (TREE_TYPE (t2
))
2924 && TYPE_PRECISION (TREE_TYPE (t1
)) == TYPE_PRECISION (TREE_TYPE (t2
)))
2927 /* Array types are also compatible if they are constrained and have the same
2928 domain(s) and the same component type. */
2929 if (code
== ARRAY_TYPE
2930 && (TYPE_DOMAIN (t1
) == TYPE_DOMAIN (t2
)
2931 || (TYPE_DOMAIN (t1
)
2933 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1
)),
2934 TYPE_MIN_VALUE (TYPE_DOMAIN (t2
)))
2935 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1
)),
2936 TYPE_MAX_VALUE (TYPE_DOMAIN (t2
)))))
2937 && (TREE_TYPE (t1
) == TREE_TYPE (t2
)
2938 || (TREE_CODE (TREE_TYPE (t1
)) == ARRAY_TYPE
2939 && gnat_types_compatible_p (TREE_TYPE (t1
), TREE_TYPE (t2
)))))
2945 /* Return true if EXPR is a useless type conversion. */
2948 gnat_useless_type_conversion (tree expr
)
2950 if (CONVERT_EXPR_P (expr
)
2951 || TREE_CODE (expr
) == VIEW_CONVERT_EXPR
2952 || TREE_CODE (expr
) == NON_LVALUE_EXPR
)
2953 return gnat_types_compatible_p (TREE_TYPE (expr
),
2954 TREE_TYPE (TREE_OPERAND (expr
, 0)));
2959 /* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
2962 fntype_same_flags_p (const_tree t
, tree cico_list
, bool return_unconstrained_p
,
2963 bool return_by_direct_ref_p
, bool return_by_invisi_ref_p
)
2965 return TYPE_CI_CO_LIST (t
) == cico_list
2966 && TYPE_RETURN_UNCONSTRAINED_P (t
) == return_unconstrained_p
2967 && TYPE_RETURN_BY_DIRECT_REF_P (t
) == return_by_direct_ref_p
2968 && TREE_ADDRESSABLE (t
) == return_by_invisi_ref_p
;
2971 /* EXP is an expression for the size of an object. If this size contains
2972 discriminant references, replace them with the maximum (if MAX_P) or
2973 minimum (if !MAX_P) possible value of the discriminant. */
2976 max_size (tree exp
, bool max_p
)
2978 enum tree_code code
= TREE_CODE (exp
);
2979 tree type
= TREE_TYPE (exp
);
2981 switch (TREE_CODE_CLASS (code
))
2983 case tcc_declaration
:
2988 if (code
== CALL_EXPR
)
2993 t
= maybe_inline_call_in_expr (exp
);
2995 return max_size (t
, max_p
);
2997 n
= call_expr_nargs (exp
);
2999 argarray
= XALLOCAVEC (tree
, n
);
3000 for (i
= 0; i
< n
; i
++)
3001 argarray
[i
] = max_size (CALL_EXPR_ARG (exp
, i
), max_p
);
3002 return build_call_array (type
, CALL_EXPR_FN (exp
), n
, argarray
);
3007 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
3008 modify. Otherwise, we treat it like a variable. */
3009 if (!CONTAINS_PLACEHOLDER_P (exp
))
3012 type
= TREE_TYPE (TREE_OPERAND (exp
, 1));
3014 max_size (max_p
? TYPE_MAX_VALUE (type
) : TYPE_MIN_VALUE (type
), true);
3016 case tcc_comparison
:
3017 return max_p
? size_one_node
: size_zero_node
;
3021 case tcc_expression
:
3022 switch (TREE_CODE_LENGTH (code
))
3025 if (code
== SAVE_EXPR
)
3027 else if (code
== NON_LVALUE_EXPR
)
3028 return max_size (TREE_OPERAND (exp
, 0), max_p
);
3031 fold_build1 (code
, type
,
3032 max_size (TREE_OPERAND (exp
, 0),
3033 code
== NEGATE_EXPR
? !max_p
: max_p
));
3036 if (code
== COMPOUND_EXPR
)
3037 return max_size (TREE_OPERAND (exp
, 1), max_p
);
3040 tree lhs
= max_size (TREE_OPERAND (exp
, 0), max_p
);
3041 tree rhs
= max_size (TREE_OPERAND (exp
, 1),
3042 code
== MINUS_EXPR
? !max_p
: max_p
);
3044 /* Special-case wanting the maximum value of a MIN_EXPR.
3045 In that case, if one side overflows, return the other.
3046 sizetype is signed, but we know sizes are non-negative.
3047 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
3048 overflowing and the RHS a variable. */
3051 && TREE_CODE (rhs
) == INTEGER_CST
3052 && TREE_OVERFLOW (rhs
))
3056 && TREE_CODE (lhs
) == INTEGER_CST
3057 && TREE_OVERFLOW (lhs
))
3059 else if ((code
== MINUS_EXPR
|| code
== PLUS_EXPR
)
3060 && TREE_CODE (lhs
) == INTEGER_CST
3061 && TREE_OVERFLOW (lhs
)
3062 && !TREE_CONSTANT (rhs
))
3065 return fold_build2 (code
, type
, lhs
, rhs
);
3069 if (code
== COND_EXPR
)
3070 return fold_build2 (max_p
? MAX_EXPR
: MIN_EXPR
, type
,
3071 max_size (TREE_OPERAND (exp
, 1), max_p
),
3072 max_size (TREE_OPERAND (exp
, 2), max_p
));
3075 /* Other tree classes cannot happen. */
3083 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
3084 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
3085 Return a constructor for the template. */
3088 build_template (tree template_type
, tree array_type
, tree expr
)
3090 VEC(constructor_elt
,gc
) *template_elts
= NULL
;
3091 tree bound_list
= NULL_TREE
;
3094 while (TREE_CODE (array_type
) == RECORD_TYPE
3095 && (TYPE_PADDING_P (array_type
)
3096 || TYPE_JUSTIFIED_MODULAR_P (array_type
)))
3097 array_type
= TREE_TYPE (TYPE_FIELDS (array_type
));
3099 if (TREE_CODE (array_type
) == ARRAY_TYPE
3100 || (TREE_CODE (array_type
) == INTEGER_TYPE
3101 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type
)))
3102 bound_list
= TYPE_ACTUAL_BOUNDS (array_type
);
3104 /* First make the list for a CONSTRUCTOR for the template. Go down the
3105 field list of the template instead of the type chain because this
3106 array might be an Ada array of arrays and we can't tell where the
3107 nested arrays stop being the underlying object. */
3109 for (field
= TYPE_FIELDS (template_type
); field
;
3111 ? (bound_list
= TREE_CHAIN (bound_list
))
3112 : (array_type
= TREE_TYPE (array_type
))),
3113 field
= DECL_CHAIN (DECL_CHAIN (field
)))
3115 tree bounds
, min
, max
;
3117 /* If we have a bound list, get the bounds from there. Likewise
3118 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
3119 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
3120 This will give us a maximum range. */
3122 bounds
= TREE_VALUE (bound_list
);
3123 else if (TREE_CODE (array_type
) == ARRAY_TYPE
)
3124 bounds
= TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type
));
3125 else if (expr
&& TREE_CODE (expr
) == PARM_DECL
3126 && DECL_BY_COMPONENT_PTR_P (expr
))
3127 bounds
= TREE_TYPE (field
);
3131 min
= convert (TREE_TYPE (field
), TYPE_MIN_VALUE (bounds
));
3132 max
= convert (TREE_TYPE (DECL_CHAIN (field
)), TYPE_MAX_VALUE (bounds
));
3134 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
3135 substitute it from OBJECT. */
3136 min
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (min
, expr
);
3137 max
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (max
, expr
);
3139 CONSTRUCTOR_APPEND_ELT (template_elts
, field
, min
);
3140 CONSTRUCTOR_APPEND_ELT (template_elts
, DECL_CHAIN (field
), max
);
3143 return gnat_build_constructor (template_type
, template_elts
);
3146 /* Helper routine to make a descriptor field. FIELD_LIST is the list of decls
3147 being built; the new decl is chained on to the front of the list. */
3150 make_descriptor_field (const char *name
, tree type
, tree rec_type
,
3151 tree initial
, tree field_list
)
3154 = create_field_decl (get_identifier (name
), type
, rec_type
, NULL_TREE
,
3157 DECL_INITIAL (field
) = initial
;
3158 DECL_CHAIN (field
) = field_list
;
3162 /* Build a 32-bit VMS descriptor from a Mechanism_Type, which must specify a
3163 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
3164 type contains in its DECL_INITIAL the expression to use when a constructor
3165 is made for the type. GNAT_ENTITY is an entity used to print out an error
3166 message if the mechanism cannot be applied to an object of that type and
3167 also for the name. */
3170 build_vms_descriptor32 (tree type
, Mechanism_Type mech
, Entity_Id gnat_entity
)
3172 tree record_type
= make_node (RECORD_TYPE
);
3173 tree pointer32_type
, pointer64_type
;
3174 tree field_list
= NULL_TREE
;
3175 int klass
, ndim
, i
, dtype
= 0;
3176 tree inner_type
, tem
;
3179 /* If TYPE is an unconstrained array, use the underlying array type. */
3180 if (TREE_CODE (type
) == UNCONSTRAINED_ARRAY_TYPE
)
3181 type
= TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type
))));
3183 /* If this is an array, compute the number of dimensions in the array,
3184 get the index types, and point to the inner type. */
3185 if (TREE_CODE (type
) != ARRAY_TYPE
)
3188 for (ndim
= 1, inner_type
= type
;
3189 TREE_CODE (TREE_TYPE (inner_type
)) == ARRAY_TYPE
3190 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type
));
3191 ndim
++, inner_type
= TREE_TYPE (inner_type
))
3194 idx_arr
= XALLOCAVEC (tree
, ndim
);
3196 if (mech
!= By_Descriptor_NCA
&& mech
!= By_Short_Descriptor_NCA
3197 && TREE_CODE (type
) == ARRAY_TYPE
&& TYPE_CONVENTION_FORTRAN_P (type
))
3198 for (i
= ndim
- 1, inner_type
= type
;
3200 i
--, inner_type
= TREE_TYPE (inner_type
))
3201 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
3203 for (i
= 0, inner_type
= type
;
3205 i
++, inner_type
= TREE_TYPE (inner_type
))
3206 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
3208 /* Now get the DTYPE value. */
3209 switch (TREE_CODE (type
))
3214 if (TYPE_VAX_FLOATING_POINT_P (type
))
3215 switch (tree_low_cst (TYPE_DIGITS_VALUE (type
), 1))
3228 switch (GET_MODE_BITSIZE (TYPE_MODE (type
)))
3231 dtype
= TYPE_UNSIGNED (type
) ? 2 : 6;
3234 dtype
= TYPE_UNSIGNED (type
) ? 3 : 7;
3237 dtype
= TYPE_UNSIGNED (type
) ? 4 : 8;
3240 dtype
= TYPE_UNSIGNED (type
) ? 5 : 9;
3243 dtype
= TYPE_UNSIGNED (type
) ? 25 : 26;
3249 dtype
= GET_MODE_BITSIZE (TYPE_MODE (type
)) == 32 ? 52 : 53;
3253 if (TREE_CODE (TREE_TYPE (type
)) == INTEGER_TYPE
3254 && TYPE_VAX_FLOATING_POINT_P (type
))
3255 switch (tree_low_cst (TYPE_DIGITS_VALUE (type
), 1))
3267 dtype
= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))) == 32 ? 54: 55;
3278 /* Get the CLASS value. */
3281 case By_Descriptor_A
:
3282 case By_Short_Descriptor_A
:
3285 case By_Descriptor_NCA
:
3286 case By_Short_Descriptor_NCA
:
3289 case By_Descriptor_SB
:
3290 case By_Short_Descriptor_SB
:
3294 case By_Short_Descriptor
:
3295 case By_Descriptor_S
:
3296 case By_Short_Descriptor_S
:
3302 /* Make the type for a descriptor for VMS. The first four fields are the
3303 same for all types. */
3305 = make_descriptor_field ("LENGTH", gnat_type_for_size (16, 1), record_type
,
3306 size_in_bytes ((mech
== By_Descriptor_A
3307 || mech
== By_Short_Descriptor_A
)
3308 ? inner_type
: type
),
3311 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1), record_type
,
3312 size_int (dtype
), field_list
);
3314 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1), record_type
,
3315 size_int (klass
), field_list
);
3317 pointer32_type
= build_pointer_type_for_mode (type
, SImode
, false);
3318 pointer64_type
= build_pointer_type_for_mode (type
, DImode
, false);
3320 /* Ensure that only 32-bit pointers are passed in 32-bit descriptors. Note
3321 that we cannot build a template call to the CE routine as it would get a
3322 wrong source location; instead we use a second placeholder for it. */
3323 tem
= build_unary_op (ADDR_EXPR
, pointer64_type
,
3324 build0 (PLACEHOLDER_EXPR
, type
));
3325 tem
= build3 (COND_EXPR
, pointer32_type
,
3327 ? build_binary_op (GE_EXPR
, boolean_type_node
, tem
,
3328 build_int_cstu (pointer64_type
, 0x80000000))
3329 : boolean_false_node
,
3330 build0 (PLACEHOLDER_EXPR
, void_type_node
),
3331 convert (pointer32_type
, tem
));
3334 = make_descriptor_field ("POINTER", pointer32_type
, record_type
, tem
,
3340 case By_Short_Descriptor
:
3341 case By_Descriptor_S
:
3342 case By_Short_Descriptor_S
:
3345 case By_Descriptor_SB
:
3346 case By_Short_Descriptor_SB
:
3348 = make_descriptor_field ("SB_L1", gnat_type_for_size (32, 1),
3350 (TREE_CODE (type
) == ARRAY_TYPE
3351 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type
))
3355 = make_descriptor_field ("SB_U1", gnat_type_for_size (32, 1),
3357 (TREE_CODE (type
) == ARRAY_TYPE
3358 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type
))
3363 case By_Descriptor_A
:
3364 case By_Short_Descriptor_A
:
3365 case By_Descriptor_NCA
:
3366 case By_Short_Descriptor_NCA
:
3368 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
3369 record_type
, size_zero_node
, field_list
);
3372 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
3373 record_type
, size_zero_node
, field_list
);
3376 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
3378 size_int ((mech
== By_Descriptor_NCA
3379 || mech
== By_Short_Descriptor_NCA
)
3381 /* Set FL_COLUMN, FL_COEFF, and
3383 : (TREE_CODE (type
) == ARRAY_TYPE
3384 && TYPE_CONVENTION_FORTRAN_P
3390 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
3391 record_type
, size_int (ndim
), field_list
);
3394 = make_descriptor_field ("ARSIZE", gnat_type_for_size (32, 1),
3395 record_type
, size_in_bytes (type
),
3398 /* Now build a pointer to the 0,0,0... element. */
3399 tem
= build0 (PLACEHOLDER_EXPR
, type
);
3400 for (i
= 0, inner_type
= type
; i
< ndim
;
3401 i
++, inner_type
= TREE_TYPE (inner_type
))
3402 tem
= build4 (ARRAY_REF
, TREE_TYPE (inner_type
), tem
,
3403 convert (TYPE_DOMAIN (inner_type
), size_zero_node
),
3404 NULL_TREE
, NULL_TREE
);
3407 = make_descriptor_field ("A0", pointer32_type
, record_type
,
3408 build1 (ADDR_EXPR
, pointer32_type
, tem
),
3411 /* Next come the addressing coefficients. */
3412 tem
= size_one_node
;
3413 for (i
= 0; i
< ndim
; i
++)
3417 = size_binop (MULT_EXPR
, tem
,
3418 size_binop (PLUS_EXPR
,
3419 size_binop (MINUS_EXPR
,
3420 TYPE_MAX_VALUE (idx_arr
[i
]),
3421 TYPE_MIN_VALUE (idx_arr
[i
])),
3424 fname
[0] = ((mech
== By_Descriptor_NCA
||
3425 mech
== By_Short_Descriptor_NCA
) ? 'S' : 'M');
3426 fname
[1] = '0' + i
, fname
[2] = 0;
3428 = make_descriptor_field (fname
, gnat_type_for_size (32, 1),
3429 record_type
, idx_length
, field_list
);
3431 if (mech
== By_Descriptor_NCA
|| mech
== By_Short_Descriptor_NCA
)
3435 /* Finally here are the bounds. */
3436 for (i
= 0; i
< ndim
; i
++)
3440 fname
[0] = 'L', fname
[1] = '0' + i
, fname
[2] = 0;
3442 = make_descriptor_field (fname
, gnat_type_for_size (32, 1),
3443 record_type
, TYPE_MIN_VALUE (idx_arr
[i
]),
3448 = make_descriptor_field (fname
, gnat_type_for_size (32, 1),
3449 record_type
, TYPE_MAX_VALUE (idx_arr
[i
]),
3455 post_error ("unsupported descriptor type for &", gnat_entity
);
3458 TYPE_NAME (record_type
) = create_concat_name (gnat_entity
, "DESC");
3459 finish_record_type (record_type
, nreverse (field_list
), 0, false);
3463 /* Build a 64-bit VMS descriptor from a Mechanism_Type, which must specify a
3464 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
3465 type contains in its DECL_INITIAL the expression to use when a constructor
3466 is made for the type. GNAT_ENTITY is an entity used to print out an error
3467 message if the mechanism cannot be applied to an object of that type and
3468 also for the name. */
3471 build_vms_descriptor (tree type
, Mechanism_Type mech
, Entity_Id gnat_entity
)
3473 tree record_type
= make_node (RECORD_TYPE
);
3474 tree pointer64_type
;
3475 tree field_list
= NULL_TREE
;
3476 int klass
, ndim
, i
, dtype
= 0;
3477 tree inner_type
, tem
;
3480 /* If TYPE is an unconstrained array, use the underlying array type. */
3481 if (TREE_CODE (type
) == UNCONSTRAINED_ARRAY_TYPE
)
3482 type
= TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type
))));
3484 /* If this is an array, compute the number of dimensions in the array,
3485 get the index types, and point to the inner type. */
3486 if (TREE_CODE (type
) != ARRAY_TYPE
)
3489 for (ndim
= 1, inner_type
= type
;
3490 TREE_CODE (TREE_TYPE (inner_type
)) == ARRAY_TYPE
3491 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type
));
3492 ndim
++, inner_type
= TREE_TYPE (inner_type
))
3495 idx_arr
= XALLOCAVEC (tree
, ndim
);
3497 if (mech
!= By_Descriptor_NCA
3498 && TREE_CODE (type
) == ARRAY_TYPE
&& TYPE_CONVENTION_FORTRAN_P (type
))
3499 for (i
= ndim
- 1, inner_type
= type
;
3501 i
--, inner_type
= TREE_TYPE (inner_type
))
3502 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
3504 for (i
= 0, inner_type
= type
;
3506 i
++, inner_type
= TREE_TYPE (inner_type
))
3507 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
3509 /* Now get the DTYPE value. */
3510 switch (TREE_CODE (type
))
3515 if (TYPE_VAX_FLOATING_POINT_P (type
))
3516 switch (tree_low_cst (TYPE_DIGITS_VALUE (type
), 1))
3529 switch (GET_MODE_BITSIZE (TYPE_MODE (type
)))
3532 dtype
= TYPE_UNSIGNED (type
) ? 2 : 6;
3535 dtype
= TYPE_UNSIGNED (type
) ? 3 : 7;
3538 dtype
= TYPE_UNSIGNED (type
) ? 4 : 8;
3541 dtype
= TYPE_UNSIGNED (type
) ? 5 : 9;
3544 dtype
= TYPE_UNSIGNED (type
) ? 25 : 26;
3550 dtype
= GET_MODE_BITSIZE (TYPE_MODE (type
)) == 32 ? 52 : 53;
3554 if (TREE_CODE (TREE_TYPE (type
)) == INTEGER_TYPE
3555 && TYPE_VAX_FLOATING_POINT_P (type
))
3556 switch (tree_low_cst (TYPE_DIGITS_VALUE (type
), 1))
3568 dtype
= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))) == 32 ? 54: 55;
3579 /* Get the CLASS value. */
3582 case By_Descriptor_A
:
3585 case By_Descriptor_NCA
:
3588 case By_Descriptor_SB
:
3592 case By_Descriptor_S
:
3598 /* Make the type for a 64-bit descriptor for VMS. The first six fields
3599 are the same for all types. */
3601 = make_descriptor_field ("MBO", gnat_type_for_size (16, 1),
3602 record_type
, size_int (1), field_list
);
3604 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1),
3605 record_type
, size_int (dtype
), field_list
);
3607 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1),
3608 record_type
, size_int (klass
), field_list
);
3610 = make_descriptor_field ("MBMO", gnat_type_for_size (32, 1),
3611 record_type
, size_int (-1), field_list
);
3613 = make_descriptor_field ("LENGTH", gnat_type_for_size (64, 1),
3615 size_in_bytes (mech
== By_Descriptor_A
3616 ? inner_type
: type
),
3619 pointer64_type
= build_pointer_type_for_mode (type
, DImode
, false);
3622 = make_descriptor_field ("POINTER", pointer64_type
, record_type
,
3623 build_unary_op (ADDR_EXPR
, pointer64_type
,
3624 build0 (PLACEHOLDER_EXPR
, type
)),
3630 case By_Descriptor_S
:
3633 case By_Descriptor_SB
:
3635 = make_descriptor_field ("SB_L1", gnat_type_for_size (64, 1),
3637 (TREE_CODE (type
) == ARRAY_TYPE
3638 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type
))
3642 = make_descriptor_field ("SB_U1", gnat_type_for_size (64, 1),
3644 (TREE_CODE (type
) == ARRAY_TYPE
3645 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type
))
3650 case By_Descriptor_A
:
3651 case By_Descriptor_NCA
:
3653 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
3654 record_type
, size_zero_node
, field_list
);
3657 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
3658 record_type
, size_zero_node
, field_list
);
3660 dtype
= (mech
== By_Descriptor_NCA
3662 /* Set FL_COLUMN, FL_COEFF, and
3664 : (TREE_CODE (type
) == ARRAY_TYPE
3665 && TYPE_CONVENTION_FORTRAN_P (type
)
3668 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
3669 record_type
, size_int (dtype
),
3673 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
3674 record_type
, size_int (ndim
), field_list
);
3677 = make_descriptor_field ("MBZ", gnat_type_for_size (32, 1),
3678 record_type
, size_int (0), field_list
);
3680 = make_descriptor_field ("ARSIZE", gnat_type_for_size (64, 1),
3681 record_type
, size_in_bytes (type
),
3684 /* Now build a pointer to the 0,0,0... element. */
3685 tem
= build0 (PLACEHOLDER_EXPR
, type
);
3686 for (i
= 0, inner_type
= type
; i
< ndim
;
3687 i
++, inner_type
= TREE_TYPE (inner_type
))
3688 tem
= build4 (ARRAY_REF
, TREE_TYPE (inner_type
), tem
,
3689 convert (TYPE_DOMAIN (inner_type
), size_zero_node
),
3690 NULL_TREE
, NULL_TREE
);
3693 = make_descriptor_field ("A0", pointer64_type
, record_type
,
3694 build1 (ADDR_EXPR
, pointer64_type
, tem
),
3697 /* Next come the addressing coefficients. */
3698 tem
= size_one_node
;
3699 for (i
= 0; i
< ndim
; i
++)
3703 = size_binop (MULT_EXPR
, tem
,
3704 size_binop (PLUS_EXPR
,
3705 size_binop (MINUS_EXPR
,
3706 TYPE_MAX_VALUE (idx_arr
[i
]),
3707 TYPE_MIN_VALUE (idx_arr
[i
])),
3710 fname
[0] = (mech
== By_Descriptor_NCA
? 'S' : 'M');
3711 fname
[1] = '0' + i
, fname
[2] = 0;
3713 = make_descriptor_field (fname
, gnat_type_for_size (64, 1),
3714 record_type
, idx_length
, field_list
);
3716 if (mech
== By_Descriptor_NCA
)
3720 /* Finally here are the bounds. */
3721 for (i
= 0; i
< ndim
; i
++)
3725 fname
[0] = 'L', fname
[1] = '0' + i
, fname
[2] = 0;
3727 = make_descriptor_field (fname
, gnat_type_for_size (64, 1),
3729 TYPE_MIN_VALUE (idx_arr
[i
]), field_list
);
3733 = make_descriptor_field (fname
, gnat_type_for_size (64, 1),
3735 TYPE_MAX_VALUE (idx_arr
[i
]), field_list
);
3740 post_error ("unsupported descriptor type for &", gnat_entity
);
3743 TYPE_NAME (record_type
) = create_concat_name (gnat_entity
, "DESC64");
3744 finish_record_type (record_type
, nreverse (field_list
), 0, false);
3748 /* Fill in a VMS descriptor of GNU_TYPE for GNU_EXPR and return the result.
3749 GNAT_ACTUAL is the actual parameter for which the descriptor is built. */
3752 fill_vms_descriptor (tree gnu_type
, tree gnu_expr
, Node_Id gnat_actual
)
3754 VEC(constructor_elt
,gc
) *v
= NULL
;
3757 gnu_expr
= maybe_unconstrained_array (gnu_expr
);
3758 gnu_expr
= gnat_protect_expr (gnu_expr
);
3759 gnat_mark_addressable (gnu_expr
);
3761 /* We may need to substitute both GNU_EXPR and a CALL_EXPR to the raise CE
3762 routine in case we have a 32-bit descriptor. */
3763 gnu_expr
= build2 (COMPOUND_EXPR
, void_type_node
,
3764 build_call_raise (CE_Range_Check_Failed
, gnat_actual
,
3765 N_Raise_Constraint_Error
),
3768 for (field
= TYPE_FIELDS (gnu_type
); field
; field
= DECL_CHAIN (field
))
3771 = convert (TREE_TYPE (field
),
3772 SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_INITIAL (field
),
3774 CONSTRUCTOR_APPEND_ELT (v
, field
, value
);
3777 return gnat_build_constructor (gnu_type
, v
);
3780 /* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
3781 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3782 which the VMS descriptor is passed. */
3785 convert_vms_descriptor64 (tree gnu_type
, tree gnu_expr
, Entity_Id gnat_subprog
)
3787 tree desc_type
= TREE_TYPE (TREE_TYPE (gnu_expr
));
3788 tree desc
= build1 (INDIRECT_REF
, desc_type
, gnu_expr
);
3789 /* The CLASS field is the 3rd field in the descriptor. */
3790 tree klass
= DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type
)));
3791 /* The POINTER field is the 6th field in the descriptor. */
3792 tree pointer
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (klass
)));
3794 /* Retrieve the value of the POINTER field. */
3796 = build3 (COMPONENT_REF
, TREE_TYPE (pointer
), desc
, pointer
, NULL_TREE
);
3798 if (POINTER_TYPE_P (gnu_type
))
3799 return convert (gnu_type
, gnu_expr64
);
3801 else if (TYPE_IS_FAT_POINTER_P (gnu_type
))
3803 tree p_array_type
= TREE_TYPE (TYPE_FIELDS (gnu_type
));
3804 tree p_bounds_type
= TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type
)));
3805 tree template_type
= TREE_TYPE (p_bounds_type
);
3806 tree min_field
= TYPE_FIELDS (template_type
);
3807 tree max_field
= DECL_CHAIN (TYPE_FIELDS (template_type
));
3808 tree template_tree
, template_addr
, aflags
, dimct
, t
, u
;
3809 /* See the head comment of build_vms_descriptor. */
3810 int iklass
= TREE_INT_CST_LOW (DECL_INITIAL (klass
));
3811 tree lfield
, ufield
;
3812 VEC(constructor_elt
,gc
) *v
;
3814 /* Convert POINTER to the pointer-to-array type. */
3815 gnu_expr64
= convert (p_array_type
, gnu_expr64
);
3819 case 1: /* Class S */
3820 case 15: /* Class SB */
3821 /* Build {1, LENGTH} template; LENGTH64 is the 5th field. */
3822 v
= VEC_alloc (constructor_elt
, gc
, 2);
3823 t
= DECL_CHAIN (DECL_CHAIN (klass
));
3824 t
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3825 CONSTRUCTOR_APPEND_ELT (v
, min_field
,
3826 convert (TREE_TYPE (min_field
),
3828 CONSTRUCTOR_APPEND_ELT (v
, max_field
,
3829 convert (TREE_TYPE (max_field
), t
));
3830 template_tree
= gnat_build_constructor (template_type
, v
);
3831 template_addr
= build_unary_op (ADDR_EXPR
, NULL_TREE
, template_tree
);
3833 /* For class S, we are done. */
3837 /* Test that we really have a SB descriptor, like DEC Ada. */
3838 t
= build3 (COMPONENT_REF
, TREE_TYPE (klass
), desc
, klass
, NULL
);
3839 u
= convert (TREE_TYPE (klass
), DECL_INITIAL (klass
));
3840 u
= build_binary_op (EQ_EXPR
, boolean_type_node
, t
, u
);
3841 /* If so, there is already a template in the descriptor and
3842 it is located right after the POINTER field. The fields are
3843 64bits so they must be repacked. */
3844 t
= DECL_CHAIN (pointer
);
3845 lfield
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3846 lfield
= convert (TREE_TYPE (TYPE_FIELDS (template_type
)), lfield
);
3849 ufield
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3851 (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type
))), ufield
);
3853 /* Build the template in the form of a constructor. */
3854 v
= VEC_alloc (constructor_elt
, gc
, 2);
3855 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (template_type
), lfield
);
3856 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (template_type
)),
3858 template_tree
= gnat_build_constructor (template_type
, v
);
3860 /* Otherwise use the {1, LENGTH} template we build above. */
3861 template_addr
= build3 (COND_EXPR
, p_bounds_type
, u
,
3862 build_unary_op (ADDR_EXPR
, p_bounds_type
,
3867 case 4: /* Class A */
3868 /* The AFLAGS field is the 3rd field after the pointer in the
3870 t
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer
)));
3871 aflags
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3872 /* The DIMCT field is the next field in the descriptor after
3875 dimct
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3876 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3877 or FL_COEFF or FL_BOUNDS not set. */
3878 u
= build_int_cst (TREE_TYPE (aflags
), 192);
3879 u
= build_binary_op (TRUTH_OR_EXPR
, boolean_type_node
,
3880 build_binary_op (NE_EXPR
, boolean_type_node
,
3882 convert (TREE_TYPE (dimct
),
3884 build_binary_op (NE_EXPR
, boolean_type_node
,
3885 build2 (BIT_AND_EXPR
,
3889 /* There is already a template in the descriptor and it is located
3890 in block 3. The fields are 64bits so they must be repacked. */
3891 t
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN
3893 lfield
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3894 lfield
= convert (TREE_TYPE (TYPE_FIELDS (template_type
)), lfield
);
3897 ufield
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3899 (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type
))), ufield
);
3901 /* Build the template in the form of a constructor. */
3902 v
= VEC_alloc (constructor_elt
, gc
, 2);
3903 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (template_type
), lfield
);
3904 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (template_type
)),
3906 template_tree
= gnat_build_constructor (template_type
, v
);
3907 template_tree
= build3 (COND_EXPR
, template_type
, u
,
3908 build_call_raise (CE_Length_Check_Failed
, Empty
,
3909 N_Raise_Constraint_Error
),
3912 = build_unary_op (ADDR_EXPR
, p_bounds_type
, template_tree
);
3915 case 10: /* Class NCA */
3917 post_error ("unsupported descriptor type for &", gnat_subprog
);
3918 template_addr
= integer_zero_node
;
3922 /* Build the fat pointer in the form of a constructor. */
3923 v
= VEC_alloc (constructor_elt
, gc
, 2);
3924 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (gnu_type
), gnu_expr64
);
3925 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (gnu_type
)),
3927 return gnat_build_constructor (gnu_type
, v
);
3934 /* Convert GNU_EXPR, a pointer to a 32bit VMS descriptor, to GNU_TYPE, a
3935 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3936 which the VMS descriptor is passed. */
3939 convert_vms_descriptor32 (tree gnu_type
, tree gnu_expr
, Entity_Id gnat_subprog
)
3941 tree desc_type
= TREE_TYPE (TREE_TYPE (gnu_expr
));
3942 tree desc
= build1 (INDIRECT_REF
, desc_type
, gnu_expr
);
3943 /* The CLASS field is the 3rd field in the descriptor. */
3944 tree klass
= DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type
)));
3945 /* The POINTER field is the 4th field in the descriptor. */
3946 tree pointer
= DECL_CHAIN (klass
);
3948 /* Retrieve the value of the POINTER field. */
3950 = build3 (COMPONENT_REF
, TREE_TYPE (pointer
), desc
, pointer
, NULL_TREE
);
3952 if (POINTER_TYPE_P (gnu_type
))
3953 return convert (gnu_type
, gnu_expr32
);
3955 else if (TYPE_IS_FAT_POINTER_P (gnu_type
))
3957 tree p_array_type
= TREE_TYPE (TYPE_FIELDS (gnu_type
));
3958 tree p_bounds_type
= TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type
)));
3959 tree template_type
= TREE_TYPE (p_bounds_type
);
3960 tree min_field
= TYPE_FIELDS (template_type
);
3961 tree max_field
= DECL_CHAIN (TYPE_FIELDS (template_type
));
3962 tree template_tree
, template_addr
, aflags
, dimct
, t
, u
;
3963 /* See the head comment of build_vms_descriptor. */
3964 int iklass
= TREE_INT_CST_LOW (DECL_INITIAL (klass
));
3965 VEC(constructor_elt
,gc
) *v
;
3967 /* Convert POINTER to the pointer-to-array type. */
3968 gnu_expr32
= convert (p_array_type
, gnu_expr32
);
3972 case 1: /* Class S */
3973 case 15: /* Class SB */
3974 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
3975 v
= VEC_alloc (constructor_elt
, gc
, 2);
3976 t
= TYPE_FIELDS (desc_type
);
3977 t
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3978 CONSTRUCTOR_APPEND_ELT (v
, min_field
,
3979 convert (TREE_TYPE (min_field
),
3981 CONSTRUCTOR_APPEND_ELT (v
, max_field
,
3982 convert (TREE_TYPE (max_field
), t
));
3983 template_tree
= gnat_build_constructor (template_type
, v
);
3984 template_addr
= build_unary_op (ADDR_EXPR
, NULL_TREE
, template_tree
);
3986 /* For class S, we are done. */
3990 /* Test that we really have a SB descriptor, like DEC Ada. */
3991 t
= build3 (COMPONENT_REF
, TREE_TYPE (klass
), desc
, klass
, NULL
);
3992 u
= convert (TREE_TYPE (klass
), DECL_INITIAL (klass
));
3993 u
= build_binary_op (EQ_EXPR
, boolean_type_node
, t
, u
);
3994 /* If so, there is already a template in the descriptor and
3995 it is located right after the POINTER field. */
3996 t
= DECL_CHAIN (pointer
);
3998 = build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3999 /* Otherwise use the {1, LENGTH} template we build above. */
4000 template_addr
= build3 (COND_EXPR
, p_bounds_type
, u
,
4001 build_unary_op (ADDR_EXPR
, p_bounds_type
,
4006 case 4: /* Class A */
4007 /* The AFLAGS field is the 7th field in the descriptor. */
4008 t
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer
)));
4009 aflags
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
4010 /* The DIMCT field is the 8th field in the descriptor. */
4012 dimct
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
4013 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
4014 or FL_COEFF or FL_BOUNDS not set. */
4015 u
= build_int_cst (TREE_TYPE (aflags
), 192);
4016 u
= build_binary_op (TRUTH_OR_EXPR
, boolean_type_node
,
4017 build_binary_op (NE_EXPR
, boolean_type_node
,
4019 convert (TREE_TYPE (dimct
),
4021 build_binary_op (NE_EXPR
, boolean_type_node
,
4022 build2 (BIT_AND_EXPR
,
4026 /* There is already a template in the descriptor and it is
4027 located at the start of block 3 (12th field). */
4028 t
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (t
))));
4030 = build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
4031 template_tree
= build3 (COND_EXPR
, TREE_TYPE (t
), u
,
4032 build_call_raise (CE_Length_Check_Failed
, Empty
,
4033 N_Raise_Constraint_Error
),
4036 = build_unary_op (ADDR_EXPR
, p_bounds_type
, template_tree
);
4039 case 10: /* Class NCA */
4041 post_error ("unsupported descriptor type for &", gnat_subprog
);
4042 template_addr
= integer_zero_node
;
4046 /* Build the fat pointer in the form of a constructor. */
4047 v
= VEC_alloc (constructor_elt
, gc
, 2);
4048 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (gnu_type
), gnu_expr32
);
4049 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (gnu_type
)),
4052 return gnat_build_constructor (gnu_type
, v
);
4059 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
4060 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
4061 pointer type of GNU_EXPR. BY_REF is true if the result is to be used by
4062 reference. GNAT_SUBPROG is the subprogram to which the VMS descriptor is
4066 convert_vms_descriptor (tree gnu_type
, tree gnu_expr
, tree gnu_expr_alt_type
,
4067 bool by_ref
, Entity_Id gnat_subprog
)
4069 tree desc_type
= TREE_TYPE (TREE_TYPE (gnu_expr
));
4070 tree desc
= build1 (INDIRECT_REF
, desc_type
, gnu_expr
);
4071 tree mbo
= TYPE_FIELDS (desc_type
);
4072 const char *mbostr
= IDENTIFIER_POINTER (DECL_NAME (mbo
));
4073 tree mbmo
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (mbo
)));
4074 tree real_type
, is64bit
, gnu_expr32
, gnu_expr64
;
4077 real_type
= TREE_TYPE (gnu_type
);
4079 real_type
= gnu_type
;
4081 /* If the field name is not MBO, it must be 32-bit and no alternate.
4082 Otherwise primary must be 64-bit and alternate 32-bit. */
4083 if (strcmp (mbostr
, "MBO") != 0)
4085 tree ret
= convert_vms_descriptor32 (real_type
, gnu_expr
, gnat_subprog
);
4087 ret
= build_unary_op (ADDR_EXPR
, gnu_type
, ret
);
4091 /* Build the test for 64-bit descriptor. */
4092 mbo
= build3 (COMPONENT_REF
, TREE_TYPE (mbo
), desc
, mbo
, NULL_TREE
);
4093 mbmo
= build3 (COMPONENT_REF
, TREE_TYPE (mbmo
), desc
, mbmo
, NULL_TREE
);
4095 = build_binary_op (TRUTH_ANDIF_EXPR
, boolean_type_node
,
4096 build_binary_op (EQ_EXPR
, boolean_type_node
,
4097 convert (integer_type_node
, mbo
),
4099 build_binary_op (EQ_EXPR
, boolean_type_node
,
4100 convert (integer_type_node
, mbmo
),
4101 integer_minus_one_node
));
4103 /* Build the 2 possible end results. */
4104 gnu_expr64
= convert_vms_descriptor64 (real_type
, gnu_expr
, gnat_subprog
);
4106 gnu_expr64
= build_unary_op (ADDR_EXPR
, gnu_type
, gnu_expr64
);
4107 gnu_expr
= fold_convert (gnu_expr_alt_type
, gnu_expr
);
4108 gnu_expr32
= convert_vms_descriptor32 (real_type
, gnu_expr
, gnat_subprog
);
4110 gnu_expr32
= build_unary_op (ADDR_EXPR
, gnu_type
, gnu_expr32
);
4112 return build3 (COND_EXPR
, gnu_type
, is64bit
, gnu_expr64
, gnu_expr32
);
4115 /* Build a type to be used to represent an aliased object whose nominal type
4116 is an unconstrained array. This consists of a RECORD_TYPE containing a
4117 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
4118 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
4119 an arbitrary unconstrained object. Use NAME as the name of the record.
4120 DEBUG_INFO_P is true if we need to write debug information for the type. */
4123 build_unc_object_type (tree template_type
, tree object_type
, tree name
,
4126 tree type
= make_node (RECORD_TYPE
);
4128 = create_field_decl (get_identifier ("BOUNDS"), template_type
, type
,
4129 NULL_TREE
, NULL_TREE
, 0, 1);
4131 = create_field_decl (get_identifier ("ARRAY"), object_type
, type
,
4132 NULL_TREE
, NULL_TREE
, 0, 1);
4134 TYPE_NAME (type
) = name
;
4135 TYPE_CONTAINS_TEMPLATE_P (type
) = 1;
4136 DECL_CHAIN (template_field
) = array_field
;
4137 finish_record_type (type
, template_field
, 0, true);
4139 /* Declare it now since it will never be declared otherwise. This is
4140 necessary to ensure that its subtrees are properly marked. */
4141 create_type_decl (name
, type
, NULL
, true, debug_info_p
, Empty
);
4146 /* Same, taking a thin or fat pointer type instead of a template type. */
4149 build_unc_object_type_from_ptr (tree thin_fat_ptr_type
, tree object_type
,
4150 tree name
, bool debug_info_p
)
4154 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type
));
4157 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type
)
4158 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type
))))
4159 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type
))));
4162 build_unc_object_type (template_type
, object_type
, name
, debug_info_p
);
4165 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
4166 In the normal case this is just two adjustments, but we have more to
4167 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
4170 update_pointer_to (tree old_type
, tree new_type
)
4172 tree ptr
= TYPE_POINTER_TO (old_type
);
4173 tree ref
= TYPE_REFERENCE_TO (old_type
);
4176 /* If this is the main variant, process all the other variants first. */
4177 if (TYPE_MAIN_VARIANT (old_type
) == old_type
)
4178 for (t
= TYPE_NEXT_VARIANT (old_type
); t
; t
= TYPE_NEXT_VARIANT (t
))
4179 update_pointer_to (t
, new_type
);
4181 /* If no pointers and no references, we are done. */
4185 /* Merge the old type qualifiers in the new type.
4187 Each old variant has qualifiers for specific reasons, and the new
4188 designated type as well. Each set of qualifiers represents useful
4189 information grabbed at some point, and merging the two simply unifies
4190 these inputs into the final type description.
4192 Consider for instance a volatile type frozen after an access to constant
4193 type designating it; after the designated type's freeze, we get here with
4194 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
4195 when the access type was processed. We will make a volatile and readonly
4196 designated type, because that's what it really is.
4198 We might also get here for a non-dummy OLD_TYPE variant with different
4199 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
4200 to private record type elaboration (see the comments around the call to
4201 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
4202 the qualifiers in those cases too, to avoid accidentally discarding the
4203 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
4205 = build_qualified_type (new_type
,
4206 TYPE_QUALS (old_type
) | TYPE_QUALS (new_type
));
4208 /* If old type and new type are identical, there is nothing to do. */
4209 if (old_type
== new_type
)
4212 /* Otherwise, first handle the simple case. */
4213 if (TREE_CODE (new_type
) != UNCONSTRAINED_ARRAY_TYPE
)
4215 tree new_ptr
, new_ref
;
4217 /* If pointer or reference already points to new type, nothing to do.
4218 This can happen as update_pointer_to can be invoked multiple times
4219 on the same couple of types because of the type variants. */
4220 if ((ptr
&& TREE_TYPE (ptr
) == new_type
)
4221 || (ref
&& TREE_TYPE (ref
) == new_type
))
4224 /* Chain PTR and its variants at the end. */
4225 new_ptr
= TYPE_POINTER_TO (new_type
);
4228 while (TYPE_NEXT_PTR_TO (new_ptr
))
4229 new_ptr
= TYPE_NEXT_PTR_TO (new_ptr
);
4230 TYPE_NEXT_PTR_TO (new_ptr
) = ptr
;
4233 TYPE_POINTER_TO (new_type
) = ptr
;
4235 /* Now adjust them. */
4236 for (; ptr
; ptr
= TYPE_NEXT_PTR_TO (ptr
))
4237 for (t
= TYPE_MAIN_VARIANT (ptr
); t
; t
= TYPE_NEXT_VARIANT (t
))
4239 TREE_TYPE (t
) = new_type
;
4240 if (TYPE_NULL_BOUNDS (t
))
4241 TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t
), 0)) = new_type
;
4244 /* Chain REF and its variants at the end. */
4245 new_ref
= TYPE_REFERENCE_TO (new_type
);
4248 while (TYPE_NEXT_REF_TO (new_ref
))
4249 new_ref
= TYPE_NEXT_REF_TO (new_ref
);
4250 TYPE_NEXT_REF_TO (new_ref
) = ref
;
4253 TYPE_REFERENCE_TO (new_type
) = ref
;
4255 /* Now adjust them. */
4256 for (; ref
; ref
= TYPE_NEXT_REF_TO (ref
))
4257 for (t
= TYPE_MAIN_VARIANT (ref
); t
; t
= TYPE_NEXT_VARIANT (t
))
4258 TREE_TYPE (t
) = new_type
;
4260 TYPE_POINTER_TO (old_type
) = NULL_TREE
;
4261 TYPE_REFERENCE_TO (old_type
) = NULL_TREE
;
4264 /* Now deal with the unconstrained array case. In this case the pointer
4265 is actually a record where both fields are pointers to dummy nodes.
4266 Turn them into pointers to the correct types using update_pointer_to.
4267 Likewise for the pointer to the object record (thin pointer). */
4270 tree new_ptr
= TYPE_POINTER_TO (new_type
);
4272 gcc_assert (TYPE_IS_FAT_POINTER_P (ptr
));
4274 /* If PTR already points to NEW_TYPE, nothing to do. This can happen
4275 since update_pointer_to can be invoked multiple times on the same
4276 couple of types because of the type variants. */
4277 if (TYPE_UNCONSTRAINED_ARRAY (ptr
) == new_type
)
4281 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr
))),
4282 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr
))));
4285 (TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr
)))),
4286 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr
)))));
4288 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type
),
4289 TYPE_OBJECT_RECORD_TYPE (new_type
));
4291 TYPE_POINTER_TO (old_type
) = NULL_TREE
;
4295 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
4296 unconstrained one. This involves making or finding a template. */
4299 convert_to_fat_pointer (tree type
, tree expr
)
4301 tree template_type
= TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type
))));
4302 tree p_array_type
= TREE_TYPE (TYPE_FIELDS (type
));
4303 tree etype
= TREE_TYPE (expr
);
4305 VEC(constructor_elt
,gc
) *v
= VEC_alloc (constructor_elt
, gc
, 2);
4307 /* If EXPR is null, make a fat pointer that contains a null pointer to the
4308 array (compare_fat_pointers ensures that this is the full discriminant)
4309 and a valid pointer to the bounds. This latter property is necessary
4310 since the compiler can hoist the load of the bounds done through it. */
4311 if (integer_zerop (expr
))
4313 tree ptr_template_type
= TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type
)));
4314 tree null_bounds
, t
;
4316 if (TYPE_NULL_BOUNDS (ptr_template_type
))
4317 null_bounds
= TYPE_NULL_BOUNDS (ptr_template_type
);
4320 /* The template type can still be dummy at this point so we build an
4321 empty constructor. The middle-end will fill it in with zeros. */
4322 t
= build_constructor (template_type
, NULL
);
4323 TREE_CONSTANT (t
) = TREE_STATIC (t
) = 1;
4324 null_bounds
= build_unary_op (ADDR_EXPR
, NULL_TREE
, t
);
4325 SET_TYPE_NULL_BOUNDS (ptr_template_type
, null_bounds
);
4328 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (type
),
4329 fold_convert (p_array_type
, null_pointer_node
));
4330 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (type
)), null_bounds
);
4331 t
= build_constructor (type
, v
);
4332 /* Do not set TREE_CONSTANT so as to force T to static memory. */
4333 TREE_CONSTANT (t
) = 0;
4334 TREE_STATIC (t
) = 1;
4339 /* If EXPR is a thin pointer, make template and data from the record. */
4340 if (TYPE_IS_THIN_POINTER_P (etype
))
4342 tree field
= TYPE_FIELDS (TREE_TYPE (etype
));
4344 expr
= gnat_protect_expr (expr
);
4345 if (TREE_CODE (expr
) == ADDR_EXPR
)
4346 expr
= TREE_OPERAND (expr
, 0);
4349 /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
4350 the thin pointer value has been shifted so we first need to shift
4351 it back to get the template address. */
4352 if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype
)))
4354 = build_binary_op (POINTER_PLUS_EXPR
, etype
, expr
,
4355 fold_build1 (NEGATE_EXPR
, sizetype
,
4357 (DECL_CHAIN (field
))));
4358 expr
= build1 (INDIRECT_REF
, TREE_TYPE (etype
), expr
);
4361 template_tree
= build_component_ref (expr
, NULL_TREE
, field
, false);
4362 expr
= build_unary_op (ADDR_EXPR
, NULL_TREE
,
4363 build_component_ref (expr
, NULL_TREE
,
4364 DECL_CHAIN (field
), false));
4367 /* Otherwise, build the constructor for the template. */
4369 template_tree
= build_template (template_type
, TREE_TYPE (etype
), expr
);
4371 /* The final result is a constructor for the fat pointer.
4373 If EXPR is an argument of a foreign convention subprogram, the type it
4374 points to is directly the component type. In this case, the expression
4375 type may not match the corresponding FIELD_DECL type at this point, so we
4376 call "convert" here to fix that up if necessary. This type consistency is
4377 required, for instance because it ensures that possible later folding of
4378 COMPONENT_REFs against this constructor always yields something of the
4379 same type as the initial reference.
4381 Note that the call to "build_template" above is still fine because it
4382 will only refer to the provided TEMPLATE_TYPE in this case. */
4383 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (type
),
4384 convert (p_array_type
, expr
));
4385 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (type
)),
4386 build_unary_op (ADDR_EXPR
, NULL_TREE
,
4388 return gnat_build_constructor (type
, v
);
4391 /* Create an expression whose value is that of EXPR,
4392 converted to type TYPE. The TREE_TYPE of the value
4393 is always TYPE. This function implements all reasonable
4394 conversions; callers should filter out those that are
4395 not permitted by the language being compiled. */
4398 convert (tree type
, tree expr
)
4400 tree etype
= TREE_TYPE (expr
);
4401 enum tree_code ecode
= TREE_CODE (etype
);
4402 enum tree_code code
= TREE_CODE (type
);
4404 /* If the expression is already of the right type, we are done. */
4408 /* If both input and output have padding and are of variable size, do this
4409 as an unchecked conversion. Likewise if one is a mere variant of the
4410 other, so we avoid a pointless unpad/repad sequence. */
4411 else if (code
== RECORD_TYPE
&& ecode
== RECORD_TYPE
4412 && TYPE_PADDING_P (type
) && TYPE_PADDING_P (etype
)
4413 && (!TREE_CONSTANT (TYPE_SIZE (type
))
4414 || !TREE_CONSTANT (TYPE_SIZE (etype
))
4415 || TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (etype
)
4416 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type
)))
4417 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype
)))))
4420 /* If the output type has padding, convert to the inner type and make a
4421 constructor to build the record, unless a variable size is involved. */
4422 else if (code
== RECORD_TYPE
&& TYPE_PADDING_P (type
))
4424 VEC(constructor_elt
,gc
) *v
;
4426 /* If we previously converted from another type and our type is
4427 of variable size, remove the conversion to avoid the need for
4428 variable-sized temporaries. Likewise for a conversion between
4429 original and packable version. */
4430 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
4431 && (!TREE_CONSTANT (TYPE_SIZE (type
))
4432 || (ecode
== RECORD_TYPE
4433 && TYPE_NAME (etype
)
4434 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr
, 0))))))
4435 expr
= TREE_OPERAND (expr
, 0);
4437 /* If we are just removing the padding from expr, convert the original
4438 object if we have variable size in order to avoid the need for some
4439 variable-sized temporaries. Likewise if the padding is a variant
4440 of the other, so we avoid a pointless unpad/repad sequence. */
4441 if (TREE_CODE (expr
) == COMPONENT_REF
4442 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr
, 0)))
4443 && (!TREE_CONSTANT (TYPE_SIZE (type
))
4444 || TYPE_MAIN_VARIANT (type
)
4445 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr
, 0)))
4446 || (ecode
== RECORD_TYPE
4447 && TYPE_NAME (etype
)
4448 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type
))))))
4449 return convert (type
, TREE_OPERAND (expr
, 0));
4451 /* If the inner type is of self-referential size and the expression type
4452 is a record, do this as an unchecked conversion. But first pad the
4453 expression if possible to have the same size on both sides. */
4454 if (ecode
== RECORD_TYPE
4455 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type
))))
4457 if (TREE_CODE (TYPE_SIZE (etype
)) == INTEGER_CST
)
4458 expr
= convert (maybe_pad_type (etype
, TYPE_SIZE (type
), 0, Empty
,
4459 false, false, false, true),
4461 return unchecked_convert (type
, expr
, false);
4464 /* If we are converting between array types with variable size, do the
4465 final conversion as an unchecked conversion, again to avoid the need
4466 for some variable-sized temporaries. If valid, this conversion is
4467 very likely purely technical and without real effects. */
4468 if (ecode
== ARRAY_TYPE
4469 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type
))) == ARRAY_TYPE
4470 && !TREE_CONSTANT (TYPE_SIZE (etype
))
4471 && !TREE_CONSTANT (TYPE_SIZE (type
)))
4472 return unchecked_convert (type
,
4473 convert (TREE_TYPE (TYPE_FIELDS (type
)),
4477 v
= VEC_alloc (constructor_elt
, gc
, 1);
4478 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (type
),
4479 convert (TREE_TYPE (TYPE_FIELDS (type
)), expr
));
4480 return gnat_build_constructor (type
, v
);
4483 /* If the input type has padding, remove it and convert to the output type.
4484 The conditions ordering is arranged to ensure that the output type is not
4485 a padding type here, as it is not clear whether the conversion would
4486 always be correct if this was to happen. */
4487 else if (ecode
== RECORD_TYPE
&& TYPE_PADDING_P (etype
))
4491 /* If we have just converted to this padded type, just get the
4492 inner expression. */
4493 if (TREE_CODE (expr
) == CONSTRUCTOR
4494 && !VEC_empty (constructor_elt
, CONSTRUCTOR_ELTS (expr
))
4495 && VEC_index (constructor_elt
, CONSTRUCTOR_ELTS (expr
), 0).index
4496 == TYPE_FIELDS (etype
))
4498 = VEC_index (constructor_elt
, CONSTRUCTOR_ELTS (expr
), 0).value
;
4500 /* Otherwise, build an explicit component reference. */
4503 = build_component_ref (expr
, NULL_TREE
, TYPE_FIELDS (etype
), false);
4505 return convert (type
, unpadded
);
4508 /* If the input is a biased type, adjust first. */
4509 if (ecode
== INTEGER_TYPE
&& TYPE_BIASED_REPRESENTATION_P (etype
))
4510 return convert (type
, fold_build2 (PLUS_EXPR
, TREE_TYPE (etype
),
4511 fold_convert (TREE_TYPE (etype
),
4513 TYPE_MIN_VALUE (etype
)));
4515 /* If the input is a justified modular type, we need to extract the actual
4516 object before converting it to any other type with the exceptions of an
4517 unconstrained array or of a mere type variant. It is useful to avoid the
4518 extraction and conversion in the type variant case because it could end
4519 up replacing a VAR_DECL expr by a constructor and we might be about the
4520 take the address of the result. */
4521 if (ecode
== RECORD_TYPE
&& TYPE_JUSTIFIED_MODULAR_P (etype
)
4522 && code
!= UNCONSTRAINED_ARRAY_TYPE
4523 && TYPE_MAIN_VARIANT (type
) != TYPE_MAIN_VARIANT (etype
))
4524 return convert (type
, build_component_ref (expr
, NULL_TREE
,
4525 TYPE_FIELDS (etype
), false));
4527 /* If converting to a type that contains a template, convert to the data
4528 type and then build the template. */
4529 if (code
== RECORD_TYPE
&& TYPE_CONTAINS_TEMPLATE_P (type
))
4531 tree obj_type
= TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type
)));
4532 VEC(constructor_elt
,gc
) *v
= VEC_alloc (constructor_elt
, gc
, 2);
4534 /* If the source already has a template, get a reference to the
4535 associated array only, as we are going to rebuild a template
4536 for the target type anyway. */
4537 expr
= maybe_unconstrained_array (expr
);
4539 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (type
),
4540 build_template (TREE_TYPE (TYPE_FIELDS (type
)),
4541 obj_type
, NULL_TREE
));
4542 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (type
)),
4543 convert (obj_type
, expr
));
4544 return gnat_build_constructor (type
, v
);
4547 /* There are some cases of expressions that we process specially. */
4548 switch (TREE_CODE (expr
))
4554 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4555 conversion in gnat_expand_expr. NULL_EXPR does not represent
4556 and actual value, so no conversion is needed. */
4557 expr
= copy_node (expr
);
4558 TREE_TYPE (expr
) = type
;
4562 /* If we are converting a STRING_CST to another constrained array type,
4563 just make a new one in the proper type. */
4564 if (code
== ecode
&& AGGREGATE_TYPE_P (etype
)
4565 && !(TREE_CODE (TYPE_SIZE (etype
)) == INTEGER_CST
4566 && TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
))
4568 expr
= copy_node (expr
);
4569 TREE_TYPE (expr
) = type
;
4575 /* If we are converting a VECTOR_CST to a mere variant type, just make
4576 a new one in the proper type. */
4577 if (code
== ecode
&& gnat_types_compatible_p (type
, etype
))
4579 expr
= copy_node (expr
);
4580 TREE_TYPE (expr
) = type
;
4585 /* If we are converting a CONSTRUCTOR to a mere variant type, just make
4586 a new one in the proper type. */
4587 if (code
== ecode
&& gnat_types_compatible_p (type
, etype
))
4589 expr
= copy_node (expr
);
4590 TREE_TYPE (expr
) = type
;
4591 CONSTRUCTOR_ELTS (expr
)
4592 = VEC_copy (constructor_elt
, gc
, CONSTRUCTOR_ELTS (expr
));
4596 /* Likewise for a conversion between original and packable version, or
4597 conversion between types of the same size and with the same list of
4598 fields, but we have to work harder to preserve type consistency. */
4600 && code
== RECORD_TYPE
4601 && (TYPE_NAME (type
) == TYPE_NAME (etype
)
4602 || tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (etype
))))
4605 VEC(constructor_elt
,gc
) *e
= CONSTRUCTOR_ELTS (expr
);
4606 unsigned HOST_WIDE_INT len
= VEC_length (constructor_elt
, e
);
4607 VEC(constructor_elt
,gc
) *v
= VEC_alloc (constructor_elt
, gc
, len
);
4608 tree efield
= TYPE_FIELDS (etype
), field
= TYPE_FIELDS (type
);
4609 unsigned HOST_WIDE_INT idx
;
4612 /* Whether we need to clear TREE_CONSTANT et al. on the output
4613 constructor when we convert in place. */
4614 bool clear_constant
= false;
4616 FOR_EACH_CONSTRUCTOR_ELT(e
, idx
, index
, value
)
4618 /* We expect only simple constructors. */
4619 if (!SAME_FIELD_P (index
, efield
))
4621 /* The field must be the same. */
4622 if (!SAME_FIELD_P (efield
, field
))
4624 constructor_elt elt
= {field
, convert (TREE_TYPE (field
), value
)};
4625 VEC_quick_push (constructor_elt
, v
, elt
);
4627 /* If packing has made this field a bitfield and the input
4628 value couldn't be emitted statically any more, we need to
4629 clear TREE_CONSTANT on our output. */
4631 && TREE_CONSTANT (expr
)
4632 && !CONSTRUCTOR_BITFIELD_P (efield
)
4633 && CONSTRUCTOR_BITFIELD_P (field
)
4634 && !initializer_constant_valid_for_bitfield_p (value
))
4635 clear_constant
= true;
4637 efield
= DECL_CHAIN (efield
);
4638 field
= DECL_CHAIN (field
);
4641 /* If we have been able to match and convert all the input fields
4642 to their output type, convert in place now. We'll fallback to a
4643 view conversion downstream otherwise. */
4646 expr
= copy_node (expr
);
4647 TREE_TYPE (expr
) = type
;
4648 CONSTRUCTOR_ELTS (expr
) = v
;
4650 TREE_CONSTANT (expr
) = TREE_STATIC (expr
) = 0;
4655 /* Likewise for a conversion between array type and vector type with a
4656 compatible representative array. */
4657 else if (code
== VECTOR_TYPE
4658 && ecode
== ARRAY_TYPE
4659 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type
),
4662 VEC(constructor_elt
,gc
) *e
= CONSTRUCTOR_ELTS (expr
);
4663 unsigned HOST_WIDE_INT len
= VEC_length (constructor_elt
, e
);
4664 VEC(constructor_elt
,gc
) *v
;
4665 unsigned HOST_WIDE_INT ix
;
4668 /* Build a VECTOR_CST from a *constant* array constructor. */
4669 if (TREE_CONSTANT (expr
))
4671 bool constant_p
= true;
4673 /* Iterate through elements and check if all constructor
4674 elements are *_CSTs. */
4675 FOR_EACH_CONSTRUCTOR_VALUE (e
, ix
, value
)
4676 if (!CONSTANT_CLASS_P (value
))
4683 return build_vector_from_ctor (type
,
4684 CONSTRUCTOR_ELTS (expr
));
4687 /* Otherwise, build a regular vector constructor. */
4688 v
= VEC_alloc (constructor_elt
, gc
, len
);
4689 FOR_EACH_CONSTRUCTOR_VALUE (e
, ix
, value
)
4691 constructor_elt elt
= {NULL_TREE
, value
};
4692 VEC_quick_push (constructor_elt
, v
, elt
);
4694 expr
= copy_node (expr
);
4695 TREE_TYPE (expr
) = type
;
4696 CONSTRUCTOR_ELTS (expr
) = v
;
4701 case UNCONSTRAINED_ARRAY_REF
:
4702 /* First retrieve the underlying array. */
4703 expr
= maybe_unconstrained_array (expr
);
4704 etype
= TREE_TYPE (expr
);
4705 ecode
= TREE_CODE (etype
);
4708 case VIEW_CONVERT_EXPR
:
4710 /* GCC 4.x is very sensitive to type consistency overall, and view
4711 conversions thus are very frequent. Even though just "convert"ing
4712 the inner operand to the output type is fine in most cases, it
4713 might expose unexpected input/output type mismatches in special
4714 circumstances so we avoid such recursive calls when we can. */
4715 tree op0
= TREE_OPERAND (expr
, 0);
4717 /* If we are converting back to the original type, we can just
4718 lift the input conversion. This is a common occurrence with
4719 switches back-and-forth amongst type variants. */
4720 if (type
== TREE_TYPE (op0
))
4723 /* Otherwise, if we're converting between two aggregate or vector
4724 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4725 target type in place or to just convert the inner expression. */
4726 if ((AGGREGATE_TYPE_P (type
) && AGGREGATE_TYPE_P (etype
))
4727 || (VECTOR_TYPE_P (type
) && VECTOR_TYPE_P (etype
)))
4729 /* If we are converting between mere variants, we can just
4730 substitute the VIEW_CONVERT_EXPR in place. */
4731 if (gnat_types_compatible_p (type
, etype
))
4732 return build1 (VIEW_CONVERT_EXPR
, type
, op0
);
4734 /* Otherwise, we may just bypass the input view conversion unless
4735 one of the types is a fat pointer, which is handled by
4736 specialized code below which relies on exact type matching. */
4737 else if (!TYPE_IS_FAT_POINTER_P (type
)
4738 && !TYPE_IS_FAT_POINTER_P (etype
))
4739 return convert (type
, op0
);
4749 /* Check for converting to a pointer to an unconstrained array. */
4750 if (TYPE_IS_FAT_POINTER_P (type
) && !TYPE_IS_FAT_POINTER_P (etype
))
4751 return convert_to_fat_pointer (type
, expr
);
4753 /* If we are converting between two aggregate or vector types that are mere
4754 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4755 to a vector type from its representative array type. */
4756 else if ((code
== ecode
4757 && (AGGREGATE_TYPE_P (type
) || VECTOR_TYPE_P (type
))
4758 && gnat_types_compatible_p (type
, etype
))
4759 || (code
== VECTOR_TYPE
4760 && ecode
== ARRAY_TYPE
4761 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type
),
4763 return build1 (VIEW_CONVERT_EXPR
, type
, expr
);
4765 /* If we are converting between tagged types, try to upcast properly. */
4766 else if (ecode
== RECORD_TYPE
&& code
== RECORD_TYPE
4767 && TYPE_ALIGN_OK (etype
) && TYPE_ALIGN_OK (type
))
4769 tree child_etype
= etype
;
4771 tree field
= TYPE_FIELDS (child_etype
);
4772 if (DECL_NAME (field
) == parent_name_id
&& TREE_TYPE (field
) == type
)
4773 return build_component_ref (expr
, NULL_TREE
, field
, false);
4774 child_etype
= TREE_TYPE (field
);
4775 } while (TREE_CODE (child_etype
) == RECORD_TYPE
);
4778 /* If we are converting from a smaller form of record type back to it, just
4779 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4780 size on both sides. */
4781 else if (ecode
== RECORD_TYPE
&& code
== RECORD_TYPE
4782 && smaller_form_type_p (etype
, type
))
4784 expr
= convert (maybe_pad_type (etype
, TYPE_SIZE (type
), 0, Empty
,
4785 false, false, false, true),
4787 return build1 (VIEW_CONVERT_EXPR
, type
, expr
);
4790 /* In all other cases of related types, make a NOP_EXPR. */
4791 else if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (etype
))
4792 return fold_convert (type
, expr
);
4797 return fold_build1 (CONVERT_EXPR
, type
, expr
);
4800 if (TYPE_HAS_ACTUAL_BOUNDS_P (type
)
4801 && (ecode
== ARRAY_TYPE
|| ecode
== UNCONSTRAINED_ARRAY_TYPE
4802 || (ecode
== RECORD_TYPE
&& TYPE_CONTAINS_TEMPLATE_P (etype
))))
4803 return unchecked_convert (type
, expr
, false);
4804 else if (TYPE_BIASED_REPRESENTATION_P (type
))
4805 return fold_convert (type
,
4806 fold_build2 (MINUS_EXPR
, TREE_TYPE (type
),
4807 convert (TREE_TYPE (type
), expr
),
4808 TYPE_MIN_VALUE (type
)));
4810 /* ... fall through ... */
4814 /* If we are converting an additive expression to an integer type
4815 with lower precision, be wary of the optimization that can be
4816 applied by convert_to_integer. There are 2 problematic cases:
4817 - if the first operand was originally of a biased type,
4818 because we could be recursively called to convert it
4819 to an intermediate type and thus rematerialize the
4820 additive operator endlessly,
4821 - if the expression contains a placeholder, because an
4822 intermediate conversion that changes the sign could
4823 be inserted and thus introduce an artificial overflow
4824 at compile time when the placeholder is substituted. */
4825 if (code
== INTEGER_TYPE
4826 && ecode
== INTEGER_TYPE
4827 && TYPE_PRECISION (type
) < TYPE_PRECISION (etype
)
4828 && (TREE_CODE (expr
) == PLUS_EXPR
|| TREE_CODE (expr
) == MINUS_EXPR
))
4830 tree op0
= get_unwidened (TREE_OPERAND (expr
, 0), type
);
4832 if ((TREE_CODE (TREE_TYPE (op0
)) == INTEGER_TYPE
4833 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0
)))
4834 || CONTAINS_PLACEHOLDER_P (expr
))
4835 return build1 (NOP_EXPR
, type
, expr
);
4838 return fold (convert_to_integer (type
, expr
));
4841 case REFERENCE_TYPE
:
4842 /* If converting between two thin pointers, adjust if needed to account
4843 for differing offsets from the base pointer, depending on whether
4844 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
4845 if (TYPE_IS_THIN_POINTER_P (etype
) && TYPE_IS_THIN_POINTER_P (type
))
4848 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype
)) != NULL_TREE
4849 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype
))))
4852 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type
)) != NULL_TREE
4853 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type
))))
4855 tree byte_diff
= size_diffop (type_pos
, etype_pos
);
4857 expr
= build1 (NOP_EXPR
, type
, expr
);
4858 if (integer_zerop (byte_diff
))
4861 return build_binary_op (POINTER_PLUS_EXPR
, type
, expr
,
4862 fold_convert (sizetype
, byte_diff
));
4865 /* If converting fat pointer to normal or thin pointer, get the pointer
4866 to the array and then convert it. */
4867 if (TYPE_IS_FAT_POINTER_P (etype
))
4869 = build_component_ref (expr
, NULL_TREE
, TYPE_FIELDS (etype
), false);
4871 return fold (convert_to_pointer (type
, expr
));
4874 return fold (convert_to_real (type
, expr
));
4877 if (TYPE_JUSTIFIED_MODULAR_P (type
) && !AGGREGATE_TYPE_P (etype
))
4879 VEC(constructor_elt
,gc
) *v
= VEC_alloc (constructor_elt
, gc
, 1);
4881 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (type
),
4882 convert (TREE_TYPE (TYPE_FIELDS (type
)),
4884 return gnat_build_constructor (type
, v
);
4887 /* ... fall through ... */
4890 /* In these cases, assume the front-end has validated the conversion.
4891 If the conversion is valid, it will be a bit-wise conversion, so
4892 it can be viewed as an unchecked conversion. */
4893 return unchecked_convert (type
, expr
, false);
4896 /* This is a either a conversion between a tagged type and some
4897 subtype, which we have to mark as a UNION_TYPE because of
4898 overlapping fields or a conversion of an Unchecked_Union. */
4899 return unchecked_convert (type
, expr
, false);
4901 case UNCONSTRAINED_ARRAY_TYPE
:
4902 /* If the input is a VECTOR_TYPE, convert to the representative
4903 array type first. */
4904 if (ecode
== VECTOR_TYPE
)
4906 expr
= convert (TYPE_REPRESENTATIVE_ARRAY (etype
), expr
);
4907 etype
= TREE_TYPE (expr
);
4908 ecode
= TREE_CODE (etype
);
4911 /* If EXPR is a constrained array, take its address, convert it to a
4912 fat pointer, and then dereference it. Likewise if EXPR is a
4913 record containing both a template and a constrained array.
4914 Note that a record representing a justified modular type
4915 always represents a packed constrained array. */
4916 if (ecode
== ARRAY_TYPE
4917 || (ecode
== INTEGER_TYPE
&& TYPE_HAS_ACTUAL_BOUNDS_P (etype
))
4918 || (ecode
== RECORD_TYPE
&& TYPE_CONTAINS_TEMPLATE_P (etype
))
4919 || (ecode
== RECORD_TYPE
&& TYPE_JUSTIFIED_MODULAR_P (etype
)))
4922 (INDIRECT_REF
, NULL_TREE
,
4923 convert_to_fat_pointer (TREE_TYPE (type
),
4924 build_unary_op (ADDR_EXPR
,
4927 /* Do something very similar for converting one unconstrained
4928 array to another. */
4929 else if (ecode
== UNCONSTRAINED_ARRAY_TYPE
)
4931 build_unary_op (INDIRECT_REF
, NULL_TREE
,
4932 convert (TREE_TYPE (type
),
4933 build_unary_op (ADDR_EXPR
,
4939 return fold (convert_to_complex (type
, expr
));
4946 /* Create an expression whose value is that of EXPR converted to the common
4947 index type, which is sizetype. EXPR is supposed to be in the base type
4948 of the GNAT index type. Calling it is equivalent to doing
4950 convert (sizetype, expr)
4952 but we try to distribute the type conversion with the knowledge that EXPR
4953 cannot overflow in its type. This is a best-effort approach and we fall
4954 back to the above expression as soon as difficulties are encountered.
4956 This is necessary to overcome issues that arise when the GNAT base index
4957 type and the GCC common index type (sizetype) don't have the same size,
4958 which is quite frequent on 64-bit architectures. In this case, and if
4959 the GNAT base index type is signed but the iteration type of the loop has
4960 been forced to unsigned, the loop scalar evolution engine cannot compute
4961 a simple evolution for the general induction variables associated with the
4962 array indices, because it will preserve the wrap-around semantics in the
4963 unsigned type of their "inner" part. As a result, many loop optimizations
4966 The solution is to use a special (basic) induction variable that is at
4967 least as large as sizetype, and to express the aforementioned general
4968 induction variables in terms of this induction variable, eliminating
4969 the problematic intermediate truncation to the GNAT base index type.
4970 This is possible as long as the original expression doesn't overflow
4971 and if the middle-end hasn't introduced artificial overflows in the
4972 course of the various simplification it can make to the expression. */
4975 convert_to_index_type (tree expr
)
4977 enum tree_code code
= TREE_CODE (expr
);
4978 tree type
= TREE_TYPE (expr
);
4980 /* If the type is unsigned, overflow is allowed so we cannot be sure that
4981 EXPR doesn't overflow. Keep it simple if optimization is disabled. */
4982 if (TYPE_UNSIGNED (type
) || !optimize
)
4983 return convert (sizetype
, expr
);
4988 /* The main effect of the function: replace a loop parameter with its
4989 associated special induction variable. */
4990 if (DECL_LOOP_PARM_P (expr
) && DECL_INDUCTION_VAR (expr
))
4991 expr
= DECL_INDUCTION_VAR (expr
);
4996 tree otype
= TREE_TYPE (TREE_OPERAND (expr
, 0));
4997 /* Bail out as soon as we suspect some sort of type frobbing. */
4998 if (TYPE_PRECISION (type
) != TYPE_PRECISION (otype
)
4999 || TYPE_UNSIGNED (type
) != TYPE_UNSIGNED (otype
))
5003 /* ... fall through ... */
5005 case NON_LVALUE_EXPR
:
5006 return fold_build1 (code
, sizetype
,
5007 convert_to_index_type (TREE_OPERAND (expr
, 0)));
5012 return fold_build2 (code
, sizetype
,
5013 convert_to_index_type (TREE_OPERAND (expr
, 0)),
5014 convert_to_index_type (TREE_OPERAND (expr
, 1)));
5017 return fold_build2 (code
, sizetype
, TREE_OPERAND (expr
, 0),
5018 convert_to_index_type (TREE_OPERAND (expr
, 1)));
5021 return fold_build3 (code
, sizetype
, TREE_OPERAND (expr
, 0),
5022 convert_to_index_type (TREE_OPERAND (expr
, 1)),
5023 convert_to_index_type (TREE_OPERAND (expr
, 2)));
5029 return convert (sizetype
, expr
);
5032 /* Remove all conversions that are done in EXP. This includes converting
5033 from a padded type or to a justified modular type. If TRUE_ADDRESS
5034 is true, always return the address of the containing object even if
5035 the address is not bit-aligned. */
5038 remove_conversions (tree exp
, bool true_address
)
5040 switch (TREE_CODE (exp
))
5044 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
5045 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp
)))
5047 remove_conversions (VEC_index (constructor_elt
,
5048 CONSTRUCTOR_ELTS (exp
), 0).value
,
5053 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp
, 0))))
5054 return remove_conversions (TREE_OPERAND (exp
, 0), true_address
);
5058 case VIEW_CONVERT_EXPR
:
5059 case NON_LVALUE_EXPR
:
5060 return remove_conversions (TREE_OPERAND (exp
, 0), true_address
);
5069 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
5070 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
5071 likewise return an expression pointing to the underlying array. */
5074 maybe_unconstrained_array (tree exp
)
5076 enum tree_code code
= TREE_CODE (exp
);
5077 tree type
= TREE_TYPE (exp
);
5079 switch (TREE_CODE (type
))
5081 case UNCONSTRAINED_ARRAY_TYPE
:
5082 if (code
== UNCONSTRAINED_ARRAY_REF
)
5084 const bool read_only
= TREE_READONLY (exp
);
5085 const bool no_trap
= TREE_THIS_NOTRAP (exp
);
5087 exp
= TREE_OPERAND (exp
, 0);
5088 type
= TREE_TYPE (exp
);
5090 if (TREE_CODE (exp
) == COND_EXPR
)
5093 = build_unary_op (INDIRECT_REF
, NULL_TREE
,
5094 build_component_ref (TREE_OPERAND (exp
, 1),
5099 = build_unary_op (INDIRECT_REF
, NULL_TREE
,
5100 build_component_ref (TREE_OPERAND (exp
, 2),
5105 exp
= build3 (COND_EXPR
,
5106 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type
))),
5107 TREE_OPERAND (exp
, 0), op1
, op2
);
5111 exp
= build_unary_op (INDIRECT_REF
, NULL_TREE
,
5112 build_component_ref (exp
, NULL_TREE
,
5115 TREE_READONLY (exp
) = read_only
;
5116 TREE_THIS_NOTRAP (exp
) = no_trap
;
5120 else if (code
== NULL_EXPR
)
5121 exp
= build1 (NULL_EXPR
,
5122 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type
)))),
5123 TREE_OPERAND (exp
, 0));
5127 /* If this is a padded type and it contains a template, convert to the
5128 unpadded type first. */
5129 if (TYPE_PADDING_P (type
)
5130 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type
))) == RECORD_TYPE
5131 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type
))))
5133 exp
= convert (TREE_TYPE (TYPE_FIELDS (type
)), exp
);
5134 type
= TREE_TYPE (exp
);
5137 if (TYPE_CONTAINS_TEMPLATE_P (type
))
5139 exp
= build_component_ref (exp
, NULL_TREE
,
5140 DECL_CHAIN (TYPE_FIELDS (type
)),
5142 type
= TREE_TYPE (exp
);
5144 /* If the array type is padded, convert to the unpadded type. */
5145 if (TYPE_IS_PADDING_P (type
))
5146 exp
= convert (TREE_TYPE (TYPE_FIELDS (type
)), exp
);
5157 /* Return true if EXPR is an expression that can be folded as an operand
5158 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
5161 can_fold_for_view_convert_p (tree expr
)
5165 /* The folder will fold NOP_EXPRs between integral types with the same
5166 precision (in the middle-end's sense). We cannot allow it if the
5167 types don't have the same precision in the Ada sense as well. */
5168 if (TREE_CODE (expr
) != NOP_EXPR
)
5171 t1
= TREE_TYPE (expr
);
5172 t2
= TREE_TYPE (TREE_OPERAND (expr
, 0));
5174 /* Defer to the folder for non-integral conversions. */
5175 if (!(INTEGRAL_TYPE_P (t1
) && INTEGRAL_TYPE_P (t2
)))
5178 /* Only fold conversions that preserve both precisions. */
5179 if (TYPE_PRECISION (t1
) == TYPE_PRECISION (t2
)
5180 && operand_equal_p (rm_size (t1
), rm_size (t2
), 0))
5186 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
5187 If NOTRUNC_P is true, truncation operations should be suppressed.
5189 Special care is required with (source or target) integral types whose
5190 precision is not equal to their size, to make sure we fetch or assign
5191 the value bits whose location might depend on the endianness, e.g.
5193 Rmsize : constant := 8;
5194 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
5196 type Bit_Array is array (1 .. Rmsize) of Boolean;
5197 pragma Pack (Bit_Array);
5199 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
5201 Value : Int := 2#1000_0001#;
5202 Vbits : Bit_Array := To_Bit_Array (Value);
5204 we expect the 8 bits at Vbits'Address to always contain Value, while
5205 their original location depends on the endianness, at Value'Address
5206 on a little-endian architecture but not on a big-endian one. */
5209 unchecked_convert (tree type
, tree expr
, bool notrunc_p
)
5211 tree etype
= TREE_TYPE (expr
);
5212 enum tree_code ecode
= TREE_CODE (etype
);
5213 enum tree_code code
= TREE_CODE (type
);
5216 /* If the expression is already of the right type, we are done. */
5220 /* If both types types are integral just do a normal conversion.
5221 Likewise for a conversion to an unconstrained array. */
5222 if ((((INTEGRAL_TYPE_P (type
)
5223 && !(code
== INTEGER_TYPE
&& TYPE_VAX_FLOATING_POINT_P (type
)))
5224 || (POINTER_TYPE_P (type
) && !TYPE_IS_THIN_POINTER_P (type
))
5225 || (code
== RECORD_TYPE
&& TYPE_JUSTIFIED_MODULAR_P (type
)))
5226 && ((INTEGRAL_TYPE_P (etype
)
5227 && !(ecode
== INTEGER_TYPE
&& TYPE_VAX_FLOATING_POINT_P (etype
)))
5228 || (POINTER_TYPE_P (etype
) && !TYPE_IS_THIN_POINTER_P (etype
))
5229 || (ecode
== RECORD_TYPE
&& TYPE_JUSTIFIED_MODULAR_P (etype
))))
5230 || code
== UNCONSTRAINED_ARRAY_TYPE
)
5232 if (ecode
== INTEGER_TYPE
&& TYPE_BIASED_REPRESENTATION_P (etype
))
5234 tree ntype
= copy_type (etype
);
5235 TYPE_BIASED_REPRESENTATION_P (ntype
) = 0;
5236 TYPE_MAIN_VARIANT (ntype
) = ntype
;
5237 expr
= build1 (NOP_EXPR
, ntype
, expr
);
5240 if (code
== INTEGER_TYPE
&& TYPE_BIASED_REPRESENTATION_P (type
))
5242 tree rtype
= copy_type (type
);
5243 TYPE_BIASED_REPRESENTATION_P (rtype
) = 0;
5244 TYPE_MAIN_VARIANT (rtype
) = rtype
;
5245 expr
= convert (rtype
, expr
);
5246 expr
= build1 (NOP_EXPR
, type
, expr
);
5249 expr
= convert (type
, expr
);
5252 /* If we are converting to an integral type whose precision is not equal
5253 to its size, first unchecked convert to a record type that contains an
5254 field of the given precision. Then extract the field. */
5255 else if (INTEGRAL_TYPE_P (type
)
5256 && TYPE_RM_SIZE (type
)
5257 && 0 != compare_tree_int (TYPE_RM_SIZE (type
),
5258 GET_MODE_BITSIZE (TYPE_MODE (type
))))
5260 tree rec_type
= make_node (RECORD_TYPE
);
5261 unsigned HOST_WIDE_INT prec
= TREE_INT_CST_LOW (TYPE_RM_SIZE (type
));
5262 tree field_type
, field
;
5264 if (TYPE_UNSIGNED (type
))
5265 field_type
= make_unsigned_type (prec
);
5267 field_type
= make_signed_type (prec
);
5268 SET_TYPE_RM_SIZE (field_type
, TYPE_RM_SIZE (type
));
5270 field
= create_field_decl (get_identifier ("OBJ"), field_type
, rec_type
,
5271 NULL_TREE
, NULL_TREE
, 1, 0);
5273 TYPE_FIELDS (rec_type
) = field
;
5274 layout_type (rec_type
);
5276 expr
= unchecked_convert (rec_type
, expr
, notrunc_p
);
5277 expr
= build_component_ref (expr
, NULL_TREE
, field
, false);
5278 expr
= fold_build1 (NOP_EXPR
, type
, expr
);
5281 /* Similarly if we are converting from an integral type whose precision is
5282 not equal to its size, first copy into a field of the given precision
5283 and unchecked convert the record type. */
5284 else if (INTEGRAL_TYPE_P (etype
)
5285 && TYPE_RM_SIZE (etype
)
5286 && 0 != compare_tree_int (TYPE_RM_SIZE (etype
),
5287 GET_MODE_BITSIZE (TYPE_MODE (etype
))))
5289 tree rec_type
= make_node (RECORD_TYPE
);
5290 unsigned HOST_WIDE_INT prec
= TREE_INT_CST_LOW (TYPE_RM_SIZE (etype
));
5291 VEC(constructor_elt
,gc
) *v
= VEC_alloc (constructor_elt
, gc
, 1);
5292 tree field_type
, field
;
5294 if (TYPE_UNSIGNED (etype
))
5295 field_type
= make_unsigned_type (prec
);
5297 field_type
= make_signed_type (prec
);
5298 SET_TYPE_RM_SIZE (field_type
, TYPE_RM_SIZE (etype
));
5300 field
= create_field_decl (get_identifier ("OBJ"), field_type
, rec_type
,
5301 NULL_TREE
, NULL_TREE
, 1, 0);
5303 TYPE_FIELDS (rec_type
) = field
;
5304 layout_type (rec_type
);
5306 expr
= fold_build1 (NOP_EXPR
, field_type
, expr
);
5307 CONSTRUCTOR_APPEND_ELT (v
, field
, expr
);
5308 expr
= gnat_build_constructor (rec_type
, v
);
5309 expr
= unchecked_convert (type
, expr
, notrunc_p
);
5312 /* If we are converting from a scalar type to a type with a different size,
5313 we need to pad to have the same size on both sides.
5315 ??? We cannot do it unconditionally because unchecked conversions are
5316 used liberally by the front-end to implement polymorphism, e.g. in:
5318 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
5319 return p___size__4 (p__object!(S191s.all));
5321 so we skip all expressions that are references. */
5322 else if (!REFERENCE_CLASS_P (expr
)
5323 && !AGGREGATE_TYPE_P (etype
)
5324 && TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
5325 && (c
= tree_int_cst_compare (TYPE_SIZE (etype
), TYPE_SIZE (type
))))
5329 expr
= convert (maybe_pad_type (etype
, TYPE_SIZE (type
), 0, Empty
,
5330 false, false, false, true),
5332 expr
= unchecked_convert (type
, expr
, notrunc_p
);
5336 tree rec_type
= maybe_pad_type (type
, TYPE_SIZE (etype
), 0, Empty
,
5337 false, false, false, true);
5338 expr
= unchecked_convert (rec_type
, expr
, notrunc_p
);
5339 expr
= build_component_ref (expr
, NULL_TREE
, TYPE_FIELDS (rec_type
),
5344 /* We have a special case when we are converting between two unconstrained
5345 array types. In that case, take the address, convert the fat pointer
5346 types, and dereference. */
5347 else if (ecode
== code
&& code
== UNCONSTRAINED_ARRAY_TYPE
)
5348 expr
= build_unary_op (INDIRECT_REF
, NULL_TREE
,
5349 build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (type
),
5350 build_unary_op (ADDR_EXPR
, NULL_TREE
,
5353 /* Another special case is when we are converting to a vector type from its
5354 representative array type; this a regular conversion. */
5355 else if (code
== VECTOR_TYPE
5356 && ecode
== ARRAY_TYPE
5357 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type
),
5359 expr
= convert (type
, expr
);
5363 expr
= maybe_unconstrained_array (expr
);
5364 etype
= TREE_TYPE (expr
);
5365 ecode
= TREE_CODE (etype
);
5366 if (can_fold_for_view_convert_p (expr
))
5367 expr
= fold_build1 (VIEW_CONVERT_EXPR
, type
, expr
);
5369 expr
= build1 (VIEW_CONVERT_EXPR
, type
, expr
);
5372 /* If the result is an integral type whose precision is not equal to its
5373 size, sign- or zero-extend the result. We need not do this if the input
5374 is an integral type of the same precision and signedness or if the output
5375 is a biased type or if both the input and output are unsigned. */
5377 && INTEGRAL_TYPE_P (type
) && TYPE_RM_SIZE (type
)
5378 && !(code
== INTEGER_TYPE
&& TYPE_BIASED_REPRESENTATION_P (type
))
5379 && 0 != compare_tree_int (TYPE_RM_SIZE (type
),
5380 GET_MODE_BITSIZE (TYPE_MODE (type
)))
5381 && !(INTEGRAL_TYPE_P (etype
)
5382 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (etype
)
5383 && operand_equal_p (TYPE_RM_SIZE (type
),
5384 (TYPE_RM_SIZE (etype
) != 0
5385 ? TYPE_RM_SIZE (etype
) : TYPE_SIZE (etype
)),
5387 && !(TYPE_UNSIGNED (type
) && TYPE_UNSIGNED (etype
)))
5390 = gnat_type_for_mode (TYPE_MODE (type
), TYPE_UNSIGNED (type
));
5392 = convert (base_type
,
5393 size_binop (MINUS_EXPR
,
5395 (GET_MODE_BITSIZE (TYPE_MODE (type
))),
5396 TYPE_RM_SIZE (type
)));
5399 build_binary_op (RSHIFT_EXPR
, base_type
,
5400 build_binary_op (LSHIFT_EXPR
, base_type
,
5401 convert (base_type
, expr
),
5406 /* An unchecked conversion should never raise Constraint_Error. The code
5407 below assumes that GCC's conversion routines overflow the same way that
5408 the underlying hardware does. This is probably true. In the rare case
5409 when it is false, we can rely on the fact that such conversions are
5410 erroneous anyway. */
5411 if (TREE_CODE (expr
) == INTEGER_CST
)
5412 TREE_OVERFLOW (expr
) = 0;
5414 /* If the sizes of the types differ and this is an VIEW_CONVERT_EXPR,
5415 show no longer constant. */
5416 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
5417 && !operand_equal_p (TYPE_SIZE_UNIT (type
), TYPE_SIZE_UNIT (etype
),
5419 TREE_CONSTANT (expr
) = 0;
5424 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
5425 the latter being a record type as predicated by Is_Record_Type. */
5428 tree_code_for_record_type (Entity_Id gnat_type
)
5430 Node_Id component_list
, component
;
5432 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5433 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
5434 if (!Is_Unchecked_Union (gnat_type
))
5437 gnat_type
= Implementation_Base_Type (gnat_type
);
5439 = Component_List (Type_Definition (Declaration_Node (gnat_type
)));
5441 for (component
= First_Non_Pragma (Component_Items (component_list
));
5442 Present (component
);
5443 component
= Next_Non_Pragma (component
))
5444 if (Ekind (Defining_Entity (component
)) == E_Component
)
5450 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5451 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5452 according to the presence of an alignment clause on the type or, if it
5453 is an array, on the component type. */
5456 is_double_float_or_array (Entity_Id gnat_type
, bool *align_clause
)
5458 gnat_type
= Underlying_Type (gnat_type
);
5460 *align_clause
= Present (Alignment_Clause (gnat_type
));
5462 if (Is_Array_Type (gnat_type
))
5464 gnat_type
= Underlying_Type (Component_Type (gnat_type
));
5465 if (Present (Alignment_Clause (gnat_type
)))
5466 *align_clause
= true;
5469 if (!Is_Floating_Point_Type (gnat_type
))
5472 if (UI_To_Int (Esize (gnat_type
)) != 64)
5478 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5479 size is greater or equal to 64 bits, or an array of such a type. Set
5480 ALIGN_CLAUSE according to the presence of an alignment clause on the
5481 type or, if it is an array, on the component type. */
5484 is_double_scalar_or_array (Entity_Id gnat_type
, bool *align_clause
)
5486 gnat_type
= Underlying_Type (gnat_type
);
5488 *align_clause
= Present (Alignment_Clause (gnat_type
));
5490 if (Is_Array_Type (gnat_type
))
5492 gnat_type
= Underlying_Type (Component_Type (gnat_type
));
5493 if (Present (Alignment_Clause (gnat_type
)))
5494 *align_clause
= true;
5497 if (!Is_Scalar_Type (gnat_type
))
5500 if (UI_To_Int (Esize (gnat_type
)) < 64)
5506 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
5507 component of an aggregate type. */
5510 type_for_nonaliased_component_p (tree gnu_type
)
5512 /* If the type is passed by reference, we may have pointers to the
5513 component so it cannot be made non-aliased. */
5514 if (must_pass_by_ref (gnu_type
) || default_pass_by_ref (gnu_type
))
5517 /* We used to say that any component of aggregate type is aliased
5518 because the front-end may take 'Reference of it. The front-end
5519 has been enhanced in the meantime so as to use a renaming instead
5520 in most cases, but the back-end can probably take the address of
5521 such a component too so we go for the conservative stance.
5523 For instance, we might need the address of any array type, even
5524 if normally passed by copy, to construct a fat pointer if the
5525 component is used as an actual for an unconstrained formal.
5527 Likewise for record types: even if a specific record subtype is
5528 passed by copy, the parent type might be passed by ref (e.g. if
5529 it's of variable size) and we might take the address of a child
5530 component to pass to a parent formal. We have no way to check
5531 for such conditions here. */
5532 if (AGGREGATE_TYPE_P (gnu_type
))
5538 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
5541 smaller_form_type_p (tree type
, tree orig_type
)
5545 /* We're not interested in variants here. */
5546 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig_type
))
5549 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5550 if (TYPE_NAME (type
) != TYPE_NAME (orig_type
))
5553 size
= TYPE_SIZE (type
);
5554 osize
= TYPE_SIZE (orig_type
);
5556 if (!(TREE_CODE (size
) == INTEGER_CST
&& TREE_CODE (osize
) == INTEGER_CST
))
5559 return tree_int_cst_lt (size
, osize
) != 0;
5562 /* Perform final processing on global variables. */
5564 static GTY (()) tree dummy_global
;
5567 gnat_write_global_declarations (void)
5572 /* If we have declared types as used at the global level, insert them in
5573 the global hash table. We use a dummy variable for this purpose. */
5574 if (!VEC_empty (tree
, types_used_by_cur_var_decl
))
5576 struct varpool_node
*node
;
5579 ASM_FORMAT_PRIVATE_NAME (label
, first_global_object_name
, 0);
5581 = build_decl (BUILTINS_LOCATION
, VAR_DECL
, get_identifier (label
),
5583 TREE_STATIC (dummy_global
) = 1;
5584 TREE_ASM_WRITTEN (dummy_global
) = 1;
5585 node
= varpool_node (dummy_global
);
5586 node
->symbol
.force_output
= 1;
5588 while (!VEC_empty (tree
, types_used_by_cur_var_decl
))
5590 tree t
= VEC_pop (tree
, types_used_by_cur_var_decl
);
5591 types_used_by_var_decl_insert (t
, dummy_global
);
5595 /* Output debug information for all global type declarations first. This
5596 ensures that global types whose compilation hasn't been finalized yet,
5597 for example pointers to Taft amendment types, have their compilation
5598 finalized in the right context. */
5599 FOR_EACH_VEC_ELT (tree
, global_decls
, i
, iter
)
5600 if (TREE_CODE (iter
) == TYPE_DECL
)
5601 debug_hooks
->global_decl (iter
);
5603 /* Proceed to optimize and emit assembly. */
5604 finalize_compilation_unit ();
5606 /* After cgraph has had a chance to emit everything that's going to
5607 be emitted, output debug information for the rest of globals. */
5610 timevar_push (TV_SYMOUT
);
5611 FOR_EACH_VEC_ELT (tree
, global_decls
, i
, iter
)
5612 if (TREE_CODE (iter
) != TYPE_DECL
)
5613 debug_hooks
->global_decl (iter
);
5614 timevar_pop (TV_SYMOUT
);
5618 /* ************************************************************************
5619 * * GCC builtins support *
5620 * ************************************************************************ */
5622 /* The general scheme is fairly simple:
5624 For each builtin function/type to be declared, gnat_install_builtins calls
5625 internal facilities which eventually get to gnat_push_decl, which in turn
5626 tracks the so declared builtin function decls in the 'builtin_decls' global
5627 datastructure. When an Intrinsic subprogram declaration is processed, we
5628 search this global datastructure to retrieve the associated BUILT_IN DECL
5631 /* Search the chain of currently available builtin declarations for a node
5632 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5633 found, if any, or NULL_TREE otherwise. */
5635 builtin_decl_for (tree name
)
5640 FOR_EACH_VEC_ELT (tree
, builtin_decls
, i
, decl
)
5641 if (DECL_NAME (decl
) == name
)
5647 /* The code below eventually exposes gnat_install_builtins, which declares
5648 the builtin types and functions we might need, either internally or as
5649 user accessible facilities.
5651 ??? This is a first implementation shot, still in rough shape. It is
5652 heavily inspired from the "C" family implementation, with chunks copied
5653 verbatim from there.
5655 Two obvious TODO candidates are
5656 o Use a more efficient name/decl mapping scheme
5657 o Devise a middle-end infrastructure to avoid having to copy
5658 pieces between front-ends. */
5660 /* ----------------------------------------------------------------------- *
5661 * BUILTIN ELEMENTARY TYPES *
5662 * ----------------------------------------------------------------------- */
5664 /* Standard data types to be used in builtin argument declarations. */
5668 CTI_SIGNED_SIZE_TYPE
, /* For format checking only. */
5670 CTI_CONST_STRING_TYPE
,
5675 static tree c_global_trees
[CTI_MAX
];
5677 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
5678 #define string_type_node c_global_trees[CTI_STRING_TYPE]
5679 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
5681 /* ??? In addition some attribute handlers, we currently don't support a
5682 (small) number of builtin-types, which in turns inhibits support for a
5683 number of builtin functions. */
5684 #define wint_type_node void_type_node
5685 #define intmax_type_node void_type_node
5686 #define uintmax_type_node void_type_node
5688 /* Build the void_list_node (void_type_node having been created). */
5691 build_void_list_node (void)
5693 tree t
= build_tree_list (NULL_TREE
, void_type_node
);
5697 /* Used to help initialize the builtin-types.def table. When a type of
5698 the correct size doesn't exist, use error_mark_node instead of NULL.
5699 The later results in segfaults even when a decl using the type doesn't
5703 builtin_type_for_size (int size
, bool unsignedp
)
5705 tree type
= gnat_type_for_size (size
, unsignedp
);
5706 return type
? type
: error_mark_node
;
5709 /* Build/push the elementary type decls that builtin functions/types
5713 install_builtin_elementary_types (void)
5715 signed_size_type_node
= gnat_signed_type (size_type_node
);
5716 pid_type_node
= integer_type_node
;
5717 void_list_node
= build_void_list_node ();
5719 string_type_node
= build_pointer_type (char_type_node
);
5720 const_string_type_node
5721 = build_pointer_type (build_qualified_type
5722 (char_type_node
, TYPE_QUAL_CONST
));
5725 /* ----------------------------------------------------------------------- *
5726 * BUILTIN FUNCTION TYPES *
5727 * ----------------------------------------------------------------------- */
5729 /* Now, builtin function types per se. */
5733 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
5734 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
5735 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
5736 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
5737 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5738 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5739 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
5740 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
5741 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
5742 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
5743 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
5744 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
5745 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5746 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5747 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG6) \
5749 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
5750 #include "builtin-types.def"
5751 #undef DEF_PRIMITIVE_TYPE
5752 #undef DEF_FUNCTION_TYPE_0
5753 #undef DEF_FUNCTION_TYPE_1
5754 #undef DEF_FUNCTION_TYPE_2
5755 #undef DEF_FUNCTION_TYPE_3
5756 #undef DEF_FUNCTION_TYPE_4
5757 #undef DEF_FUNCTION_TYPE_5
5758 #undef DEF_FUNCTION_TYPE_6
5759 #undef DEF_FUNCTION_TYPE_7
5760 #undef DEF_FUNCTION_TYPE_VAR_0
5761 #undef DEF_FUNCTION_TYPE_VAR_1
5762 #undef DEF_FUNCTION_TYPE_VAR_2
5763 #undef DEF_FUNCTION_TYPE_VAR_3
5764 #undef DEF_FUNCTION_TYPE_VAR_4
5765 #undef DEF_FUNCTION_TYPE_VAR_5
5766 #undef DEF_POINTER_TYPE
5770 typedef enum c_builtin_type builtin_type
;
5772 /* A temporary array used in communication with def_fn_type. */
5773 static GTY(()) tree builtin_types
[(int) BT_LAST
+ 1];
5775 /* A helper function for install_builtin_types. Build function type
5776 for DEF with return type RET and N arguments. If VAR is true, then the
5777 function should be variadic after those N arguments.
5779 Takes special care not to ICE if any of the types involved are
5780 error_mark_node, which indicates that said type is not in fact available
5781 (see builtin_type_for_size). In which case the function type as a whole
5782 should be error_mark_node. */
5785 def_fn_type (builtin_type def
, builtin_type ret
, bool var
, int n
, ...)
5788 tree
*args
= XALLOCAVEC (tree
, n
);
5793 for (i
= 0; i
< n
; ++i
)
5795 builtin_type a
= (builtin_type
) va_arg (list
, int);
5796 t
= builtin_types
[a
];
5797 if (t
== error_mark_node
)
5802 t
= builtin_types
[ret
];
5803 if (t
== error_mark_node
)
5806 t
= build_varargs_function_type_array (t
, n
, args
);
5808 t
= build_function_type_array (t
, n
, args
);
5811 builtin_types
[def
] = t
;
5815 /* Build the builtin function types and install them in the builtin_types
5816 array for later use in builtin function decls. */
5819 install_builtin_function_types (void)
5821 tree va_list_ref_type_node
;
5822 tree va_list_arg_type_node
;
5824 if (TREE_CODE (va_list_type_node
) == ARRAY_TYPE
)
5826 va_list_arg_type_node
= va_list_ref_type_node
=
5827 build_pointer_type (TREE_TYPE (va_list_type_node
));
5831 va_list_arg_type_node
= va_list_type_node
;
5832 va_list_ref_type_node
= build_reference_type (va_list_type_node
);
5835 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5836 builtin_types[ENUM] = VALUE;
5837 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5838 def_fn_type (ENUM, RETURN, 0, 0);
5839 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5840 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5841 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5842 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5843 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5844 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5845 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5846 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5847 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5848 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5849 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5851 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5852 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5854 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5855 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5856 def_fn_type (ENUM, RETURN, 1, 0);
5857 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5858 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5859 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5860 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5861 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5862 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5863 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5864 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5865 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5866 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5867 #define DEF_POINTER_TYPE(ENUM, TYPE) \
5868 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5870 #include "builtin-types.def"
5872 #undef DEF_PRIMITIVE_TYPE
5873 #undef DEF_FUNCTION_TYPE_1
5874 #undef DEF_FUNCTION_TYPE_2
5875 #undef DEF_FUNCTION_TYPE_3
5876 #undef DEF_FUNCTION_TYPE_4
5877 #undef DEF_FUNCTION_TYPE_5
5878 #undef DEF_FUNCTION_TYPE_6
5879 #undef DEF_FUNCTION_TYPE_VAR_0
5880 #undef DEF_FUNCTION_TYPE_VAR_1
5881 #undef DEF_FUNCTION_TYPE_VAR_2
5882 #undef DEF_FUNCTION_TYPE_VAR_3
5883 #undef DEF_FUNCTION_TYPE_VAR_4
5884 #undef DEF_FUNCTION_TYPE_VAR_5
5885 #undef DEF_POINTER_TYPE
5886 builtin_types
[(int) BT_LAST
] = NULL_TREE
;
5889 /* ----------------------------------------------------------------------- *
5890 * BUILTIN ATTRIBUTES *
5891 * ----------------------------------------------------------------------- */
5893 enum built_in_attribute
5895 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
5896 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
5897 #define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
5898 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
5899 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
5900 #include "builtin-attrs.def"
5901 #undef DEF_ATTR_NULL_TREE
5903 #undef DEF_ATTR_STRING
5904 #undef DEF_ATTR_IDENT
5905 #undef DEF_ATTR_TREE_LIST
5909 static GTY(()) tree built_in_attributes
[(int) ATTR_LAST
];
5912 install_builtin_attributes (void)
5914 /* Fill in the built_in_attributes array. */
5915 #define DEF_ATTR_NULL_TREE(ENUM) \
5916 built_in_attributes[(int) ENUM] = NULL_TREE;
5917 #define DEF_ATTR_INT(ENUM, VALUE) \
5918 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
5919 #define DEF_ATTR_STRING(ENUM, VALUE) \
5920 built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
5921 #define DEF_ATTR_IDENT(ENUM, STRING) \
5922 built_in_attributes[(int) ENUM] = get_identifier (STRING);
5923 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
5924 built_in_attributes[(int) ENUM] \
5925 = tree_cons (built_in_attributes[(int) PURPOSE], \
5926 built_in_attributes[(int) VALUE], \
5927 built_in_attributes[(int) CHAIN]);
5928 #include "builtin-attrs.def"
5929 #undef DEF_ATTR_NULL_TREE
5931 #undef DEF_ATTR_STRING
5932 #undef DEF_ATTR_IDENT
5933 #undef DEF_ATTR_TREE_LIST
5936 /* Handle a "const" attribute; arguments as in
5937 struct attribute_spec.handler. */
5940 handle_const_attribute (tree
*node
, tree
ARG_UNUSED (name
),
5941 tree
ARG_UNUSED (args
), int ARG_UNUSED (flags
),
5944 if (TREE_CODE (*node
) == FUNCTION_DECL
)
5945 TREE_READONLY (*node
) = 1;
5947 *no_add_attrs
= true;
5952 /* Handle a "nothrow" attribute; arguments as in
5953 struct attribute_spec.handler. */
5956 handle_nothrow_attribute (tree
*node
, tree
ARG_UNUSED (name
),
5957 tree
ARG_UNUSED (args
), int ARG_UNUSED (flags
),
5960 if (TREE_CODE (*node
) == FUNCTION_DECL
)
5961 TREE_NOTHROW (*node
) = 1;
5963 *no_add_attrs
= true;
5968 /* Handle a "pure" attribute; arguments as in
5969 struct attribute_spec.handler. */
5972 handle_pure_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
5973 int ARG_UNUSED (flags
), bool *no_add_attrs
)
5975 if (TREE_CODE (*node
) == FUNCTION_DECL
)
5976 DECL_PURE_P (*node
) = 1;
5977 /* ??? TODO: Support types. */
5980 warning (OPT_Wattributes
, "%qs attribute ignored",
5981 IDENTIFIER_POINTER (name
));
5982 *no_add_attrs
= true;
5988 /* Handle a "no vops" attribute; arguments as in
5989 struct attribute_spec.handler. */
5992 handle_novops_attribute (tree
*node
, tree
ARG_UNUSED (name
),
5993 tree
ARG_UNUSED (args
), int ARG_UNUSED (flags
),
5994 bool *ARG_UNUSED (no_add_attrs
))
5996 gcc_assert (TREE_CODE (*node
) == FUNCTION_DECL
);
5997 DECL_IS_NOVOPS (*node
) = 1;
6001 /* Helper for nonnull attribute handling; fetch the operand number
6002 from the attribute argument list. */
6005 get_nonnull_operand (tree arg_num_expr
, unsigned HOST_WIDE_INT
*valp
)
6007 /* Verify the arg number is a constant. */
6008 if (TREE_CODE (arg_num_expr
) != INTEGER_CST
6009 || TREE_INT_CST_HIGH (arg_num_expr
) != 0)
6012 *valp
= TREE_INT_CST_LOW (arg_num_expr
);
6016 /* Handle the "nonnull" attribute. */
6018 handle_nonnull_attribute (tree
*node
, tree
ARG_UNUSED (name
),
6019 tree args
, int ARG_UNUSED (flags
),
6023 unsigned HOST_WIDE_INT attr_arg_num
;
6025 /* If no arguments are specified, all pointer arguments should be
6026 non-null. Verify a full prototype is given so that the arguments
6027 will have the correct types when we actually check them later. */
6030 if (!prototype_p (type
))
6032 error ("nonnull attribute without arguments on a non-prototype");
6033 *no_add_attrs
= true;
6038 /* Argument list specified. Verify that each argument number references
6039 a pointer argument. */
6040 for (attr_arg_num
= 1; args
; args
= TREE_CHAIN (args
))
6042 unsigned HOST_WIDE_INT arg_num
= 0, ck_num
;
6044 if (!get_nonnull_operand (TREE_VALUE (args
), &arg_num
))
6046 error ("nonnull argument has invalid operand number (argument %lu)",
6047 (unsigned long) attr_arg_num
);
6048 *no_add_attrs
= true;
6052 if (prototype_p (type
))
6054 function_args_iterator iter
;
6057 function_args_iter_init (&iter
, type
);
6058 for (ck_num
= 1; ; ck_num
++, function_args_iter_next (&iter
))
6060 argument
= function_args_iter_cond (&iter
);
6061 if (!argument
|| ck_num
== arg_num
)
6066 || TREE_CODE (argument
) == VOID_TYPE
)
6068 error ("nonnull argument with out-of-range operand number "
6069 "(argument %lu, operand %lu)",
6070 (unsigned long) attr_arg_num
, (unsigned long) arg_num
);
6071 *no_add_attrs
= true;
6075 if (TREE_CODE (argument
) != POINTER_TYPE
)
6077 error ("nonnull argument references non-pointer operand "
6078 "(argument %lu, operand %lu)",
6079 (unsigned long) attr_arg_num
, (unsigned long) arg_num
);
6080 *no_add_attrs
= true;
6089 /* Handle a "sentinel" attribute. */
6092 handle_sentinel_attribute (tree
*node
, tree name
, tree args
,
6093 int ARG_UNUSED (flags
), bool *no_add_attrs
)
6095 if (!prototype_p (*node
))
6097 warning (OPT_Wattributes
,
6098 "%qs attribute requires prototypes with named arguments",
6099 IDENTIFIER_POINTER (name
));
6100 *no_add_attrs
= true;
6104 if (!stdarg_p (*node
))
6106 warning (OPT_Wattributes
,
6107 "%qs attribute only applies to variadic functions",
6108 IDENTIFIER_POINTER (name
));
6109 *no_add_attrs
= true;
6115 tree position
= TREE_VALUE (args
);
6117 if (TREE_CODE (position
) != INTEGER_CST
)
6119 warning (0, "requested position is not an integer constant");
6120 *no_add_attrs
= true;
6124 if (tree_int_cst_lt (position
, integer_zero_node
))
6126 warning (0, "requested position is less than zero");
6127 *no_add_attrs
= true;
6135 /* Handle a "noreturn" attribute; arguments as in
6136 struct attribute_spec.handler. */
6139 handle_noreturn_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
6140 int ARG_UNUSED (flags
), bool *no_add_attrs
)
6142 tree type
= TREE_TYPE (*node
);
6144 /* See FIXME comment in c_common_attribute_table. */
6145 if (TREE_CODE (*node
) == FUNCTION_DECL
)
6146 TREE_THIS_VOLATILE (*node
) = 1;
6147 else if (TREE_CODE (type
) == POINTER_TYPE
6148 && TREE_CODE (TREE_TYPE (type
)) == FUNCTION_TYPE
)
6150 = build_pointer_type
6151 (build_type_variant (TREE_TYPE (type
),
6152 TYPE_READONLY (TREE_TYPE (type
)), 1));
6155 warning (OPT_Wattributes
, "%qs attribute ignored",
6156 IDENTIFIER_POINTER (name
));
6157 *no_add_attrs
= true;
6163 /* Handle a "leaf" attribute; arguments as in
6164 struct attribute_spec.handler. */
6167 handle_leaf_attribute (tree
*node
, tree name
,
6168 tree
ARG_UNUSED (args
),
6169 int ARG_UNUSED (flags
), bool *no_add_attrs
)
6171 if (TREE_CODE (*node
) != FUNCTION_DECL
)
6173 warning (OPT_Wattributes
, "%qE attribute ignored", name
);
6174 *no_add_attrs
= true;
6176 if (!TREE_PUBLIC (*node
))
6178 warning (OPT_Wattributes
, "%qE attribute has no effect", name
);
6179 *no_add_attrs
= true;
6185 /* Handle a "malloc" attribute; arguments as in
6186 struct attribute_spec.handler. */
6189 handle_malloc_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
6190 int ARG_UNUSED (flags
), bool *no_add_attrs
)
6192 if (TREE_CODE (*node
) == FUNCTION_DECL
6193 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node
))))
6194 DECL_IS_MALLOC (*node
) = 1;
6197 warning (OPT_Wattributes
, "%qs attribute ignored",
6198 IDENTIFIER_POINTER (name
));
6199 *no_add_attrs
= true;
6205 /* Fake handler for attributes we don't properly support. */
6208 fake_attribute_handler (tree
* ARG_UNUSED (node
),
6209 tree
ARG_UNUSED (name
),
6210 tree
ARG_UNUSED (args
),
6211 int ARG_UNUSED (flags
),
6212 bool * ARG_UNUSED (no_add_attrs
))
6217 /* Handle a "type_generic" attribute. */
6220 handle_type_generic_attribute (tree
*node
, tree
ARG_UNUSED (name
),
6221 tree
ARG_UNUSED (args
), int ARG_UNUSED (flags
),
6222 bool * ARG_UNUSED (no_add_attrs
))
6224 /* Ensure we have a function type. */
6225 gcc_assert (TREE_CODE (*node
) == FUNCTION_TYPE
);
6227 /* Ensure we have a variadic function. */
6228 gcc_assert (!prototype_p (*node
) || stdarg_p (*node
));
6233 /* Handle a "vector_size" attribute; arguments as in
6234 struct attribute_spec.handler. */
6237 handle_vector_size_attribute (tree
*node
, tree name
, tree args
,
6238 int ARG_UNUSED (flags
),
6241 unsigned HOST_WIDE_INT vecsize
, nunits
;
6242 enum machine_mode orig_mode
;
6243 tree type
= *node
, new_type
, size
;
6245 *no_add_attrs
= true;
6247 size
= TREE_VALUE (args
);
6249 if (!host_integerp (size
, 1))
6251 warning (OPT_Wattributes
, "%qs attribute ignored",
6252 IDENTIFIER_POINTER (name
));
6256 /* Get the vector size (in bytes). */
6257 vecsize
= tree_low_cst (size
, 1);
6259 /* We need to provide for vector pointers, vector arrays, and
6260 functions returning vectors. For example:
6262 __attribute__((vector_size(16))) short *foo;
6264 In this case, the mode is SI, but the type being modified is
6265 HI, so we need to look further. */
6267 while (POINTER_TYPE_P (type
)
6268 || TREE_CODE (type
) == FUNCTION_TYPE
6269 || TREE_CODE (type
) == ARRAY_TYPE
)
6270 type
= TREE_TYPE (type
);
6272 /* Get the mode of the type being modified. */
6273 orig_mode
= TYPE_MODE (type
);
6275 if ((!INTEGRAL_TYPE_P (type
)
6276 && !SCALAR_FLOAT_TYPE_P (type
)
6277 && !FIXED_POINT_TYPE_P (type
))
6278 || (!SCALAR_FLOAT_MODE_P (orig_mode
)
6279 && GET_MODE_CLASS (orig_mode
) != MODE_INT
6280 && !ALL_SCALAR_FIXED_POINT_MODE_P (orig_mode
))
6281 || !host_integerp (TYPE_SIZE_UNIT (type
), 1)
6282 || TREE_CODE (type
) == BOOLEAN_TYPE
)
6284 error ("invalid vector type for attribute %qs",
6285 IDENTIFIER_POINTER (name
));
6289 if (vecsize
% tree_low_cst (TYPE_SIZE_UNIT (type
), 1))
6291 error ("vector size not an integral multiple of component size");
6297 error ("zero vector size");
6301 /* Calculate how many units fit in the vector. */
6302 nunits
= vecsize
/ tree_low_cst (TYPE_SIZE_UNIT (type
), 1);
6303 if (nunits
& (nunits
- 1))
6305 error ("number of components of the vector not a power of two");
6309 new_type
= build_vector_type (type
, nunits
);
6311 /* Build back pointers if needed. */
6312 *node
= reconstruct_complex_type (*node
, new_type
);
6317 /* Handle a "vector_type" attribute; arguments as in
6318 struct attribute_spec.handler. */
6321 handle_vector_type_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
6322 int ARG_UNUSED (flags
),
6325 /* Vector representative type and size. */
6326 tree rep_type
= *node
;
6327 tree rep_size
= TYPE_SIZE_UNIT (rep_type
);
6330 /* Vector size in bytes and number of units. */
6331 unsigned HOST_WIDE_INT vec_bytes
, vec_units
;
6333 /* Vector element type and mode. */
6335 enum machine_mode elem_mode
;
6337 *no_add_attrs
= true;
6339 /* Get the representative array type, possibly nested within a
6340 padding record e.g. for alignment purposes. */
6342 if (TYPE_IS_PADDING_P (rep_type
))
6343 rep_type
= TREE_TYPE (TYPE_FIELDS (rep_type
));
6345 if (TREE_CODE (rep_type
) != ARRAY_TYPE
)
6347 error ("attribute %qs applies to array types only",
6348 IDENTIFIER_POINTER (name
));
6352 /* Silently punt on variable sizes. We can't make vector types for them,
6353 need to ignore them on front-end generated subtypes of unconstrained
6354 bases, and this attribute is for binding implementors, not end-users, so
6355 we should never get there from legitimate explicit uses. */
6357 if (!host_integerp (rep_size
, 1))
6360 /* Get the element type/mode and check this is something we know
6361 how to make vectors of. */
6363 elem_type
= TREE_TYPE (rep_type
);
6364 elem_mode
= TYPE_MODE (elem_type
);
6366 if ((!INTEGRAL_TYPE_P (elem_type
)
6367 && !SCALAR_FLOAT_TYPE_P (elem_type
)
6368 && !FIXED_POINT_TYPE_P (elem_type
))
6369 || (!SCALAR_FLOAT_MODE_P (elem_mode
)
6370 && GET_MODE_CLASS (elem_mode
) != MODE_INT
6371 && !ALL_SCALAR_FIXED_POINT_MODE_P (elem_mode
))
6372 || !host_integerp (TYPE_SIZE_UNIT (elem_type
), 1))
6374 error ("invalid element type for attribute %qs",
6375 IDENTIFIER_POINTER (name
));
6379 /* Sanity check the vector size and element type consistency. */
6381 vec_bytes
= tree_low_cst (rep_size
, 1);
6383 if (vec_bytes
% tree_low_cst (TYPE_SIZE_UNIT (elem_type
), 1))
6385 error ("vector size not an integral multiple of component size");
6391 error ("zero vector size");
6395 vec_units
= vec_bytes
/ tree_low_cst (TYPE_SIZE_UNIT (elem_type
), 1);
6396 if (vec_units
& (vec_units
- 1))
6398 error ("number of components of the vector not a power of two");
6402 /* Build the vector type and replace. */
6404 *node
= build_vector_type (elem_type
, vec_units
);
6405 rep_name
= TYPE_NAME (rep_type
);
6406 if (TREE_CODE (rep_name
) == TYPE_DECL
)
6407 rep_name
= DECL_NAME (rep_name
);
6408 TYPE_NAME (*node
) = rep_name
;
6409 TYPE_REPRESENTATIVE_ARRAY (*node
) = rep_type
;
6414 /* ----------------------------------------------------------------------- *
6415 * BUILTIN FUNCTIONS *
6416 * ----------------------------------------------------------------------- */
6418 /* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
6419 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
6420 if nonansi_p and flag_no_nonansi_builtin. */
6423 def_builtin_1 (enum built_in_function fncode
,
6425 enum built_in_class fnclass
,
6426 tree fntype
, tree libtype
,
6427 bool both_p
, bool fallback_p
,
6428 bool nonansi_p ATTRIBUTE_UNUSED
,
6429 tree fnattrs
, bool implicit_p
)
6432 const char *libname
;
6434 /* Preserve an already installed decl. It most likely was setup in advance
6435 (e.g. as part of the internal builtins) for specific reasons. */
6436 if (builtin_decl_explicit (fncode
) != NULL_TREE
)
6439 gcc_assert ((!both_p
&& !fallback_p
)
6440 || !strncmp (name
, "__builtin_",
6441 strlen ("__builtin_")));
6443 libname
= name
+ strlen ("__builtin_");
6444 decl
= add_builtin_function (name
, fntype
, fncode
, fnclass
,
6445 (fallback_p
? libname
: NULL
),
6448 /* ??? This is normally further controlled by command-line options
6449 like -fno-builtin, but we don't have them for Ada. */
6450 add_builtin_function (libname
, libtype
, fncode
, fnclass
,
6453 set_builtin_decl (fncode
, decl
, implicit_p
);
/* Flags referenced by the COND expressions in builtins.def.  Ada has no
   -std=c* options, so both stay 0 (presumably disabling the C94/C99-only
   builtins — confirm against builtins.def conditions).  */
static int flag_isoc94 = 0;
static int flag_isoc99 = 0;
6459 /* Install what the common builtins.def offers. */
6462 install_builtin_functions (void)
6464 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
6465 NONANSI_P, ATTRS, IMPLICIT, COND) \
6467 def_builtin_1 (ENUM, NAME, CLASS, \
6468 builtin_types[(int) TYPE], \
6469 builtin_types[(int) LIBTYPE], \
6470 BOTH_P, FALLBACK_P, NONANSI_P, \
6471 built_in_attributes[(int) ATTRS], IMPLICIT);
6472 #include "builtins.def"
6476 /* ----------------------------------------------------------------------- *
6477 * BUILTIN FUNCTIONS *
6478 * ----------------------------------------------------------------------- */
6480 /* Install the builtin functions we might need. */
6483 gnat_install_builtins (void)
6485 install_builtin_elementary_types ();
6486 install_builtin_function_types ();
6487 install_builtin_attributes ();
6489 /* Install builtins used by generic middle-end pieces first. Some of these
6490 know about internal specificities and control attributes accordingly, for
6491 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
6492 the generic definition from builtins.def. */
6493 build_common_builtin_nodes ();
6495 /* Now, install the target specific builtins, such as the AltiVec family on
6496 ppc, and the common set as exposed by builtins.def. */
6497 targetm
.init_builtins ();
6498 install_builtin_functions ();
6501 #include "gt-ada-utils.h"
6502 #include "gtype-ada.h"