1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2013, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 ****************************************************************************/
28 #include "coretypes.h"
31 #include "stringpool.h"
32 #include "stor-layout.h"
37 #include "diagnostic-core.h"
43 #include "common/common-target.h"
44 #include "langhooks.h"
46 #include "diagnostic.h"
48 #include "tree-dump.h"
49 #include "tree-inline.h"
50 #include "tree-iterator.h"
66 /* If nonzero, pretend we are allocating at global level. */
69 /* The default alignment of "double" floating-point types, i.e. floating
70 point types whose size is equal to 64 bits, or 0 if this alignment is
71 not specifically capped. */
72 int double_float_alignment
;
74 /* The default alignment of "double" or larger scalar types, i.e. scalar
75 types whose size is greater or equal to 64 bits, or 0 if this alignment
76 is not specifically capped. */
77 int double_scalar_alignment
;
79 /* Tree nodes for the various types and decls we create. */
80 tree gnat_std_decls
[(int) ADT_LAST
];
82 /* Functions to call for each of the possible raise reasons. */
83 tree gnat_raise_decls
[(int) LAST_REASON_CODE
+ 1];
85 /* Likewise, but with extra info for each of the possible raise reasons. */
86 tree gnat_raise_decls_ext
[(int) LAST_REASON_CODE
+ 1];
88 /* Forward declarations for handlers of attributes. */
89 static tree
handle_const_attribute (tree
*, tree
, tree
, int, bool *);
90 static tree
handle_nothrow_attribute (tree
*, tree
, tree
, int, bool *);
91 static tree
handle_pure_attribute (tree
*, tree
, tree
, int, bool *);
92 static tree
handle_novops_attribute (tree
*, tree
, tree
, int, bool *);
93 static tree
handle_nonnull_attribute (tree
*, tree
, tree
, int, bool *);
94 static tree
handle_sentinel_attribute (tree
*, tree
, tree
, int, bool *);
95 static tree
handle_noreturn_attribute (tree
*, tree
, tree
, int, bool *);
96 static tree
handle_leaf_attribute (tree
*, tree
, tree
, int, bool *);
97 static tree
handle_malloc_attribute (tree
*, tree
, tree
, int, bool *);
98 static tree
handle_type_generic_attribute (tree
*, tree
, tree
, int, bool *);
99 static tree
handle_vector_size_attribute (tree
*, tree
, tree
, int, bool *);
100 static tree
handle_vector_type_attribute (tree
*, tree
, tree
, int, bool *);
102 /* Fake handler for attributes we don't properly support, typically because
103 they'd require dragging a lot of the common-c front-end circuitry. */
104 static tree
fake_attribute_handler (tree
*, tree
, tree
, int, bool *);
106 /* Table of machine-independent internal attributes for Ada. We support
107 this minimal set of attributes to accommodate the needs of builtins. */
108 const struct attribute_spec gnat_internal_attribute_table
[] =
110 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
111 affects_type_identity } */
112 { "const", 0, 0, true, false, false, handle_const_attribute
,
114 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute
,
116 { "pure", 0, 0, true, false, false, handle_pure_attribute
,
118 { "no vops", 0, 0, true, false, false, handle_novops_attribute
,
120 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute
,
122 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute
,
124 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute
,
126 { "leaf", 0, 0, true, false, false, handle_leaf_attribute
,
128 { "malloc", 0, 0, true, false, false, handle_malloc_attribute
,
130 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute
,
133 { "vector_size", 1, 1, false, true, false, handle_vector_size_attribute
,
135 { "vector_type", 0, 0, false, true, false, handle_vector_type_attribute
,
137 { "may_alias", 0, 0, false, true, false, NULL
, false },
139 /* ??? format and format_arg are heavy and not supported, which actually
140 prevents support for stdio builtins, which we however declare as part
141 of the common builtins.def contents. */
142 { "format", 3, 3, false, true, true, fake_attribute_handler
, false },
143 { "format_arg", 1, 1, false, true, true, fake_attribute_handler
, false },
145 { NULL
, 0, 0, false, false, false, NULL
, false }
148 /* Associates a GNAT tree node to a GCC tree node. It is used in
149 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
150 of `save_gnu_tree' for more info. */
151 static GTY((length ("max_gnat_nodes"))) tree
*associate_gnat_to_gnu
;
153 #define GET_GNU_TREE(GNAT_ENTITY) \
154 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
156 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
157 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
159 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
160 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
162 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
163 static GTY((length ("max_gnat_nodes"))) tree
*dummy_node_table
;
165 #define GET_DUMMY_NODE(GNAT_ENTITY) \
166 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
168 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
169 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
171 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
172 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
174 /* This variable keeps a table for types for each precision so that we only
175 allocate each of them once. Signed and unsigned types are kept separate.
177 Note that these types are only used when fold-const requests something
178 special. Perhaps we should NOT share these types; we'll see how it
180 static GTY(()) tree signed_and_unsigned_types
[2 * MAX_BITS_PER_WORD
+ 1][2];
182 /* Likewise for float types, but record these by mode. */
183 static GTY(()) tree float_types
[NUM_MACHINE_MODES
];
185 /* For each binding contour we allocate a binding_level structure to indicate
186 the binding depth. */
188 struct GTY((chain_next ("%h.chain"))) gnat_binding_level
{
189 /* The binding level containing this one (the enclosing binding level). */
190 struct gnat_binding_level
*chain
;
191 /* The BLOCK node for this level. */
193 /* If nonzero, the setjmp buffer that needs to be updated for any
194 variable-sized definition within this context. */
198 /* The binding level currently in effect. */
199 static GTY(()) struct gnat_binding_level
*current_binding_level
;
201 /* A chain of gnat_binding_level structures awaiting reuse. */
202 static GTY((deletable
)) struct gnat_binding_level
*free_binding_level
;
204 /* The context to be used for global declarations. */
205 static GTY(()) tree global_context
;
207 /* An array of global declarations. */
208 static GTY(()) vec
<tree
, va_gc
> *global_decls
;
210 /* An array of builtin function declarations. */
211 static GTY(()) vec
<tree
, va_gc
> *builtin_decls
;
213 /* An array of global renaming pointers. */
214 static GTY(()) vec
<tree
, va_gc
> *global_renaming_pointers
;
216 /* A chain of unused BLOCK nodes. */
217 static GTY((deletable
)) tree free_block_chain
;
219 static int pad_type_hash_marked_p (const void *p
);
220 static hashval_t
pad_type_hash_hash (const void *p
);
221 static int pad_type_hash_eq (const void *p1
, const void *p2
);
223 /* A hash table of padded types. It is modelled on the generic type
224 hash table in tree.c, which must thus be used as a reference. */
225 struct GTY(()) pad_type_hash
{
230 static GTY ((if_marked ("pad_type_hash_marked_p"),
231 param_is (struct pad_type_hash
)))
232 htab_t pad_type_hash_table
;
234 static tree
merge_sizes (tree
, tree
, tree
, bool, bool);
235 static tree
compute_related_constant (tree
, tree
);
236 static tree
split_plus (tree
, tree
*);
237 static tree
float_type_for_precision (int, enum machine_mode
);
238 static tree
convert_to_fat_pointer (tree
, tree
);
239 static unsigned int scale_by_factor_of (tree
, unsigned int);
240 static bool potential_alignment_gap (tree
, tree
, tree
);
242 /* Initialize data structures of the utils.c module. */
245 init_gnat_utils (void)
247 /* Initialize the association of GNAT nodes to GCC trees. */
248 associate_gnat_to_gnu
= ggc_alloc_cleared_vec_tree (max_gnat_nodes
);
250 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
251 dummy_node_table
= ggc_alloc_cleared_vec_tree (max_gnat_nodes
);
253 /* Initialize the hash table of padded types. */
254 pad_type_hash_table
= htab_create_ggc (512, pad_type_hash_hash
,
255 pad_type_hash_eq
, 0);
258 /* Destroy data structures of the utils.c module. */
261 destroy_gnat_utils (void)
263 /* Destroy the association of GNAT nodes to GCC trees. */
264 ggc_free (associate_gnat_to_gnu
);
265 associate_gnat_to_gnu
= NULL
;
267 /* Destroy the association of GNAT nodes to GCC trees as dummies. */
268 ggc_free (dummy_node_table
);
269 dummy_node_table
= NULL
;
271 /* Destroy the hash table of padded types. */
272 htab_delete (pad_type_hash_table
);
273 pad_type_hash_table
= NULL
;
275 /* Invalidate the global renaming pointers. */
276 invalidate_global_renaming_pointers ();
279 /* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
280 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
281 If NO_CHECK is true, the latter check is suppressed.
283 If GNU_DECL is zero, reset a previous association. */
286 save_gnu_tree (Entity_Id gnat_entity
, tree gnu_decl
, bool no_check
)
288 /* Check that GNAT_ENTITY is not already defined and that it is being set
289 to something which is a decl. If that is not the case, this usually
290 means GNAT_ENTITY is defined twice, but occasionally is due to some
292 gcc_assert (!(gnu_decl
293 && (PRESENT_GNU_TREE (gnat_entity
)
294 || (!no_check
&& !DECL_P (gnu_decl
)))));
296 SET_GNU_TREE (gnat_entity
, gnu_decl
);
299 /* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
300 that was associated with it. If there is no such tree node, abort.
302 In some cases, such as delayed elaboration or expressions that need to
303 be elaborated only once, GNAT_ENTITY is really not an entity. */
306 get_gnu_tree (Entity_Id gnat_entity
)
308 gcc_assert (PRESENT_GNU_TREE (gnat_entity
));
309 return GET_GNU_TREE (gnat_entity
);
312 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
315 present_gnu_tree (Entity_Id gnat_entity
)
317 return PRESENT_GNU_TREE (gnat_entity
);
320 /* Make a dummy type corresponding to GNAT_TYPE. */
323 make_dummy_type (Entity_Id gnat_type
)
325 Entity_Id gnat_underlying
= Gigi_Equivalent_Type (gnat_type
);
328 /* If there is an equivalent type, get its underlying type. */
329 if (Present (gnat_underlying
))
330 gnat_underlying
= Gigi_Equivalent_Type (Underlying_Type (gnat_underlying
));
332 /* If there was no equivalent type (can only happen when just annotating
333 types) or underlying type, go back to the original type. */
334 if (No (gnat_underlying
))
335 gnat_underlying
= gnat_type
;
337 /* If it there already a dummy type, use that one. Else make one. */
338 if (PRESENT_DUMMY_NODE (gnat_underlying
))
339 return GET_DUMMY_NODE (gnat_underlying
);
341 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
343 gnu_type
= make_node (Is_Record_Type (gnat_underlying
)
344 ? tree_code_for_record_type (gnat_underlying
)
346 TYPE_NAME (gnu_type
) = get_entity_name (gnat_type
);
347 TYPE_DUMMY_P (gnu_type
) = 1;
348 TYPE_STUB_DECL (gnu_type
)
349 = create_type_stub_decl (TYPE_NAME (gnu_type
), gnu_type
);
350 if (Is_By_Reference_Type (gnat_underlying
))
351 TYPE_BY_REFERENCE_P (gnu_type
) = 1;
353 SET_DUMMY_NODE (gnat_underlying
, gnu_type
);
358 /* Return the dummy type that was made for GNAT_TYPE, if any. */
361 get_dummy_type (Entity_Id gnat_type
)
363 return GET_DUMMY_NODE (gnat_type
);
366 /* Build dummy fat and thin pointer types whose designated type is specified
367 by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter. */
370 build_dummy_unc_pointer_types (Entity_Id gnat_desig_type
, tree gnu_desig_type
)
372 tree gnu_template_type
, gnu_ptr_template
, gnu_array_type
, gnu_ptr_array
;
373 tree gnu_fat_type
, fields
, gnu_object_type
;
375 gnu_template_type
= make_node (RECORD_TYPE
);
376 TYPE_NAME (gnu_template_type
) = create_concat_name (gnat_desig_type
, "XUB");
377 TYPE_DUMMY_P (gnu_template_type
) = 1;
378 gnu_ptr_template
= build_pointer_type (gnu_template_type
);
380 gnu_array_type
= make_node (ENUMERAL_TYPE
);
381 TYPE_NAME (gnu_array_type
) = create_concat_name (gnat_desig_type
, "XUA");
382 TYPE_DUMMY_P (gnu_array_type
) = 1;
383 gnu_ptr_array
= build_pointer_type (gnu_array_type
);
385 gnu_fat_type
= make_node (RECORD_TYPE
);
386 /* Build a stub DECL to trigger the special processing for fat pointer types
388 TYPE_NAME (gnu_fat_type
)
389 = create_type_stub_decl (create_concat_name (gnat_desig_type
, "XUP"),
391 fields
= create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array
,
392 gnu_fat_type
, NULL_TREE
, NULL_TREE
, 0, 0);
394 = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template
,
395 gnu_fat_type
, NULL_TREE
, NULL_TREE
, 0, 0);
396 finish_fat_pointer_type (gnu_fat_type
, fields
);
397 SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type
, gnu_desig_type
);
398 /* Suppress debug info until after the type is completed. */
399 TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type
)) = 1;
401 gnu_object_type
= make_node (RECORD_TYPE
);
402 TYPE_NAME (gnu_object_type
) = create_concat_name (gnat_desig_type
, "XUT");
403 TYPE_DUMMY_P (gnu_object_type
) = 1;
405 TYPE_POINTER_TO (gnu_desig_type
) = gnu_fat_type
;
406 TYPE_OBJECT_RECORD_TYPE (gnu_desig_type
) = gnu_object_type
;
409 /* Return true if we are in the global binding level. */
412 global_bindings_p (void)
414 return force_global
|| current_function_decl
== NULL_TREE
;
417 /* Enter a new binding level. */
420 gnat_pushlevel (void)
422 struct gnat_binding_level
*newlevel
= NULL
;
424 /* Reuse a struct for this binding level, if there is one. */
425 if (free_binding_level
)
427 newlevel
= free_binding_level
;
428 free_binding_level
= free_binding_level
->chain
;
431 newlevel
= ggc_alloc_gnat_binding_level ();
433 /* Use a free BLOCK, if any; otherwise, allocate one. */
434 if (free_block_chain
)
436 newlevel
->block
= free_block_chain
;
437 free_block_chain
= BLOCK_CHAIN (free_block_chain
);
438 BLOCK_CHAIN (newlevel
->block
) = NULL_TREE
;
441 newlevel
->block
= make_node (BLOCK
);
443 /* Point the BLOCK we just made to its parent. */
444 if (current_binding_level
)
445 BLOCK_SUPERCONTEXT (newlevel
->block
) = current_binding_level
->block
;
447 BLOCK_VARS (newlevel
->block
) = NULL_TREE
;
448 BLOCK_SUBBLOCKS (newlevel
->block
) = NULL_TREE
;
449 TREE_USED (newlevel
->block
) = 1;
451 /* Add this level to the front of the chain (stack) of active levels. */
452 newlevel
->chain
= current_binding_level
;
453 newlevel
->jmpbuf_decl
= NULL_TREE
;
454 current_binding_level
= newlevel
;
457 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
458 and point FNDECL to this BLOCK. */
461 set_current_block_context (tree fndecl
)
463 BLOCK_SUPERCONTEXT (current_binding_level
->block
) = fndecl
;
464 DECL_INITIAL (fndecl
) = current_binding_level
->block
;
465 set_block_for_group (current_binding_level
->block
);
468 /* Set the jmpbuf_decl for the current binding level to DECL. */
471 set_block_jmpbuf_decl (tree decl
)
473 current_binding_level
->jmpbuf_decl
= decl
;
476 /* Get the jmpbuf_decl, if any, for the current binding level. */
479 get_block_jmpbuf_decl (void)
481 return current_binding_level
->jmpbuf_decl
;
484 /* Exit a binding level. Set any BLOCK into the current code group. */
489 struct gnat_binding_level
*level
= current_binding_level
;
490 tree block
= level
->block
;
492 BLOCK_VARS (block
) = nreverse (BLOCK_VARS (block
));
493 BLOCK_SUBBLOCKS (block
) = blocks_nreverse (BLOCK_SUBBLOCKS (block
));
495 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
496 are no variables free the block and merge its subblocks into those of its
497 parent block. Otherwise, add it to the list of its parent. */
498 if (TREE_CODE (BLOCK_SUPERCONTEXT (block
)) == FUNCTION_DECL
)
500 else if (BLOCK_VARS (block
) == NULL_TREE
)
502 BLOCK_SUBBLOCKS (level
->chain
->block
)
503 = block_chainon (BLOCK_SUBBLOCKS (block
),
504 BLOCK_SUBBLOCKS (level
->chain
->block
));
505 BLOCK_CHAIN (block
) = free_block_chain
;
506 free_block_chain
= block
;
510 BLOCK_CHAIN (block
) = BLOCK_SUBBLOCKS (level
->chain
->block
);
511 BLOCK_SUBBLOCKS (level
->chain
->block
) = block
;
512 TREE_USED (block
) = 1;
513 set_block_for_group (block
);
516 /* Free this binding structure. */
517 current_binding_level
= level
->chain
;
518 level
->chain
= free_binding_level
;
519 free_binding_level
= level
;
522 /* Exit a binding level and discard the associated BLOCK. */
527 struct gnat_binding_level
*level
= current_binding_level
;
528 tree block
= level
->block
;
530 BLOCK_CHAIN (block
) = free_block_chain
;
531 free_block_chain
= block
;
533 /* Free this binding structure. */
534 current_binding_level
= level
->chain
;
535 level
->chain
= free_binding_level
;
536 free_binding_level
= level
;
539 /* Set the context of TYPE and its parallel types (if any) to CONTEXT. */
542 gnat_set_type_context (tree type
, tree context
)
544 tree decl
= TYPE_STUB_DECL (type
);
546 TYPE_CONTEXT (type
) = context
;
548 while (decl
&& DECL_PARALLEL_TYPE (decl
))
550 TYPE_CONTEXT (DECL_PARALLEL_TYPE (decl
)) = context
;
551 decl
= TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl
));
555 /* Record DECL as belonging to the current lexical scope and use GNAT_NODE
556 for location information and flag propagation. */
559 gnat_pushdecl (tree decl
, Node_Id gnat_node
)
561 /* If DECL is public external or at top level, it has global context. */
562 if ((TREE_PUBLIC (decl
) && DECL_EXTERNAL (decl
)) || global_bindings_p ())
565 global_context
= build_translation_unit_decl (NULL_TREE
);
566 DECL_CONTEXT (decl
) = global_context
;
570 DECL_CONTEXT (decl
) = current_function_decl
;
572 /* Functions imported in another function are not really nested.
573 For really nested functions mark them initially as needing
574 a static chain for uses of that flag before unnesting;
575 lower_nested_functions will then recompute it. */
576 if (TREE_CODE (decl
) == FUNCTION_DECL
&& !TREE_PUBLIC (decl
))
577 DECL_STATIC_CHAIN (decl
) = 1;
580 TREE_NO_WARNING (decl
) = (No (gnat_node
) || Warnings_Off (gnat_node
));
582 /* Set the location of DECL and emit a declaration for it. */
583 if (Present (gnat_node
))
584 Sloc_to_locus (Sloc (gnat_node
), &DECL_SOURCE_LOCATION (decl
));
586 add_decl_expr (decl
, gnat_node
);
588 /* Put the declaration on the list. The list of declarations is in reverse
589 order. The list will be reversed later. Put global declarations in the
590 globals list and local ones in the current block. But skip TYPE_DECLs
591 for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
592 with the debugger and aren't needed anyway. */
593 if (!(TREE_CODE (decl
) == TYPE_DECL
594 && TREE_CODE (TREE_TYPE (decl
)) == UNCONSTRAINED_ARRAY_TYPE
))
596 if (DECL_EXTERNAL (decl
))
598 if (TREE_CODE (decl
) == FUNCTION_DECL
&& DECL_BUILT_IN (decl
))
599 vec_safe_push (builtin_decls
, decl
);
601 else if (global_bindings_p ())
602 vec_safe_push (global_decls
, decl
);
605 DECL_CHAIN (decl
) = BLOCK_VARS (current_binding_level
->block
);
606 BLOCK_VARS (current_binding_level
->block
) = decl
;
610 /* For the declaration of a type, set its name if it either is not already
611 set or if the previous type name was not derived from a source name.
612 We'd rather have the type named with a real name and all the pointer
613 types to the same object have the same POINTER_TYPE node. Code in the
614 equivalent function of c-decl.c makes a copy of the type node here, but
615 that may cause us trouble with incomplete types. We make an exception
616 for fat pointer types because the compiler automatically builds them
617 for unconstrained array types and the debugger uses them to represent
618 both these and pointers to these. */
619 if (TREE_CODE (decl
) == TYPE_DECL
&& DECL_NAME (decl
))
621 tree t
= TREE_TYPE (decl
);
623 if (!(TYPE_NAME (t
) && TREE_CODE (TYPE_NAME (t
)) == TYPE_DECL
))
625 /* Array and pointer types aren't "tagged" types so we force the
626 type to be associated with its typedef in the DWARF back-end,
627 in order to make sure that the latter is always preserved. */
628 if (!DECL_ARTIFICIAL (decl
)
629 && (TREE_CODE (t
) == ARRAY_TYPE
630 || TREE_CODE (t
) == POINTER_TYPE
))
632 tree tt
= build_distinct_type_copy (t
);
633 if (TREE_CODE (t
) == POINTER_TYPE
)
634 TYPE_NEXT_PTR_TO (t
) = tt
;
635 TYPE_NAME (tt
) = DECL_NAME (decl
);
636 gnat_set_type_context (tt
, DECL_CONTEXT (decl
));
637 TYPE_STUB_DECL (tt
) = TYPE_STUB_DECL (t
);
638 DECL_ORIGINAL_TYPE (decl
) = tt
;
641 else if (TYPE_IS_FAT_POINTER_P (t
))
643 /* We need a variant for the placeholder machinery to work. */
644 tree tt
= build_variant_type_copy (t
);
645 TYPE_NAME (tt
) = decl
;
646 gnat_set_type_context (tt
, DECL_CONTEXT (decl
));
647 TREE_USED (tt
) = TREE_USED (t
);
648 TREE_TYPE (decl
) = tt
;
649 if (DECL_ORIGINAL_TYPE (TYPE_NAME (t
)))
650 DECL_ORIGINAL_TYPE (decl
) = DECL_ORIGINAL_TYPE (TYPE_NAME (t
));
652 DECL_ORIGINAL_TYPE (decl
) = t
;
653 DECL_ARTIFICIAL (decl
) = 0;
656 else if (DECL_ARTIFICIAL (TYPE_NAME (t
)) && !DECL_ARTIFICIAL (decl
))
661 /* Propagate the name to all the anonymous variants. This is needed
662 for the type qualifiers machinery to work properly. */
664 for (t
= TYPE_MAIN_VARIANT (t
); t
; t
= TYPE_NEXT_VARIANT (t
))
665 if (!(TYPE_NAME (t
) && TREE_CODE (TYPE_NAME (t
)) == TYPE_DECL
))
667 TYPE_NAME (t
) = decl
;
668 gnat_set_type_context (t
, DECL_CONTEXT (decl
));
673 /* Create a record type that contains a SIZE bytes long field of TYPE with a
674 starting bit position so that it is aligned to ALIGN bits, and leaving at
675 least ROOM bytes free before the field. BASE_ALIGN is the alignment the
676 record is guaranteed to get. GNAT_NODE is used for the position of the
677 associated TYPE_DECL. */
680 make_aligning_type (tree type
, unsigned int align
, tree size
,
681 unsigned int base_align
, int room
, Node_Id gnat_node
)
683 /* We will be crafting a record type with one field at a position set to be
684 the next multiple of ALIGN past record'address + room bytes. We use a
685 record placeholder to express record'address. */
686 tree record_type
= make_node (RECORD_TYPE
);
687 tree record
= build0 (PLACEHOLDER_EXPR
, record_type
);
690 = convert (sizetype
, build_unary_op (ADDR_EXPR
, NULL_TREE
, record
));
692 /* The diagram below summarizes the shape of what we manipulate:
694 <--------- pos ---------->
695 { +------------+-------------+-----------------+
696 record =>{ |############| ... | field (type) |
697 { +------------+-------------+-----------------+
698 |<-- room -->|<- voffset ->|<---- size ----->|
701 record_addr vblock_addr
703 Every length is in sizetype bytes there, except "pos" which has to be
704 set as a bit position in the GCC tree for the record. */
705 tree room_st
= size_int (room
);
706 tree vblock_addr_st
= size_binop (PLUS_EXPR
, record_addr_st
, room_st
);
707 tree voffset_st
, pos
, field
;
709 tree name
= TYPE_NAME (type
);
711 if (TREE_CODE (name
) == TYPE_DECL
)
712 name
= DECL_NAME (name
);
713 name
= concat_name (name
, "ALIGN");
714 TYPE_NAME (record_type
) = name
;
716 /* Compute VOFFSET and then POS. The next byte position multiple of some
717 alignment after some address is obtained by "and"ing the alignment minus
718 1 with the two's complement of the address. */
719 voffset_st
= size_binop (BIT_AND_EXPR
,
720 fold_build1 (NEGATE_EXPR
, sizetype
, vblock_addr_st
),
721 size_int ((align
/ BITS_PER_UNIT
) - 1));
723 /* POS = (ROOM + VOFFSET) * BIT_PER_UNIT, in bitsizetype. */
724 pos
= size_binop (MULT_EXPR
,
725 convert (bitsizetype
,
726 size_binop (PLUS_EXPR
, room_st
, voffset_st
)),
729 /* Craft the GCC record representation. We exceptionally do everything
730 manually here because 1) our generic circuitry is not quite ready to
731 handle the complex position/size expressions we are setting up, 2) we
732 have a strong simplifying factor at hand: we know the maximum possible
733 value of voffset, and 3) we have to set/reset at least the sizes in
734 accordance with this maximum value anyway, as we need them to convey
735 what should be "alloc"ated for this type.
737 Use -1 as the 'addressable' indication for the field to prevent the
738 creation of a bitfield. We don't need one, it would have damaging
739 consequences on the alignment computation, and create_field_decl would
740 make one without this special argument, for instance because of the
741 complex position expression. */
742 field
= create_field_decl (get_identifier ("F"), type
, record_type
, size
,
744 TYPE_FIELDS (record_type
) = field
;
746 TYPE_ALIGN (record_type
) = base_align
;
747 TYPE_USER_ALIGN (record_type
) = 1;
749 TYPE_SIZE (record_type
)
750 = size_binop (PLUS_EXPR
,
751 size_binop (MULT_EXPR
, convert (bitsizetype
, size
),
753 bitsize_int (align
+ room
* BITS_PER_UNIT
));
754 TYPE_SIZE_UNIT (record_type
)
755 = size_binop (PLUS_EXPR
, size
,
756 size_int (room
+ align
/ BITS_PER_UNIT
));
758 SET_TYPE_MODE (record_type
, BLKmode
);
759 relate_alias_sets (record_type
, type
, ALIAS_SET_COPY
);
761 /* Declare it now since it will never be declared otherwise. This is
762 necessary to ensure that its subtrees are properly marked. */
763 create_type_decl (name
, record_type
, true, false, gnat_node
);
768 /* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
769 as the field type of a packed record if IN_RECORD is true, or as the
770 component type of a packed array if IN_RECORD is false. See if we can
771 rewrite it either as a type that has a non-BLKmode, which we can pack
772 tighter in the packed record case, or as a smaller type. If so, return
773 the new type. If not, return the original type. */
776 make_packable_type (tree type
, bool in_record
)
778 unsigned HOST_WIDE_INT size
= tree_to_uhwi (TYPE_SIZE (type
));
779 unsigned HOST_WIDE_INT new_size
;
780 tree new_type
, old_field
, field_list
= NULL_TREE
;
783 /* No point in doing anything if the size is zero. */
787 new_type
= make_node (TREE_CODE (type
));
789 /* Copy the name and flags from the old type to that of the new.
790 Note that we rely on the pointer equality created here for
791 TYPE_NAME to look through conversions in various places. */
792 TYPE_NAME (new_type
) = TYPE_NAME (type
);
793 TYPE_JUSTIFIED_MODULAR_P (new_type
) = TYPE_JUSTIFIED_MODULAR_P (type
);
794 TYPE_CONTAINS_TEMPLATE_P (new_type
) = TYPE_CONTAINS_TEMPLATE_P (type
);
795 if (TREE_CODE (type
) == RECORD_TYPE
)
796 TYPE_PADDING_P (new_type
) = TYPE_PADDING_P (type
);
798 /* If we are in a record and have a small size, set the alignment to
799 try for an integral mode. Otherwise set it to try for a smaller
800 type with BLKmode. */
801 if (in_record
&& size
<= MAX_FIXED_MODE_SIZE
)
803 align
= ceil_pow2 (size
);
804 TYPE_ALIGN (new_type
) = align
;
805 new_size
= (size
+ align
- 1) & -align
;
809 unsigned HOST_WIDE_INT align
;
811 /* Do not try to shrink the size if the RM size is not constant. */
812 if (TYPE_CONTAINS_TEMPLATE_P (type
)
813 || !tree_fits_uhwi_p (TYPE_ADA_SIZE (type
)))
816 /* Round the RM size up to a unit boundary to get the minimal size
817 for a BLKmode record. Give up if it's already the size. */
818 new_size
= tree_to_uhwi (TYPE_ADA_SIZE (type
));
819 new_size
= (new_size
+ BITS_PER_UNIT
- 1) & -BITS_PER_UNIT
;
820 if (new_size
== size
)
823 align
= new_size
& -new_size
;
824 TYPE_ALIGN (new_type
) = MIN (TYPE_ALIGN (type
), align
);
827 TYPE_USER_ALIGN (new_type
) = 1;
829 /* Now copy the fields, keeping the position and size as we don't want
830 to change the layout by propagating the packedness downwards. */
831 for (old_field
= TYPE_FIELDS (type
); old_field
;
832 old_field
= DECL_CHAIN (old_field
))
834 tree new_field_type
= TREE_TYPE (old_field
);
835 tree new_field
, new_size
;
837 if (RECORD_OR_UNION_TYPE_P (new_field_type
)
838 && !TYPE_FAT_POINTER_P (new_field_type
)
839 && tree_fits_uhwi_p (TYPE_SIZE (new_field_type
)))
840 new_field_type
= make_packable_type (new_field_type
, true);
842 /* However, for the last field in a not already packed record type
843 that is of an aggregate type, we need to use the RM size in the
844 packable version of the record type, see finish_record_type. */
845 if (!DECL_CHAIN (old_field
)
846 && !TYPE_PACKED (type
)
847 && RECORD_OR_UNION_TYPE_P (new_field_type
)
848 && !TYPE_FAT_POINTER_P (new_field_type
)
849 && !TYPE_CONTAINS_TEMPLATE_P (new_field_type
)
850 && TYPE_ADA_SIZE (new_field_type
))
851 new_size
= TYPE_ADA_SIZE (new_field_type
);
853 new_size
= DECL_SIZE (old_field
);
856 = create_field_decl (DECL_NAME (old_field
), new_field_type
, new_type
,
857 new_size
, bit_position (old_field
),
859 !DECL_NONADDRESSABLE_P (old_field
));
861 DECL_INTERNAL_P (new_field
) = DECL_INTERNAL_P (old_field
);
862 SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field
, old_field
);
863 if (TREE_CODE (new_type
) == QUAL_UNION_TYPE
)
864 DECL_QUALIFIER (new_field
) = DECL_QUALIFIER (old_field
);
866 DECL_CHAIN (new_field
) = field_list
;
867 field_list
= new_field
;
870 finish_record_type (new_type
, nreverse (field_list
), 2, false);
871 relate_alias_sets (new_type
, type
, ALIAS_SET_COPY
);
872 SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type
),
873 DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type
)));
875 /* If this is a padding record, we never want to make the size smaller
876 than what was specified. For QUAL_UNION_TYPE, also copy the size. */
877 if (TYPE_IS_PADDING_P (type
) || TREE_CODE (type
) == QUAL_UNION_TYPE
)
879 TYPE_SIZE (new_type
) = TYPE_SIZE (type
);
880 TYPE_SIZE_UNIT (new_type
) = TYPE_SIZE_UNIT (type
);
885 TYPE_SIZE (new_type
) = bitsize_int (new_size
);
886 TYPE_SIZE_UNIT (new_type
)
887 = size_int ((new_size
+ BITS_PER_UNIT
- 1) / BITS_PER_UNIT
);
890 if (!TYPE_CONTAINS_TEMPLATE_P (type
))
891 SET_TYPE_ADA_SIZE (new_type
, TYPE_ADA_SIZE (type
));
893 compute_record_mode (new_type
);
895 /* Try harder to get a packable type if necessary, for example
896 in case the record itself contains a BLKmode field. */
897 if (in_record
&& TYPE_MODE (new_type
) == BLKmode
)
898 SET_TYPE_MODE (new_type
,
899 mode_for_size_tree (TYPE_SIZE (new_type
), MODE_INT
, 1));
901 /* If neither the mode nor the size has shrunk, return the old type. */
902 if (TYPE_MODE (new_type
) == BLKmode
&& new_size
>= size
)
908 /* Given a type TYPE, return a new type whose size is appropriate for SIZE.
909 If TYPE is the best type, return it. Otherwise, make a new type. We
910 only support new integral and pointer types. FOR_BIASED is true if
911 we are making a biased type. */
914 make_type_from_size (tree type
, tree size_tree
, bool for_biased
)
916 unsigned HOST_WIDE_INT size
;
920 /* If size indicates an error, just return TYPE to avoid propagating
921 the error. Likewise if it's too large to represent. */
922 if (!size_tree
|| !tree_fits_uhwi_p (size_tree
))
925 size
= tree_to_uhwi (size_tree
);
927 switch (TREE_CODE (type
))
932 biased_p
= (TREE_CODE (type
) == INTEGER_TYPE
933 && TYPE_BIASED_REPRESENTATION_P (type
));
935 /* Integer types with precision 0 are forbidden. */
939 /* Only do something if the type isn't a packed array type and doesn't
940 already have the proper size and the size isn't too large. */
941 if (TYPE_IS_PACKED_ARRAY_TYPE_P (type
)
942 || (TYPE_PRECISION (type
) == size
&& biased_p
== for_biased
)
943 || size
> LONG_LONG_TYPE_SIZE
)
946 biased_p
|= for_biased
;
947 if (TYPE_UNSIGNED (type
) || biased_p
)
948 new_type
= make_unsigned_type (size
);
950 new_type
= make_signed_type (size
);
951 TREE_TYPE (new_type
) = TREE_TYPE (type
) ? TREE_TYPE (type
) : type
;
952 SET_TYPE_RM_MIN_VALUE (new_type
,
953 convert (TREE_TYPE (new_type
),
954 TYPE_MIN_VALUE (type
)));
955 SET_TYPE_RM_MAX_VALUE (new_type
,
956 convert (TREE_TYPE (new_type
),
957 TYPE_MAX_VALUE (type
)));
958 /* Copy the name to show that it's essentially the same type and
959 not a subrange type. */
960 TYPE_NAME (new_type
) = TYPE_NAME (type
);
961 TYPE_BIASED_REPRESENTATION_P (new_type
) = biased_p
;
962 SET_TYPE_RM_SIZE (new_type
, bitsize_int (size
));
966 /* Do something if this is a fat pointer, in which case we
967 may need to return the thin pointer. */
968 if (TYPE_FAT_POINTER_P (type
) && size
< POINTER_SIZE
* 2)
970 enum machine_mode p_mode
= mode_for_size (size
, MODE_INT
, 0);
971 if (!targetm
.valid_pointer_mode (p_mode
))
974 build_pointer_type_for_mode
975 (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type
)),
981 /* Only do something if this is a thin pointer, in which case we
982 may need to return the fat pointer. */
983 if (TYPE_IS_THIN_POINTER_P (type
) && size
>= POINTER_SIZE
* 2)
985 build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type
)));
995 /* See if the data pointed to by the hash table slot is marked. */
998 pad_type_hash_marked_p (const void *p
)
1000 const_tree
const type
= ((const struct pad_type_hash
*) p
)->type
;
1002 return ggc_marked_p (type
);
1005 /* Return the cached hash value. */
1008 pad_type_hash_hash (const void *p
)
1010 return ((const struct pad_type_hash
*) p
)->hash
;
1013 /* Return 1 iff the padded types are equivalent. */
1016 pad_type_hash_eq (const void *p1
, const void *p2
)
1018 const struct pad_type_hash
*const t1
= (const struct pad_type_hash
*) p1
;
1019 const struct pad_type_hash
*const t2
= (const struct pad_type_hash
*) p2
;
1022 if (t1
->hash
!= t2
->hash
)
1028 /* We consider that the padded types are equivalent if they pad the same
1029 type and have the same size, alignment and RM size. Taking the mode
1030 into account is redundant since it is determined by the others. */
1032 TREE_TYPE (TYPE_FIELDS (type1
)) == TREE_TYPE (TYPE_FIELDS (type2
))
1033 && TYPE_SIZE (type1
) == TYPE_SIZE (type2
)
1034 && TYPE_ALIGN (type1
) == TYPE_ALIGN (type2
)
1035 && TYPE_ADA_SIZE (type1
) == TYPE_ADA_SIZE (type2
);
1038 /* Ensure that TYPE has SIZE and ALIGN. Make and return a new padded type
1039 if needed. We have already verified that SIZE and TYPE are large enough.
1040 GNAT_ENTITY is used to name the resulting record and to issue a warning.
1041 IS_COMPONENT_TYPE is true if this is being done for the component type of
1042 an array. IS_USER_TYPE is true if the original type needs to be completed.
1043 DEFINITION is true if this type is being defined. SET_RM_SIZE is true if
1044 the RM size of the resulting type is to be set to SIZE too. */
1047 maybe_pad_type (tree type
, tree size
, unsigned int align
,
1048 Entity_Id gnat_entity
, bool is_component_type
,
1049 bool is_user_type
, bool definition
, bool set_rm_size
)
1051 tree orig_size
= TYPE_SIZE (type
);
1054 /* If TYPE is a padded type, see if it agrees with any size and alignment
1055 we were given. If so, return the original type. Otherwise, strip
1056 off the padding, since we will either be returning the inner type
1057 or repadding it. If no size or alignment is specified, use that of
1058 the original padded type. */
1059 if (TYPE_IS_PADDING_P (type
))
1062 || operand_equal_p (round_up (size
,
1063 MAX (align
, TYPE_ALIGN (type
))),
1064 round_up (TYPE_SIZE (type
),
1065 MAX (align
, TYPE_ALIGN (type
))),
1067 && (align
== 0 || align
== TYPE_ALIGN (type
)))
1071 size
= TYPE_SIZE (type
);
1073 align
= TYPE_ALIGN (type
);
1075 type
= TREE_TYPE (TYPE_FIELDS (type
));
1076 orig_size
= TYPE_SIZE (type
);
1079 /* If the size is either not being changed or is being made smaller (which
1080 is not done here and is only valid for bitfields anyway), show the size
1081 isn't changing. Likewise, clear the alignment if it isn't being
1082 changed. Then return if we aren't doing anything. */
1084 && (operand_equal_p (size
, orig_size
, 0)
1085 || (TREE_CODE (orig_size
) == INTEGER_CST
1086 && tree_int_cst_lt (size
, orig_size
))))
1089 if (align
== TYPE_ALIGN (type
))
1092 if (align
== 0 && !size
)
1095 /* If requested, complete the original type and give it a name. */
1097 create_type_decl (get_entity_name (gnat_entity
), type
,
1098 !Comes_From_Source (gnat_entity
),
1100 && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
1101 && DECL_IGNORED_P (TYPE_NAME (type
))),
1104 /* We used to modify the record in place in some cases, but that could
1105 generate incorrect debugging information. So make a new record
1107 record
= make_node (RECORD_TYPE
);
1108 TYPE_PADDING_P (record
) = 1;
1110 if (Present (gnat_entity
))
1111 TYPE_NAME (record
) = create_concat_name (gnat_entity
, "PAD");
1113 TYPE_ALIGN (record
) = align
;
1114 TYPE_SIZE (record
) = size
? size
: orig_size
;
1115 TYPE_SIZE_UNIT (record
)
1116 = convert (sizetype
,
1117 size_binop (CEIL_DIV_EXPR
, TYPE_SIZE (record
),
1118 bitsize_unit_node
));
1120 /* If we are changing the alignment and the input type is a record with
1121 BLKmode and a small constant size, try to make a form that has an
1122 integral mode. This might allow the padding record to also have an
1123 integral mode, which will be much more efficient. There is no point
1124 in doing so if a size is specified unless it is also a small constant
1125 size and it is incorrect to do so if we cannot guarantee that the mode
1126 will be naturally aligned since the field must always be addressable.
1128 ??? This might not always be a win when done for a stand-alone object:
1129 since the nominal and the effective type of the object will now have
1130 different modes, a VIEW_CONVERT_EXPR will be required for converting
1131 between them and it might be hard to overcome afterwards, including
1132 at the RTL level when the stand-alone object is accessed as a whole. */
1134 && RECORD_OR_UNION_TYPE_P (type
)
1135 && TYPE_MODE (type
) == BLKmode
1136 && !TYPE_BY_REFERENCE_P (type
)
1137 && TREE_CODE (orig_size
) == INTEGER_CST
1138 && !TREE_OVERFLOW (orig_size
)
1139 && compare_tree_int (orig_size
, MAX_FIXED_MODE_SIZE
) <= 0
1141 || (TREE_CODE (size
) == INTEGER_CST
1142 && compare_tree_int (size
, MAX_FIXED_MODE_SIZE
) <= 0)))
1144 tree packable_type
= make_packable_type (type
, true);
1145 if (TYPE_MODE (packable_type
) != BLKmode
1146 && align
>= TYPE_ALIGN (packable_type
))
1147 type
= packable_type
;
1150 /* Now create the field with the original size. */
1151 field
= create_field_decl (get_identifier ("F"), type
, record
, orig_size
,
1152 bitsize_zero_node
, 0, 1);
1153 DECL_INTERNAL_P (field
) = 1;
1155 /* Do not emit debug info until after the auxiliary record is built. */
1156 finish_record_type (record
, field
, 1, false);
1158 /* Set the RM size if requested. */
1161 SET_TYPE_ADA_SIZE (record
, size
? size
: orig_size
);
1163 /* If the padded type is complete and has constant size, we canonicalize
1164 it by means of the hash table. This is consistent with the language
1165 semantics and ensures that gigi and the middle-end have a common view
1166 of these padded types. */
1167 if (TREE_CONSTANT (TYPE_SIZE (record
)))
1170 struct pad_type_hash in
, *h
;
1173 hashcode
= iterative_hash_object (TYPE_HASH (type
), 0);
1174 hashcode
= iterative_hash_expr (TYPE_SIZE (record
), hashcode
);
1175 hashcode
= iterative_hash_hashval_t (TYPE_ALIGN (record
), hashcode
);
1176 hashcode
= iterative_hash_expr (TYPE_ADA_SIZE (record
), hashcode
);
1180 h
= (struct pad_type_hash
*)
1181 htab_find_with_hash (pad_type_hash_table
, &in
, hashcode
);
1188 h
= ggc_alloc_pad_type_hash ();
1191 loc
= htab_find_slot_with_hash (pad_type_hash_table
, h
, hashcode
,
1197 /* Unless debugging information isn't being written for the input type,
1198 write a record that shows what we are a subtype of and also make a
1199 variable that indicates our size, if still variable. */
1200 if (TREE_CODE (orig_size
) != INTEGER_CST
1201 && TYPE_NAME (record
)
1203 && !(TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
1204 && DECL_IGNORED_P (TYPE_NAME (type
))))
1206 tree marker
= make_node (RECORD_TYPE
);
1207 tree name
= TYPE_NAME (record
);
1208 tree orig_name
= TYPE_NAME (type
);
1210 if (TREE_CODE (name
) == TYPE_DECL
)
1211 name
= DECL_NAME (name
);
1213 if (TREE_CODE (orig_name
) == TYPE_DECL
)
1214 orig_name
= DECL_NAME (orig_name
);
1216 TYPE_NAME (marker
) = concat_name (name
, "XVS");
1217 finish_record_type (marker
,
1218 create_field_decl (orig_name
,
1219 build_reference_type (type
),
1220 marker
, NULL_TREE
, NULL_TREE
,
1224 add_parallel_type (record
, marker
);
1226 if (definition
&& size
&& TREE_CODE (size
) != INTEGER_CST
)
1227 TYPE_SIZE_UNIT (marker
)
1228 = create_var_decl (concat_name (name
, "XVZ"), NULL_TREE
, sizetype
,
1229 TYPE_SIZE_UNIT (record
), false, false, false,
1230 false, NULL
, gnat_entity
);
1233 rest_of_record_type_compilation (record
);
1236 /* If the size was widened explicitly, maybe give a warning. Take the
1237 original size as the maximum size of the input if there was an
1238 unconstrained record involved and round it up to the specified alignment,
1239 if one was specified. But don't do it if we are just annotating types
1240 and the type is tagged, since tagged types aren't fully laid out in this
1243 || TREE_CODE (size
) == COND_EXPR
1244 || TREE_CODE (size
) == MAX_EXPR
1246 || (type_annotate_only
&& Is_Tagged_Type (Etype (gnat_entity
))))
1249 if (CONTAINS_PLACEHOLDER_P (orig_size
))
1250 orig_size
= max_size (orig_size
, true);
1253 orig_size
= round_up (orig_size
, align
);
1255 if (!operand_equal_p (size
, orig_size
, 0)
1256 && !(TREE_CODE (size
) == INTEGER_CST
1257 && TREE_CODE (orig_size
) == INTEGER_CST
1258 && (TREE_OVERFLOW (size
)
1259 || TREE_OVERFLOW (orig_size
)
1260 || tree_int_cst_lt (size
, orig_size
))))
1262 Node_Id gnat_error_node
= Empty
;
1264 if (Is_Packed_Array_Type (gnat_entity
))
1265 gnat_entity
= Original_Array_Type (gnat_entity
);
1267 if ((Ekind (gnat_entity
) == E_Component
1268 || Ekind (gnat_entity
) == E_Discriminant
)
1269 && Present (Component_Clause (gnat_entity
)))
1270 gnat_error_node
= Last_Bit (Component_Clause (gnat_entity
));
1271 else if (Present (Size_Clause (gnat_entity
)))
1272 gnat_error_node
= Expression (Size_Clause (gnat_entity
));
1274 /* Generate message only for entities that come from source, since
1275 if we have an entity created by expansion, the message will be
1276 generated for some other corresponding source entity. */
1277 if (Comes_From_Source (gnat_entity
))
1279 if (Present (gnat_error_node
))
1280 post_error_ne_tree ("{^ }bits of & unused?",
1281 gnat_error_node
, gnat_entity
,
1282 size_diffop (size
, orig_size
));
1283 else if (is_component_type
)
1284 post_error_ne_tree ("component of& padded{ by ^ bits}?",
1285 gnat_entity
, gnat_entity
,
1286 size_diffop (size
, orig_size
));
1293 /* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
1294 If this is a multi-dimensional array type, do this recursively.
1297 - ALIAS_SET_COPY: the new set is made a copy of the old one.
1298 - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
1299 - ALIAS_SET_SUBSET: the new set is made a subset of the old one. */
1302 relate_alias_sets (tree gnu_new_type
, tree gnu_old_type
, enum alias_set_op op
)
1304 /* Remove any padding from GNU_OLD_TYPE. It doesn't matter in the case
1305 of a one-dimensional array, since the padding has the same alias set
1306 as the field type, but if it's a multi-dimensional array, we need to
1307 see the inner types. */
1308 while (TREE_CODE (gnu_old_type
) == RECORD_TYPE
1309 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type
)
1310 || TYPE_PADDING_P (gnu_old_type
)))
1311 gnu_old_type
= TREE_TYPE (TYPE_FIELDS (gnu_old_type
));
1313 /* Unconstrained array types are deemed incomplete and would thus be given
1314 alias set 0. Retrieve the underlying array type. */
1315 if (TREE_CODE (gnu_old_type
) == UNCONSTRAINED_ARRAY_TYPE
)
1317 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type
))));
1318 if (TREE_CODE (gnu_new_type
) == UNCONSTRAINED_ARRAY_TYPE
)
1320 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type
))));
1322 if (TREE_CODE (gnu_new_type
) == ARRAY_TYPE
1323 && TREE_CODE (TREE_TYPE (gnu_new_type
)) == ARRAY_TYPE
1324 && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type
)))
1325 relate_alias_sets (TREE_TYPE (gnu_new_type
), TREE_TYPE (gnu_old_type
), op
);
1329 case ALIAS_SET_COPY
:
1330 /* The alias set shouldn't be copied between array types with different
1331 aliasing settings because this can break the aliasing relationship
1332 between the array type and its element type. */
1333 #ifndef ENABLE_CHECKING
1334 if (flag_strict_aliasing
)
1336 gcc_assert (!(TREE_CODE (gnu_new_type
) == ARRAY_TYPE
1337 && TREE_CODE (gnu_old_type
) == ARRAY_TYPE
1338 && TYPE_NONALIASED_COMPONENT (gnu_new_type
)
1339 != TYPE_NONALIASED_COMPONENT (gnu_old_type
)));
1341 TYPE_ALIAS_SET (gnu_new_type
) = get_alias_set (gnu_old_type
);
1344 case ALIAS_SET_SUBSET
:
1345 case ALIAS_SET_SUPERSET
:
1347 alias_set_type old_set
= get_alias_set (gnu_old_type
);
1348 alias_set_type new_set
= get_alias_set (gnu_new_type
);
1350 /* Do nothing if the alias sets conflict. This ensures that we
1351 never call record_alias_subset several times for the same pair
1352 or at all for alias set 0. */
1353 if (!alias_sets_conflict_p (old_set
, new_set
))
1355 if (op
== ALIAS_SET_SUBSET
)
1356 record_alias_subset (old_set
, new_set
);
1358 record_alias_subset (new_set
, old_set
);
1367 record_component_aliases (gnu_new_type
);
1370 /* Record TYPE as a builtin type for Ada. NAME is the name of the type.
1371 ARTIFICIAL_P is true if it's a type that was generated by the compiler. */
1374 record_builtin_type (const char *name
, tree type
, bool artificial_p
)
1376 tree type_decl
= build_decl (input_location
,
1377 TYPE_DECL
, get_identifier (name
), type
);
1378 DECL_ARTIFICIAL (type_decl
) = artificial_p
;
1379 TYPE_ARTIFICIAL (type
) = artificial_p
;
1380 gnat_pushdecl (type_decl
, Empty
);
1382 if (debug_hooks
->type_decl
)
1383 debug_hooks
->type_decl (type_decl
, false);
1386 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1387 finish constructing the record type as a fat pointer type. */
1390 finish_fat_pointer_type (tree record_type
, tree field_list
)
1392 /* Make sure we can put it into a register. */
1393 if (STRICT_ALIGNMENT
)
1394 TYPE_ALIGN (record_type
) = MIN (BIGGEST_ALIGNMENT
, 2 * POINTER_SIZE
);
1396 /* Show what it really is. */
1397 TYPE_FAT_POINTER_P (record_type
) = 1;
1399 /* Do not emit debug info for it since the types of its fields may still be
1400 incomplete at this point. */
1401 finish_record_type (record_type
, field_list
, 0, false);
1403 /* Force type_contains_placeholder_p to return true on it. Although the
1404 PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
1405 type but the representation of the unconstrained array. */
1406 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type
) = 2;
1409 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1410 finish constructing the record or union type. If REP_LEVEL is zero, this
1411 record has no representation clause and so will be entirely laid out here.
1412 If REP_LEVEL is one, this record has a representation clause and has been
1413 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1414 this record is derived from a parent record and thus inherits its layout;
1415 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
1416 we need to write debug information about this type. */
1419 finish_record_type (tree record_type
, tree field_list
, int rep_level
,
1422 enum tree_code code
= TREE_CODE (record_type
);
1423 tree name
= TYPE_NAME (record_type
);
1424 tree ada_size
= bitsize_zero_node
;
1425 tree size
= bitsize_zero_node
;
1426 bool had_size
= TYPE_SIZE (record_type
) != 0;
1427 bool had_size_unit
= TYPE_SIZE_UNIT (record_type
) != 0;
1428 bool had_align
= TYPE_ALIGN (record_type
) != 0;
1431 TYPE_FIELDS (record_type
) = field_list
;
1433 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
1434 generate debug info and have a parallel type. */
1435 if (name
&& TREE_CODE (name
) == TYPE_DECL
)
1436 name
= DECL_NAME (name
);
1437 TYPE_STUB_DECL (record_type
) = create_type_stub_decl (name
, record_type
);
1439 /* Globally initialize the record first. If this is a rep'ed record,
1440 that just means some initializations; otherwise, layout the record. */
1443 TYPE_ALIGN (record_type
) = MAX (BITS_PER_UNIT
, TYPE_ALIGN (record_type
));
1446 TYPE_SIZE_UNIT (record_type
) = size_zero_node
;
1449 TYPE_SIZE (record_type
) = bitsize_zero_node
;
1451 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
1452 out just like a UNION_TYPE, since the size will be fixed. */
1453 else if (code
== QUAL_UNION_TYPE
)
1458 /* Ensure there isn't a size already set. There can be in an error
1459 case where there is a rep clause but all fields have errors and
1460 no longer have a position. */
1461 TYPE_SIZE (record_type
) = 0;
1463 /* Ensure we use the traditional GCC layout for bitfields when we need
1464 to pack the record type or have a representation clause. The other
1465 possible layout (Microsoft C compiler), if available, would prevent
1466 efficient packing in almost all cases. */
1467 #ifdef TARGET_MS_BITFIELD_LAYOUT
1468 if (TARGET_MS_BITFIELD_LAYOUT
&& TYPE_PACKED (record_type
))
1469 decl_attributes (&record_type
,
1470 tree_cons (get_identifier ("gcc_struct"),
1471 NULL_TREE
, NULL_TREE
),
1472 ATTR_FLAG_TYPE_IN_PLACE
);
1475 layout_type (record_type
);
1478 /* At this point, the position and size of each field is known. It was
1479 either set before entry by a rep clause, or by laying out the type above.
1481 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
1482 to compute the Ada size; the GCC size and alignment (for rep'ed records
1483 that are not padding types); and the mode (for rep'ed records). We also
1484 clear the DECL_BIT_FIELD indication for the cases we know have not been
1485 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
1487 if (code
== QUAL_UNION_TYPE
)
1488 field_list
= nreverse (field_list
);
1490 for (field
= field_list
; field
; field
= DECL_CHAIN (field
))
1492 tree type
= TREE_TYPE (field
);
1493 tree pos
= bit_position (field
);
1494 tree this_size
= DECL_SIZE (field
);
1497 if (RECORD_OR_UNION_TYPE_P (type
)
1498 && !TYPE_FAT_POINTER_P (type
)
1499 && !TYPE_CONTAINS_TEMPLATE_P (type
)
1500 && TYPE_ADA_SIZE (type
))
1501 this_ada_size
= TYPE_ADA_SIZE (type
);
1503 this_ada_size
= this_size
;
1505 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
1506 if (DECL_BIT_FIELD (field
)
1507 && operand_equal_p (this_size
, TYPE_SIZE (type
), 0))
1509 unsigned int align
= TYPE_ALIGN (type
);
1511 /* In the general case, type alignment is required. */
1512 if (value_factor_p (pos
, align
))
1514 /* The enclosing record type must be sufficiently aligned.
1515 Otherwise, if no alignment was specified for it and it
1516 has been laid out already, bump its alignment to the
1517 desired one if this is compatible with its size. */
1518 if (TYPE_ALIGN (record_type
) >= align
)
1520 DECL_ALIGN (field
) = MAX (DECL_ALIGN (field
), align
);
1521 DECL_BIT_FIELD (field
) = 0;
1525 && value_factor_p (TYPE_SIZE (record_type
), align
))
1527 TYPE_ALIGN (record_type
) = align
;
1528 DECL_ALIGN (field
) = MAX (DECL_ALIGN (field
), align
);
1529 DECL_BIT_FIELD (field
) = 0;
1533 /* In the non-strict alignment case, only byte alignment is. */
1534 if (!STRICT_ALIGNMENT
1535 && DECL_BIT_FIELD (field
)
1536 && value_factor_p (pos
, BITS_PER_UNIT
))
1537 DECL_BIT_FIELD (field
) = 0;
1540 /* If we still have DECL_BIT_FIELD set at this point, we know that the
1541 field is technically not addressable. Except that it can actually
1542 be addressed if it is BLKmode and happens to be properly aligned. */
1543 if (DECL_BIT_FIELD (field
)
1544 && !(DECL_MODE (field
) == BLKmode
1545 && value_factor_p (pos
, BITS_PER_UNIT
)))
1546 DECL_NONADDRESSABLE_P (field
) = 1;
1548 /* A type must be as aligned as its most aligned field that is not
1549 a bit-field. But this is already enforced by layout_type. */
1550 if (rep_level
> 0 && !DECL_BIT_FIELD (field
))
1551 TYPE_ALIGN (record_type
)
1552 = MAX (TYPE_ALIGN (record_type
), DECL_ALIGN (field
));
1557 ada_size
= size_binop (MAX_EXPR
, ada_size
, this_ada_size
);
1558 size
= size_binop (MAX_EXPR
, size
, this_size
);
1561 case QUAL_UNION_TYPE
:
1563 = fold_build3 (COND_EXPR
, bitsizetype
, DECL_QUALIFIER (field
),
1564 this_ada_size
, ada_size
);
1565 size
= fold_build3 (COND_EXPR
, bitsizetype
, DECL_QUALIFIER (field
),
1570 /* Since we know here that all fields are sorted in order of
1571 increasing bit position, the size of the record is one
1572 higher than the ending bit of the last field processed
1573 unless we have a rep clause, since in that case we might
1574 have a field outside a QUAL_UNION_TYPE that has a higher ending
1575 position. So use a MAX in that case. Also, if this field is a
1576 QUAL_UNION_TYPE, we need to take into account the previous size in
1577 the case of empty variants. */
1579 = merge_sizes (ada_size
, pos
, this_ada_size
,
1580 TREE_CODE (type
) == QUAL_UNION_TYPE
, rep_level
> 0);
1582 = merge_sizes (size
, pos
, this_size
,
1583 TREE_CODE (type
) == QUAL_UNION_TYPE
, rep_level
> 0);
1591 if (code
== QUAL_UNION_TYPE
)
1592 nreverse (field_list
);
1596 /* If this is a padding record, we never want to make the size smaller
1597 than what was specified in it, if any. */
1598 if (TYPE_IS_PADDING_P (record_type
) && TYPE_SIZE (record_type
))
1599 size
= TYPE_SIZE (record_type
);
1601 /* Now set any of the values we've just computed that apply. */
1602 if (!TYPE_FAT_POINTER_P (record_type
)
1603 && !TYPE_CONTAINS_TEMPLATE_P (record_type
))
1604 SET_TYPE_ADA_SIZE (record_type
, ada_size
);
1608 tree size_unit
= had_size_unit
1609 ? TYPE_SIZE_UNIT (record_type
)
1610 : convert (sizetype
,
1611 size_binop (CEIL_DIV_EXPR
, size
,
1612 bitsize_unit_node
));
1613 unsigned int align
= TYPE_ALIGN (record_type
);
1615 TYPE_SIZE (record_type
) = variable_size (round_up (size
, align
));
1616 TYPE_SIZE_UNIT (record_type
)
1617 = variable_size (round_up (size_unit
, align
/ BITS_PER_UNIT
));
1619 compute_record_mode (record_type
);
1624 rest_of_record_type_compilation (record_type
);
1627 /* Append PARALLEL_TYPE on the chain of parallel types of TYPE. */
1630 add_parallel_type (tree type
, tree parallel_type
)
1632 tree decl
= TYPE_STUB_DECL (type
);
1634 while (DECL_PARALLEL_TYPE (decl
))
1635 decl
= TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl
));
1637 SET_DECL_PARALLEL_TYPE (decl
, parallel_type
);
1640 /* Return true if TYPE has a parallel type. */
1643 has_parallel_type (tree type
)
1645 tree decl
= TYPE_STUB_DECL (type
);
1647 return DECL_PARALLEL_TYPE (decl
) != NULL_TREE
;
1650 /* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
1651 associated with it. It need not be invoked directly in most cases since
1652 finish_record_type takes care of doing so, but this can be necessary if
1653 a parallel type is to be attached to the record type. */
1656 rest_of_record_type_compilation (tree record_type
)
1658 bool var_size
= false;
1661 /* If this is a padded type, the bulk of the debug info has already been
1662 generated for the field's type. */
1663 if (TYPE_IS_PADDING_P (record_type
))
1666 /* If the type already has a parallel type (XVS type), then we're done. */
1667 if (has_parallel_type (record_type
))
1670 for (field
= TYPE_FIELDS (record_type
); field
; field
= DECL_CHAIN (field
))
1672 /* We need to make an XVE/XVU record if any field has variable size,
1673 whether or not the record does. For example, if we have a union,
1674 it may be that all fields, rounded up to the alignment, have the
1675 same size, in which case we'll use that size. But the debug
1676 output routines (except Dwarf2) won't be able to output the fields,
1677 so we need to make the special record. */
1678 if (TREE_CODE (DECL_SIZE (field
)) != INTEGER_CST
1679 /* If a field has a non-constant qualifier, the record will have
1680 variable size too. */
1681 || (TREE_CODE (record_type
) == QUAL_UNION_TYPE
1682 && TREE_CODE (DECL_QUALIFIER (field
)) != INTEGER_CST
))
1689 /* If this record type is of variable size, make a parallel record type that
1690 will tell the debugger how the former is laid out (see exp_dbug.ads). */
1693 tree new_record_type
1694 = make_node (TREE_CODE (record_type
) == QUAL_UNION_TYPE
1695 ? UNION_TYPE
: TREE_CODE (record_type
));
1696 tree orig_name
= TYPE_NAME (record_type
), new_name
;
1697 tree last_pos
= bitsize_zero_node
;
1698 tree old_field
, prev_old_field
= NULL_TREE
;
1700 if (TREE_CODE (orig_name
) == TYPE_DECL
)
1701 orig_name
= DECL_NAME (orig_name
);
1704 = concat_name (orig_name
, TREE_CODE (record_type
) == QUAL_UNION_TYPE
1706 TYPE_NAME (new_record_type
) = new_name
;
1707 TYPE_ALIGN (new_record_type
) = BIGGEST_ALIGNMENT
;
1708 TYPE_STUB_DECL (new_record_type
)
1709 = create_type_stub_decl (new_name
, new_record_type
);
1710 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type
))
1711 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type
));
1712 TYPE_SIZE (new_record_type
) = size_int (TYPE_ALIGN (record_type
));
1713 TYPE_SIZE_UNIT (new_record_type
)
1714 = size_int (TYPE_ALIGN (record_type
) / BITS_PER_UNIT
);
1716 /* Now scan all the fields, replacing each field with a new field
1717 corresponding to the new encoding. */
1718 for (old_field
= TYPE_FIELDS (record_type
); old_field
;
1719 old_field
= DECL_CHAIN (old_field
))
1721 tree field_type
= TREE_TYPE (old_field
);
1722 tree field_name
= DECL_NAME (old_field
);
1723 tree curpos
= bit_position (old_field
);
1724 tree pos
, new_field
;
1726 unsigned int align
= 0;
1728 /* We're going to do some pattern matching below so remove as many
1729 conversions as possible. */
1730 curpos
= remove_conversions (curpos
, true);
1732 /* See how the position was modified from the last position.
1734 There are two basic cases we support: a value was added
1735 to the last position or the last position was rounded to
1736 a boundary and they something was added. Check for the
1737 first case first. If not, see if there is any evidence
1738 of rounding. If so, round the last position and retry.
1740 If this is a union, the position can be taken as zero. */
1741 if (TREE_CODE (new_record_type
) == UNION_TYPE
)
1742 pos
= bitsize_zero_node
;
1744 pos
= compute_related_constant (curpos
, last_pos
);
1747 && TREE_CODE (curpos
) == MULT_EXPR
1748 && tree_fits_uhwi_p (TREE_OPERAND (curpos
, 1)))
1750 tree offset
= TREE_OPERAND (curpos
, 0);
1751 align
= tree_to_uhwi (TREE_OPERAND (curpos
, 1));
1752 align
= scale_by_factor_of (offset
, align
);
1753 last_pos
= round_up (last_pos
, align
);
1754 pos
= compute_related_constant (curpos
, last_pos
);
1757 && TREE_CODE (curpos
) == PLUS_EXPR
1758 && tree_fits_uhwi_p (TREE_OPERAND (curpos
, 1))
1759 && TREE_CODE (TREE_OPERAND (curpos
, 0)) == MULT_EXPR
1761 (TREE_OPERAND (TREE_OPERAND (curpos
, 0), 1)))
1763 tree offset
= TREE_OPERAND (TREE_OPERAND (curpos
, 0), 0);
1764 unsigned HOST_WIDE_INT addend
1765 = tree_to_uhwi (TREE_OPERAND (curpos
, 1));
1767 = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos
, 0), 1));
1768 align
= scale_by_factor_of (offset
, align
);
1769 align
= MIN (align
, addend
& -addend
);
1770 last_pos
= round_up (last_pos
, align
);
1771 pos
= compute_related_constant (curpos
, last_pos
);
1773 else if (potential_alignment_gap (prev_old_field
, old_field
, pos
))
1775 align
= TYPE_ALIGN (field_type
);
1776 last_pos
= round_up (last_pos
, align
);
1777 pos
= compute_related_constant (curpos
, last_pos
);
1780 /* If we can't compute a position, set it to zero.
1782 ??? We really should abort here, but it's too much work
1783 to get this correct for all cases. */
1785 pos
= bitsize_zero_node
;
1787 /* See if this type is variable-sized and make a pointer type
1788 and indicate the indirection if so. Beware that the debug
1789 back-end may adjust the position computed above according
1790 to the alignment of the field type, i.e. the pointer type
1791 in this case, if we don't preventively counter that. */
1792 if (TREE_CODE (DECL_SIZE (old_field
)) != INTEGER_CST
)
1794 field_type
= build_pointer_type (field_type
);
1795 if (align
!= 0 && TYPE_ALIGN (field_type
) > align
)
1797 field_type
= copy_node (field_type
);
1798 TYPE_ALIGN (field_type
) = align
;
1803 /* Make a new field name, if necessary. */
1804 if (var
|| align
!= 0)
1809 sprintf (suffix
, "XV%c%u", var
? 'L' : 'A',
1810 align
/ BITS_PER_UNIT
);
1812 strcpy (suffix
, "XVL");
1814 field_name
= concat_name (field_name
, suffix
);
1818 = create_field_decl (field_name
, field_type
, new_record_type
,
1819 DECL_SIZE (old_field
), pos
, 0, 0);
1820 DECL_CHAIN (new_field
) = TYPE_FIELDS (new_record_type
);
1821 TYPE_FIELDS (new_record_type
) = new_field
;
1823 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1824 zero. The only time it's not the last field of the record
1825 is when there are other components at fixed positions after
1826 it (meaning there was a rep clause for every field) and we
1827 want to be able to encode them. */
1828 last_pos
= size_binop (PLUS_EXPR
, bit_position (old_field
),
1829 (TREE_CODE (TREE_TYPE (old_field
))
1832 : DECL_SIZE (old_field
));
1833 prev_old_field
= old_field
;
1836 TYPE_FIELDS (new_record_type
) = nreverse (TYPE_FIELDS (new_record_type
));
1838 add_parallel_type (record_type
, new_record_type
);
1842 /* Utility function of above to merge LAST_SIZE, the previous size of a record
1843 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
1844 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
1845 replace a value of zero with the old size. If HAS_REP is true, we take the
1846 MAX of the end position of this field with LAST_SIZE. In all other cases,
1847 we use FIRST_BIT plus SIZE. Return an expression for the size. */
1850 merge_sizes (tree last_size
, tree first_bit
, tree size
, bool special
,
1853 tree type
= TREE_TYPE (last_size
);
1856 if (!special
|| TREE_CODE (size
) != COND_EXPR
)
1858 new_size
= size_binop (PLUS_EXPR
, first_bit
, size
);
1860 new_size
= size_binop (MAX_EXPR
, last_size
, new_size
);
1864 new_size
= fold_build3 (COND_EXPR
, type
, TREE_OPERAND (size
, 0),
1865 integer_zerop (TREE_OPERAND (size
, 1))
1866 ? last_size
: merge_sizes (last_size
, first_bit
,
1867 TREE_OPERAND (size
, 1),
1869 integer_zerop (TREE_OPERAND (size
, 2))
1870 ? last_size
: merge_sizes (last_size
, first_bit
,
1871 TREE_OPERAND (size
, 2),
1874 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
1875 when fed through substitute_in_expr) into thinking that a constant
1876 size is not constant. */
1877 while (TREE_CODE (new_size
) == NON_LVALUE_EXPR
)
1878 new_size
= TREE_OPERAND (new_size
, 0);
1883 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1884 related by the addition of a constant. Return that constant if so. */
1887 compute_related_constant (tree op0
, tree op1
)
1889 tree op0_var
, op1_var
;
1890 tree op0_con
= split_plus (op0
, &op0_var
);
1891 tree op1_con
= split_plus (op1
, &op1_var
);
1892 tree result
= size_binop (MINUS_EXPR
, op0_con
, op1_con
);
1894 if (operand_equal_p (op0_var
, op1_var
, 0))
1896 else if (operand_equal_p (op0
, size_binop (PLUS_EXPR
, op1_var
, result
), 0))
1902 /* Utility function of above to split a tree OP which may be a sum, into a
1903 constant part, which is returned, and a variable part, which is stored
1904 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1908 split_plus (tree in
, tree
*pvar
)
1910 /* Strip conversions in order to ease the tree traversal and maximize the
1911 potential for constant or plus/minus discovery. We need to be careful
1912 to always return and set *pvar to bitsizetype trees, but it's worth
1914 in
= remove_conversions (in
, false);
1916 *pvar
= convert (bitsizetype
, in
);
1918 if (TREE_CODE (in
) == INTEGER_CST
)
1920 *pvar
= bitsize_zero_node
;
1921 return convert (bitsizetype
, in
);
1923 else if (TREE_CODE (in
) == PLUS_EXPR
|| TREE_CODE (in
) == MINUS_EXPR
)
1925 tree lhs_var
, rhs_var
;
1926 tree lhs_con
= split_plus (TREE_OPERAND (in
, 0), &lhs_var
);
1927 tree rhs_con
= split_plus (TREE_OPERAND (in
, 1), &rhs_var
);
1929 if (lhs_var
== TREE_OPERAND (in
, 0)
1930 && rhs_var
== TREE_OPERAND (in
, 1))
1931 return bitsize_zero_node
;
1933 *pvar
= size_binop (TREE_CODE (in
), lhs_var
, rhs_var
);
1934 return size_binop (TREE_CODE (in
), lhs_con
, rhs_con
);
1937 return bitsize_zero_node
;
1940 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1941 subprogram. If it is VOID_TYPE, then we are dealing with a procedure,
1942 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1943 PARM_DECL nodes that are the subprogram parameters. CICO_LIST is the
1944 copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
1945 RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
1946 object. RETURN_BY_DIRECT_REF_P is true if the function returns by direct
1947 reference. RETURN_BY_INVISI_REF_P is true if the function returns by
1948 invisible reference. */
1951 create_subprog_type (tree return_type
, tree param_decl_list
, tree cico_list
,
1952 bool return_unconstrained_p
, bool return_by_direct_ref_p
,
1953 bool return_by_invisi_ref_p
)
1955 /* A list of the data type nodes of the subprogram formal parameters.
1956 This list is generated by traversing the input list of PARM_DECL
1958 vec
<tree
, va_gc
> *param_type_list
= NULL
;
1961 for (t
= param_decl_list
; t
; t
= DECL_CHAIN (t
))
1962 vec_safe_push (param_type_list
, TREE_TYPE (t
));
1964 type
= build_function_type_vec (return_type
, param_type_list
);
1966 /* TYPE may have been shared since GCC hashes types. If it has a different
1967 CICO_LIST, make a copy. Likewise for the various flags. */
1968 if (!fntype_same_flags_p (type
, cico_list
, return_unconstrained_p
,
1969 return_by_direct_ref_p
, return_by_invisi_ref_p
))
1971 type
= copy_type (type
);
1972 TYPE_CI_CO_LIST (type
) = cico_list
;
1973 TYPE_RETURN_UNCONSTRAINED_P (type
) = return_unconstrained_p
;
1974 TYPE_RETURN_BY_DIRECT_REF_P (type
) = return_by_direct_ref_p
;
1975 TREE_ADDRESSABLE (type
) = return_by_invisi_ref_p
;
1981 /* Return a copy of TYPE but safe to modify in any way. */
1984 copy_type (tree type
)
1986 tree new_type
= copy_node (type
);
1988 /* Unshare the language-specific data. */
1989 if (TYPE_LANG_SPECIFIC (type
))
1991 TYPE_LANG_SPECIFIC (new_type
) = NULL
;
1992 SET_TYPE_LANG_SPECIFIC (new_type
, GET_TYPE_LANG_SPECIFIC (type
));
1995 /* And the contents of the language-specific slot if needed. */
1996 if ((INTEGRAL_TYPE_P (type
) || TREE_CODE (type
) == REAL_TYPE
)
1997 && TYPE_RM_VALUES (type
))
1999 TYPE_RM_VALUES (new_type
) = NULL_TREE
;
2000 SET_TYPE_RM_SIZE (new_type
, TYPE_RM_SIZE (type
));
2001 SET_TYPE_RM_MIN_VALUE (new_type
, TYPE_RM_MIN_VALUE (type
));
2002 SET_TYPE_RM_MAX_VALUE (new_type
, TYPE_RM_MAX_VALUE (type
));
2005 /* copy_node clears this field instead of copying it, because it is
2006 aliased with TREE_CHAIN. */
2007 TYPE_STUB_DECL (new_type
) = TYPE_STUB_DECL (type
);
2009 TYPE_POINTER_TO (new_type
) = 0;
2010 TYPE_REFERENCE_TO (new_type
) = 0;
2011 TYPE_MAIN_VARIANT (new_type
) = new_type
;
2012 TYPE_NEXT_VARIANT (new_type
) = 0;
2017 /* Return a subtype of sizetype with range MIN to MAX and whose
2018 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2019 of the associated TYPE_DECL. */
2022 create_index_type (tree min
, tree max
, tree index
, Node_Id gnat_node
)
2024 /* First build a type for the desired range. */
2025 tree type
= build_nonshared_range_type (sizetype
, min
, max
);
2027 /* Then set the index type. */
2028 SET_TYPE_INDEX_TYPE (type
, index
);
2029 create_type_decl (NULL_TREE
, type
, true, false, gnat_node
);
2034 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2035 sizetype is used. */
2038 create_range_type (tree type
, tree min
, tree max
)
2042 if (type
== NULL_TREE
)
2045 /* First build a type with the base range. */
2046 range_type
= build_nonshared_range_type (type
, TYPE_MIN_VALUE (type
),
2047 TYPE_MAX_VALUE (type
));
2049 /* Then set the actual range. */
2050 SET_TYPE_RM_MIN_VALUE (range_type
, convert (type
, min
));
2051 SET_TYPE_RM_MAX_VALUE (range_type
, convert (type
, max
));
2056 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
2057 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
2061 create_type_stub_decl (tree type_name
, tree type
)
2063 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
2064 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
2065 emitted in DWARF. */
2066 tree type_decl
= build_decl (input_location
,
2067 TYPE_DECL
, type_name
, type
);
2068 DECL_ARTIFICIAL (type_decl
) = 1;
2069 TYPE_ARTIFICIAL (type
) = 1;
2073 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
2074 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
2075 is a declaration that was generated by the compiler. DEBUG_INFO_P is
2076 true if we need to write debug information about this type. GNAT_NODE
2077 is used for the position of the decl. */
2080 create_type_decl (tree type_name
, tree type
, bool artificial_p
,
2081 bool debug_info_p
, Node_Id gnat_node
)
2083 enum tree_code code
= TREE_CODE (type
);
2084 bool named
= TYPE_NAME (type
) && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
;
2087 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
2088 gcc_assert (!TYPE_IS_DUMMY_P (type
));
2090 /* If the type hasn't been named yet, we're naming it; preserve an existing
2091 TYPE_STUB_DECL that has been attached to it for some purpose. */
2092 if (!named
&& TYPE_STUB_DECL (type
))
2094 type_decl
= TYPE_STUB_DECL (type
);
2095 DECL_NAME (type_decl
) = type_name
;
2098 type_decl
= build_decl (input_location
, TYPE_DECL
, type_name
, type
);
2100 DECL_ARTIFICIAL (type_decl
) = artificial_p
;
2101 TYPE_ARTIFICIAL (type
) = artificial_p
;
2103 /* Add this decl to the current binding level. */
2104 gnat_pushdecl (type_decl
, gnat_node
);
2106 /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
2107 This causes the name to be also viewed as a "tag" by the debug
2108 back-end, with the advantage that no DW_TAG_typedef is emitted
2109 for artificial "tagged" types in DWARF. */
2111 TYPE_STUB_DECL (type
) = type_decl
;
2113 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2114 back-end doesn't support, and for others if we don't need to. */
2115 if (code
== UNCONSTRAINED_ARRAY_TYPE
|| !debug_info_p
)
2116 DECL_IGNORED_P (type_decl
) = 1;
2121 /* Return a VAR_DECL or CONST_DECL node.
2123 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
2124 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
2125 the GCC tree for an optional initial expression; NULL_TREE if none.
2127 CONST_FLAG is true if this variable is constant, in which case we might
2128 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2130 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2131 definition to be made visible outside of the current compilation unit, for
2132 instance variable definitions in a package specification.
2134 EXTERN_FLAG is true when processing an external variable declaration (as
2135 opposed to a definition: no storage is to be allocated for the variable).
2137 STATIC_FLAG is only relevant when not at top level. In that case
2138 it indicates whether to always allocate storage to the variable.
2140 GNAT_NODE is used for the position of the decl. */
2143 create_var_decl_1 (tree var_name
, tree asm_name
, tree type
, tree var_init
,
2144 bool const_flag
, bool public_flag
, bool extern_flag
,
2145 bool static_flag
, bool const_decl_allowed_p
,
2146 struct attrib
*attr_list
, Node_Id gnat_node
)
2148 /* Whether the initializer is a constant initializer. At the global level
2149 or for an external object or an object to be allocated in static memory,
2150 we check that it is a valid constant expression for use in initializing
2151 a static variable; otherwise, we only check that it is constant. */
2154 && gnat_types_compatible_p (type
, TREE_TYPE (var_init
))
2155 && (global_bindings_p () || extern_flag
|| static_flag
2156 ? initializer_constant_valid_p (var_init
, TREE_TYPE (var_init
)) != 0
2157 : TREE_CONSTANT (var_init
)));
2159 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
2160 case the initializer may be used in-lieu of the DECL node (as done in
2161 Identifier_to_gnu). This is useful to prevent the need of elaboration
2162 code when an identifier for which such a decl is made is in turn used as
2163 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
2164 but extra constraints apply to this choice (see below) and are not
2165 relevant to the distinction we wish to make. */
2166 bool constant_p
= const_flag
&& init_const
;
2168 /* The actual DECL node. CONST_DECL was initially intended for enumerals
2169 and may be used for scalars in general but not for aggregates. */
2171 = build_decl (input_location
,
2172 (constant_p
&& const_decl_allowed_p
2173 && !AGGREGATE_TYPE_P (type
)) ? CONST_DECL
: VAR_DECL
,
2176 /* If this is external, throw away any initializations (they will be done
2177 elsewhere) unless this is a constant for which we would like to remain
2178 able to get the initializer. If we are defining a global here, leave a
2179 constant initialization and save any variable elaborations for the
2180 elaboration routine. If we are just annotating types, throw away the
2181 initialization if it isn't a constant. */
2182 if ((extern_flag
&& !constant_p
)
2183 || (type_annotate_only
&& var_init
&& !TREE_CONSTANT (var_init
)))
2184 var_init
= NULL_TREE
;
2186 /* At the global level, an initializer requiring code to be generated
2187 produces elaboration statements. Check that such statements are allowed,
2188 that is, not violating a No_Elaboration_Code restriction. */
2189 if (global_bindings_p () && var_init
!= 0 && !init_const
)
2190 Check_Elaboration_Code_Allowed (gnat_node
);
2192 DECL_INITIAL (var_decl
) = var_init
;
2193 TREE_READONLY (var_decl
) = const_flag
;
2194 DECL_EXTERNAL (var_decl
) = extern_flag
;
2195 TREE_PUBLIC (var_decl
) = public_flag
|| extern_flag
;
2196 TREE_CONSTANT (var_decl
) = constant_p
;
2197 TREE_THIS_VOLATILE (var_decl
) = TREE_SIDE_EFFECTS (var_decl
)
2198 = TYPE_VOLATILE (type
);
2200 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
2201 try to fiddle with DECL_COMMON. However, on platforms that don't
2202 support global BSS sections, uninitialized global variables would
2203 go in DATA instead, thus increasing the size of the executable. */
2205 && TREE_CODE (var_decl
) == VAR_DECL
2206 && TREE_PUBLIC (var_decl
)
2207 && !have_global_bss_p ())
2208 DECL_COMMON (var_decl
) = 1;
2210 /* At the global binding level, we need to allocate static storage for the
2211 variable if it isn't external. Otherwise, we allocate automatic storage
2212 unless requested not to. */
2213 TREE_STATIC (var_decl
)
2214 = !extern_flag
&& (static_flag
|| global_bindings_p ());
2216 /* For an external constant whose initializer is not absolute, do not emit
2217 debug info. In DWARF this would mean a global relocation in a read-only
2218 section which runs afoul of the PE-COFF run-time relocation mechanism. */
2222 && initializer_constant_valid_p (var_init
, TREE_TYPE (var_init
))
2223 != null_pointer_node
)
2224 DECL_IGNORED_P (var_decl
) = 1;
2226 if (TREE_SIDE_EFFECTS (var_decl
))
2227 TREE_ADDRESSABLE (var_decl
) = 1;
2229 /* ??? Some attributes cannot be applied to CONST_DECLs. */
2230 if (TREE_CODE (var_decl
) == VAR_DECL
)
2231 process_attributes (&var_decl
, &attr_list
, true, gnat_node
);
2233 /* Add this decl to the current binding level. */
2234 gnat_pushdecl (var_decl
, gnat_node
);
2236 if (TREE_CODE (var_decl
) == VAR_DECL
)
2239 SET_DECL_ASSEMBLER_NAME (var_decl
, asm_name
);
2241 if (global_bindings_p ())
2242 rest_of_decl_compilation (var_decl
, true, 0);
2248 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
2251 aggregate_type_contains_array_p (tree type
)
2253 switch (TREE_CODE (type
))
2257 case QUAL_UNION_TYPE
:
2260 for (field
= TYPE_FIELDS (type
); field
; field
= DECL_CHAIN (field
))
2261 if (AGGREGATE_TYPE_P (TREE_TYPE (field
))
2262 && aggregate_type_contains_array_p (TREE_TYPE (field
)))
2275 /* Return a FIELD_DECL node. FIELD_NAME is the field's name, FIELD_TYPE is
2276 its type and RECORD_TYPE is the type of the enclosing record. If SIZE is
2277 nonzero, it is the specified size of the field. If POS is nonzero, it is
2278 the bit position. PACKED is 1 if the enclosing record is packed, -1 if it
2279 has Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
2280 means we are allowed to take the address of the field; if it is negative,
2281 we should not make a bitfield, which is used by make_aligning_type. */
2284 create_field_decl (tree field_name
, tree field_type
, tree record_type
,
2285 tree size
, tree pos
, int packed
, int addressable
)
2287 tree field_decl
= build_decl (input_location
,
2288 FIELD_DECL
, field_name
, field_type
);
2290 DECL_CONTEXT (field_decl
) = record_type
;
2291 TREE_READONLY (field_decl
) = TYPE_READONLY (field_type
);
2293 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
2294 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
2295 Likewise for an aggregate without specified position that contains an
2296 array, because in this case slices of variable length of this array
2297 must be handled by GCC and variable-sized objects need to be aligned
2298 to at least a byte boundary. */
2299 if (packed
&& (TYPE_MODE (field_type
) == BLKmode
2301 && AGGREGATE_TYPE_P (field_type
)
2302 && aggregate_type_contains_array_p (field_type
))))
2303 DECL_ALIGN (field_decl
) = BITS_PER_UNIT
;
2305 /* If a size is specified, use it. Otherwise, if the record type is packed
2306 compute a size to use, which may differ from the object's natural size.
2307 We always set a size in this case to trigger the checks for bitfield
2308 creation below, which is typically required when no position has been
2311 size
= convert (bitsizetype
, size
);
2312 else if (packed
== 1)
2314 size
= rm_size (field_type
);
2315 if (TYPE_MODE (field_type
) == BLKmode
)
2316 size
= round_up (size
, BITS_PER_UNIT
);
2319 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
2320 specified for two reasons: first if the size differs from the natural
2321 size. Second, if the alignment is insufficient. There are a number of
2322 ways the latter can be true.
2324 We never make a bitfield if the type of the field has a nonconstant size,
2325 because no such entity requiring bitfield operations should reach here.
2327 We do *preventively* make a bitfield when there might be the need for it
2328 but we don't have all the necessary information to decide, as is the case
2329 of a field with no specified position in a packed record.
2331 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
2332 in layout_decl or finish_record_type to clear the bit_field indication if
2333 it is in fact not needed. */
2334 if (addressable
>= 0
2336 && TREE_CODE (size
) == INTEGER_CST
2337 && TREE_CODE (TYPE_SIZE (field_type
)) == INTEGER_CST
2338 && (!tree_int_cst_equal (size
, TYPE_SIZE (field_type
))
2339 || (pos
&& !value_factor_p (pos
, TYPE_ALIGN (field_type
)))
2341 || (TYPE_ALIGN (record_type
) != 0
2342 && TYPE_ALIGN (record_type
) < TYPE_ALIGN (field_type
))))
2344 DECL_BIT_FIELD (field_decl
) = 1;
2345 DECL_SIZE (field_decl
) = size
;
2346 if (!packed
&& !pos
)
2348 if (TYPE_ALIGN (record_type
) != 0
2349 && TYPE_ALIGN (record_type
) < TYPE_ALIGN (field_type
))
2350 DECL_ALIGN (field_decl
) = TYPE_ALIGN (record_type
);
2352 DECL_ALIGN (field_decl
) = TYPE_ALIGN (field_type
);
2356 DECL_PACKED (field_decl
) = pos
? DECL_BIT_FIELD (field_decl
) : packed
;
2358 /* Bump the alignment if need be, either for bitfield/packing purposes or
2359 to satisfy the type requirements if no such consideration applies. When
2360 we get the alignment from the type, indicate if this is from an explicit
2361 user request, which prevents stor-layout from lowering it later on. */
2363 unsigned int bit_align
2364 = (DECL_BIT_FIELD (field_decl
) ? 1
2365 : packed
&& TYPE_MODE (field_type
) != BLKmode
? BITS_PER_UNIT
: 0);
2367 if (bit_align
> DECL_ALIGN (field_decl
))
2368 DECL_ALIGN (field_decl
) = bit_align
;
2369 else if (!bit_align
&& TYPE_ALIGN (field_type
) > DECL_ALIGN (field_decl
))
2371 DECL_ALIGN (field_decl
) = TYPE_ALIGN (field_type
);
2372 DECL_USER_ALIGN (field_decl
) = TYPE_USER_ALIGN (field_type
);
2378 /* We need to pass in the alignment the DECL is known to have.
2379 This is the lowest-order bit set in POS, but no more than
2380 the alignment of the record, if one is specified. Note
2381 that an alignment of 0 is taken as infinite. */
2382 unsigned int known_align
;
2384 if (tree_fits_uhwi_p (pos
))
2385 known_align
= tree_to_uhwi (pos
) & - tree_to_uhwi (pos
);
2387 known_align
= BITS_PER_UNIT
;
2389 if (TYPE_ALIGN (record_type
)
2390 && (known_align
== 0 || known_align
> TYPE_ALIGN (record_type
)))
2391 known_align
= TYPE_ALIGN (record_type
);
2393 layout_decl (field_decl
, known_align
);
2394 SET_DECL_OFFSET_ALIGN (field_decl
,
2395 tree_fits_uhwi_p (pos
) ? BIGGEST_ALIGNMENT
2397 pos_from_bit (&DECL_FIELD_OFFSET (field_decl
),
2398 &DECL_FIELD_BIT_OFFSET (field_decl
),
2399 DECL_OFFSET_ALIGN (field_decl
), pos
);
2402 /* In addition to what our caller says, claim the field is addressable if we
2403 know that its type is not suitable.
2405 The field may also be "technically" nonaddressable, meaning that even if
2406 we attempt to take the field's address we will actually get the address
2407 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
2408 value we have at this point is not accurate enough, so we don't account
2409 for this here and let finish_record_type decide. */
2410 if (!addressable
&& !type_for_nonaliased_component_p (field_type
))
2413 DECL_NONADDRESSABLE_P (field_decl
) = !addressable
;
2418 /* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
2419 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
2420 (either an In parameter or an address of a pass-by-ref parameter). */
2423 create_param_decl (tree param_name
, tree param_type
, bool readonly
)
2425 tree param_decl
= build_decl (input_location
,
2426 PARM_DECL
, param_name
, param_type
);
2428 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2429 can lead to various ABI violations. */
2430 if (targetm
.calls
.promote_prototypes (NULL_TREE
)
2431 && INTEGRAL_TYPE_P (param_type
)
2432 && TYPE_PRECISION (param_type
) < TYPE_PRECISION (integer_type_node
))
2434 /* We have to be careful about biased types here. Make a subtype
2435 of integer_type_node with the proper biasing. */
2436 if (TREE_CODE (param_type
) == INTEGER_TYPE
2437 && TYPE_BIASED_REPRESENTATION_P (param_type
))
2440 = make_unsigned_type (TYPE_PRECISION (integer_type_node
));
2441 TREE_TYPE (subtype
) = integer_type_node
;
2442 TYPE_BIASED_REPRESENTATION_P (subtype
) = 1;
2443 SET_TYPE_RM_MIN_VALUE (subtype
, TYPE_MIN_VALUE (param_type
));
2444 SET_TYPE_RM_MAX_VALUE (subtype
, TYPE_MAX_VALUE (param_type
));
2445 param_type
= subtype
;
2448 param_type
= integer_type_node
;
2451 DECL_ARG_TYPE (param_decl
) = param_type
;
2452 TREE_READONLY (param_decl
) = readonly
;
2456 /* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
2457 a TYPE. If IN_PLACE is true, the tree pointed to by NODE should not be
2458 changed. GNAT_NODE is used for the position of error messages. */
2461 process_attributes (tree
*node
, struct attrib
**attr_list
, bool in_place
,
2464 struct attrib
*attr
;
2466 for (attr
= *attr_list
; attr
; attr
= attr
->next
)
2469 case ATTR_MACHINE_ATTRIBUTE
:
2470 Sloc_to_locus (Sloc (gnat_node
), &input_location
);
2471 decl_attributes (node
, tree_cons (attr
->name
, attr
->args
, NULL_TREE
),
2472 in_place
? ATTR_FLAG_TYPE_IN_PLACE
: 0);
2475 case ATTR_LINK_ALIAS
:
2476 if (!DECL_EXTERNAL (*node
))
2478 TREE_STATIC (*node
) = 1;
2479 assemble_alias (*node
, attr
->name
);
2483 case ATTR_WEAK_EXTERNAL
:
2485 declare_weak (*node
);
2487 post_error ("?weak declarations not supported on this target",
2491 case ATTR_LINK_SECTION
:
2492 if (targetm_common
.have_named_sections
)
2494 DECL_SECTION_NAME (*node
)
2495 = build_string (IDENTIFIER_LENGTH (attr
->name
),
2496 IDENTIFIER_POINTER (attr
->name
));
2497 DECL_COMMON (*node
) = 0;
2500 post_error ("?section attributes are not supported for this target",
2504 case ATTR_LINK_CONSTRUCTOR
:
2505 DECL_STATIC_CONSTRUCTOR (*node
) = 1;
2506 TREE_USED (*node
) = 1;
2509 case ATTR_LINK_DESTRUCTOR
:
2510 DECL_STATIC_DESTRUCTOR (*node
) = 1;
2511 TREE_USED (*node
) = 1;
2514 case ATTR_THREAD_LOCAL_STORAGE
:
2515 DECL_TLS_MODEL (*node
) = decl_default_tls_model (*node
);
2516 DECL_COMMON (*node
) = 0;
2523 /* Record DECL as a global renaming pointer. */
2526 record_global_renaming_pointer (tree decl
)
2528 gcc_assert (!DECL_LOOP_PARM_P (decl
) && DECL_RENAMED_OBJECT (decl
));
2529 vec_safe_push (global_renaming_pointers
, decl
);
2532 /* Invalidate the global renaming pointers. */
2535 invalidate_global_renaming_pointers (void)
2540 if (global_renaming_pointers
== NULL
)
2543 FOR_EACH_VEC_ELT (*global_renaming_pointers
, i
, iter
)
2544 SET_DECL_RENAMED_OBJECT (iter
, NULL_TREE
);
2546 vec_free (global_renaming_pointers
);
2549 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
2553 value_factor_p (tree value
, HOST_WIDE_INT factor
)
2555 if (tree_fits_uhwi_p (value
))
2556 return tree_to_uhwi (value
) % factor
== 0;
2558 if (TREE_CODE (value
) == MULT_EXPR
)
2559 return (value_factor_p (TREE_OPERAND (value
, 0), factor
)
2560 || value_factor_p (TREE_OPERAND (value
, 1), factor
));
/* NOTE(review): this block was garbled by extraction -- statements are split
   mid-token across lines and the tail of the function (the loop body, the
   combination of the trailing-zero count with VALUE, and the return) is
   missing from this chunk.  Kept byte-identical; consult the complete
   definition upstream before modifying.  */
2565 /* Return VALUE scaled by the biggest power-of-2 factor of EXPR. */
2568 scale_by_factor_of (tree expr
, unsigned int value
)
/* Strip conversions first so the BIT_AND_EXPR test below can see through
   casts -- presumably the same rationale as in split_plus above.  */
2570 expr
= remove_conversions (expr
, true);
2572 /* An expression which is a bitwise AND with a mask has a power-of-2 factor
2573 corresponding to the number of trailing zeros of the mask. */
2574 if (TREE_CODE (expr
) == BIT_AND_EXPR
2575 && TREE_CODE (TREE_OPERAND (expr
, 1)) == INTEGER_CST
)
2577 unsigned HOST_WIDE_INT mask
= TREE_INT_CST_LOW (TREE_OPERAND (expr
, 1));
/* Count the mask's trailing zero bits; the counter `i` is declared in the
   elided portion -- TODO confirm against the upstream source.  */
2580 while ((mask
& 1) == 0 && i
< HOST_BITS_PER_WIDE_INT
)
2591 /* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
2592 unless we can prove these 2 fields are laid out in such a way that no gap
2593 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
2594 is the distance in bits between the end of PREV_FIELD and the starting
2595 position of CURR_FIELD. It is ignored if null. */
2598 potential_alignment_gap (tree prev_field
, tree curr_field
, tree offset
)
2600 /* If this is the first field of the record, there cannot be any gap */
2604 /* If the previous field is a union type, then return false: The only
2605 time when such a field is not the last field of the record is when
2606 there are other components at fixed positions after it (meaning there
2607 was a rep clause for every field), in which case we don't want the
2608 alignment constraint to override them. */
2609 if (TREE_CODE (TREE_TYPE (prev_field
)) == QUAL_UNION_TYPE
)
2612 /* If the distance between the end of prev_field and the beginning of
2613 curr_field is constant, then there is a gap if the value of this
2614 constant is not null. */
2615 if (offset
&& tree_fits_uhwi_p (offset
))
2616 return !integer_zerop (offset
);
2618 /* If the size and position of the previous field are constant,
2619 then check the sum of this size and position. There will be a gap
2620 iff it is not multiple of the current field alignment. */
2621 if (tree_fits_uhwi_p (DECL_SIZE (prev_field
))
2622 && tree_fits_uhwi_p (bit_position (prev_field
)))
2623 return ((tree_to_uhwi (bit_position (prev_field
))
2624 + tree_to_uhwi (DECL_SIZE (prev_field
)))
2625 % DECL_ALIGN (curr_field
) != 0);
2627 /* If both the position and size of the previous field are multiples
2628 of the current field alignment, there cannot be any gap. */
2629 if (value_factor_p (bit_position (prev_field
), DECL_ALIGN (curr_field
))
2630 && value_factor_p (DECL_SIZE (prev_field
), DECL_ALIGN (curr_field
)))
2633 /* Fallback, return that there may be a potential gap */
2637 /* Return a LABEL_DECL with LABEL_NAME. GNAT_NODE is used for the position
2641 create_label_decl (tree label_name
, Node_Id gnat_node
)
2644 = build_decl (input_location
, LABEL_DECL
, label_name
, void_type_node
);
2646 DECL_MODE (label_decl
) = VOIDmode
;
2648 /* Add this decl to the current binding level. */
2649 gnat_pushdecl (label_decl
, gnat_node
);
2654 /* Return a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
2655 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
2656 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
2657 PARM_DECL nodes chained through the DECL_CHAIN field).
2659 INLINE_STATUS, PUBLIC_FLAG, EXTERN_FLAG, ARTIFICIAL_FLAG and ATTR_LIST are
2660 used to set the appropriate fields in the FUNCTION_DECL. GNAT_NODE is
2661 used for the position of the decl. */
2664 create_subprog_decl (tree subprog_name
, tree asm_name
, tree subprog_type
,
2665 tree param_decl_list
, enum inline_status_t inline_status
,
2666 bool public_flag
, bool extern_flag
, bool artificial_flag
,
2667 struct attrib
*attr_list
, Node_Id gnat_node
)
2669 tree subprog_decl
= build_decl (input_location
, FUNCTION_DECL
, subprog_name
,
2671 tree result_decl
= build_decl (input_location
, RESULT_DECL
, NULL_TREE
,
2672 TREE_TYPE (subprog_type
));
2673 DECL_ARGUMENTS (subprog_decl
) = param_decl_list
;
2675 /* If this is a non-inline function nested inside an inlined external
2676 function, we cannot honor both requests without cloning the nested
2677 function in the current unit since it is private to the other unit.
2678 We could inline the nested function as well but it's probably better
2679 to err on the side of too little inlining. */
2680 if (inline_status
!= is_enabled
2682 && current_function_decl
2683 && DECL_DECLARED_INLINE_P (current_function_decl
)
2684 && DECL_EXTERNAL (current_function_decl
))
2685 DECL_DECLARED_INLINE_P (current_function_decl
) = 0;
2687 DECL_ARTIFICIAL (subprog_decl
) = artificial_flag
;
2688 DECL_EXTERNAL (subprog_decl
) = extern_flag
;
2690 switch (inline_status
)
2693 DECL_UNINLINABLE (subprog_decl
) = 1;
2700 DECL_DECLARED_INLINE_P (subprog_decl
) = 1;
2701 DECL_NO_INLINE_WARNING_P (subprog_decl
) = artificial_flag
;
2708 TREE_PUBLIC (subprog_decl
) = public_flag
;
2709 TREE_READONLY (subprog_decl
) = TYPE_READONLY (subprog_type
);
2710 TREE_THIS_VOLATILE (subprog_decl
) = TYPE_VOLATILE (subprog_type
);
2711 TREE_SIDE_EFFECTS (subprog_decl
) = TYPE_VOLATILE (subprog_type
);
2713 DECL_ARTIFICIAL (result_decl
) = 1;
2714 DECL_IGNORED_P (result_decl
) = 1;
2715 DECL_BY_REFERENCE (result_decl
) = TREE_ADDRESSABLE (subprog_type
);
2716 DECL_RESULT (subprog_decl
) = result_decl
;
2720 SET_DECL_ASSEMBLER_NAME (subprog_decl
, asm_name
);
2722 /* The expand_main_function circuitry expects "main_identifier_node" to
2723 designate the DECL_NAME of the 'main' entry point, in turn expected
2724 to be declared as the "main" function literally by default. Ada
2725 program entry points are typically declared with a different name
2726 within the binder generated file, exported as 'main' to satisfy the
2727 system expectations. Force main_identifier_node in this case. */
2728 if (asm_name
== main_identifier_node
)
2729 DECL_NAME (subprog_decl
) = main_identifier_node
;
2732 process_attributes (&subprog_decl
, &attr_list
, true, gnat_node
);
2734 /* Add this decl to the current binding level. */
2735 gnat_pushdecl (subprog_decl
, gnat_node
);
2737 /* Output the assembler code and/or RTL for the declaration. */
2738 rest_of_decl_compilation (subprog_decl
, global_bindings_p (), 0);
2740 return subprog_decl
;
2743 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
2744 body. This routine needs to be invoked before processing the declarations
2745 appearing in the subprogram. */
2748 begin_subprog_body (tree subprog_decl
)
2752 announce_function (subprog_decl
);
2754 /* This function is being defined. */
2755 TREE_STATIC (subprog_decl
) = 1;
2757 current_function_decl
= subprog_decl
;
2759 /* Enter a new binding level and show that all the parameters belong to
2763 for (param_decl
= DECL_ARGUMENTS (subprog_decl
); param_decl
;
2764 param_decl
= DECL_CHAIN (param_decl
))
2765 DECL_CONTEXT (param_decl
) = subprog_decl
;
2767 make_decl_rtl (subprog_decl
);
2770 /* Finish translating the current subprogram and set its BODY. */
2773 end_subprog_body (tree body
)
2775 tree fndecl
= current_function_decl
;
2777 /* Attach the BLOCK for this level to the function and pop the level. */
2778 BLOCK_SUPERCONTEXT (current_binding_level
->block
) = fndecl
;
2779 DECL_INITIAL (fndecl
) = current_binding_level
->block
;
2782 /* Mark the RESULT_DECL as being in this subprogram. */
2783 DECL_CONTEXT (DECL_RESULT (fndecl
)) = fndecl
;
2785 /* The body should be a BIND_EXPR whose BLOCK is the top-level one. */
2786 if (TREE_CODE (body
) == BIND_EXPR
)
2788 BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body
)) = fndecl
;
2789 DECL_INITIAL (fndecl
) = BIND_EXPR_BLOCK (body
);
2792 DECL_SAVED_TREE (fndecl
) = body
;
2794 current_function_decl
= decl_function_context (fndecl
);
2797 /* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
2800 rest_of_subprog_body_compilation (tree subprog_decl
)
2802 /* We cannot track the location of errors past this point. */
2803 error_gnat_node
= Empty
;
2805 /* If we're only annotating types, don't actually compile this function. */
2806 if (type_annotate_only
)
2809 /* Dump functions before gimplification. */
2810 dump_function (TDI_original
, subprog_decl
);
2812 if (!decl_function_context (subprog_decl
))
2813 cgraph_finalize_function (subprog_decl
, false);
2815 /* Register this function with cgraph just far enough to get it
2816 added to our parent's nested function list. */
2817 (void) cgraph_get_create_node (subprog_decl
);
2821 gnat_builtin_function (tree decl
)
2823 gnat_pushdecl (decl
, Empty
);
2827 /* Return an integer type with the number of bits of precision given by
2828 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2829 it is a signed type. */
2832 gnat_type_for_size (unsigned precision
, int unsignedp
)
2837 if (precision
<= 2 * MAX_BITS_PER_WORD
2838 && signed_and_unsigned_types
[precision
][unsignedp
])
2839 return signed_and_unsigned_types
[precision
][unsignedp
];
2842 t
= make_unsigned_type (precision
);
2844 t
= make_signed_type (precision
);
2846 if (precision
<= 2 * MAX_BITS_PER_WORD
)
2847 signed_and_unsigned_types
[precision
][unsignedp
] = t
;
2851 sprintf (type_name
, "%sSIGNED_%u", unsignedp
? "UN" : "", precision
);
2852 TYPE_NAME (t
) = get_identifier (type_name
);
2858 /* Likewise for floating-point types. */
2861 float_type_for_precision (int precision
, enum machine_mode mode
)
2866 if (float_types
[(int) mode
])
2867 return float_types
[(int) mode
];
2869 float_types
[(int) mode
] = t
= make_node (REAL_TYPE
);
2870 TYPE_PRECISION (t
) = precision
;
2873 gcc_assert (TYPE_MODE (t
) == mode
);
2876 sprintf (type_name
, "FLOAT_%d", precision
);
2877 TYPE_NAME (t
) = get_identifier (type_name
);
2883 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2884 an unsigned type; otherwise a signed type is returned. */
2887 gnat_type_for_mode (enum machine_mode mode
, int unsignedp
)
2889 if (mode
== BLKmode
)
2892 if (mode
== VOIDmode
)
2893 return void_type_node
;
2895 if (COMPLEX_MODE_P (mode
))
2898 if (SCALAR_FLOAT_MODE_P (mode
))
2899 return float_type_for_precision (GET_MODE_PRECISION (mode
), mode
);
2901 if (SCALAR_INT_MODE_P (mode
))
2902 return gnat_type_for_size (GET_MODE_BITSIZE (mode
), unsignedp
);
2904 if (VECTOR_MODE_P (mode
))
2906 enum machine_mode inner_mode
= GET_MODE_INNER (mode
);
2907 tree inner_type
= gnat_type_for_mode (inner_mode
, unsignedp
);
2909 return build_vector_type_for_mode (inner_type
, mode
);
2915 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2918 gnat_unsigned_type (tree type_node
)
2920 tree type
= gnat_type_for_size (TYPE_PRECISION (type_node
), 1);
2922 if (TREE_CODE (type_node
) == INTEGER_TYPE
&& TYPE_MODULAR_P (type_node
))
2924 type
= copy_node (type
);
2925 TREE_TYPE (type
) = type_node
;
2927 else if (TREE_TYPE (type_node
)
2928 && TREE_CODE (TREE_TYPE (type_node
)) == INTEGER_TYPE
2929 && TYPE_MODULAR_P (TREE_TYPE (type_node
)))
2931 type
= copy_node (type
);
2932 TREE_TYPE (type
) = TREE_TYPE (type_node
);
2938 /* Return the signed version of a TYPE_NODE, a scalar type. */
2941 gnat_signed_type (tree type_node
)
2943 tree type
= gnat_type_for_size (TYPE_PRECISION (type_node
), 0);
2945 if (TREE_CODE (type_node
) == INTEGER_TYPE
&& TYPE_MODULAR_P (type_node
))
2947 type
= copy_node (type
);
2948 TREE_TYPE (type
) = type_node
;
2950 else if (TREE_TYPE (type_node
)
2951 && TREE_CODE (TREE_TYPE (type_node
)) == INTEGER_TYPE
2952 && TYPE_MODULAR_P (TREE_TYPE (type_node
)))
2954 type
= copy_node (type
);
2955 TREE_TYPE (type
) = TREE_TYPE (type_node
);
2961 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2962 transparently converted to each other. */
2965 gnat_types_compatible_p (tree t1
, tree t2
)
2967 enum tree_code code
;
2969 /* This is the default criterion. */
2970 if (TYPE_MAIN_VARIANT (t1
) == TYPE_MAIN_VARIANT (t2
))
2973 /* We only check structural equivalence here. */
2974 if ((code
= TREE_CODE (t1
)) != TREE_CODE (t2
))
2977 /* Vector types are also compatible if they have the same number of subparts
2978 and the same form of (scalar) element type. */
2979 if (code
== VECTOR_TYPE
2980 && TYPE_VECTOR_SUBPARTS (t1
) == TYPE_VECTOR_SUBPARTS (t2
)
2981 && TREE_CODE (TREE_TYPE (t1
)) == TREE_CODE (TREE_TYPE (t2
))
2982 && TYPE_PRECISION (TREE_TYPE (t1
)) == TYPE_PRECISION (TREE_TYPE (t2
)))
2985 /* Array types are also compatible if they are constrained and have the same
2986 domain(s) and the same component type. */
2987 if (code
== ARRAY_TYPE
2988 && (TYPE_DOMAIN (t1
) == TYPE_DOMAIN (t2
)
2989 || (TYPE_DOMAIN (t1
)
2991 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1
)),
2992 TYPE_MIN_VALUE (TYPE_DOMAIN (t2
)))
2993 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1
)),
2994 TYPE_MAX_VALUE (TYPE_DOMAIN (t2
)))))
2995 && (TREE_TYPE (t1
) == TREE_TYPE (t2
)
2996 || (TREE_CODE (TREE_TYPE (t1
)) == ARRAY_TYPE
2997 && gnat_types_compatible_p (TREE_TYPE (t1
), TREE_TYPE (t2
)))))
3003 /* Return true if EXPR is a useless type conversion. */
3006 gnat_useless_type_conversion (tree expr
)
3008 if (CONVERT_EXPR_P (expr
)
3009 || TREE_CODE (expr
) == VIEW_CONVERT_EXPR
3010 || TREE_CODE (expr
) == NON_LVALUE_EXPR
)
3011 return gnat_types_compatible_p (TREE_TYPE (expr
),
3012 TREE_TYPE (TREE_OPERAND (expr
, 0)));
3017 /* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
3020 fntype_same_flags_p (const_tree t
, tree cico_list
, bool return_unconstrained_p
,
3021 bool return_by_direct_ref_p
, bool return_by_invisi_ref_p
)
3023 return TYPE_CI_CO_LIST (t
) == cico_list
3024 && TYPE_RETURN_UNCONSTRAINED_P (t
) == return_unconstrained_p
3025 && TYPE_RETURN_BY_DIRECT_REF_P (t
) == return_by_direct_ref_p
3026 && TREE_ADDRESSABLE (t
) == return_by_invisi_ref_p
;
3029 /* EXP is an expression for the size of an object. If this size contains
3030 discriminant references, replace them with the maximum (if MAX_P) or
3031 minimum (if !MAX_P) possible value of the discriminant. */
3034 max_size (tree exp
, bool max_p
)
3036 enum tree_code code
= TREE_CODE (exp
);
3037 tree type
= TREE_TYPE (exp
);
3039 switch (TREE_CODE_CLASS (code
))
3041 case tcc_declaration
:
3046 if (code
== CALL_EXPR
)
3051 t
= maybe_inline_call_in_expr (exp
);
3053 return max_size (t
, max_p
);
3055 n
= call_expr_nargs (exp
);
3057 argarray
= XALLOCAVEC (tree
, n
);
3058 for (i
= 0; i
< n
; i
++)
3059 argarray
[i
] = max_size (CALL_EXPR_ARG (exp
, i
), max_p
);
3060 return build_call_array (type
, CALL_EXPR_FN (exp
), n
, argarray
);
3065 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
3066 modify. Otherwise, we treat it like a variable. */
3067 if (!CONTAINS_PLACEHOLDER_P (exp
))
3070 type
= TREE_TYPE (TREE_OPERAND (exp
, 1));
3072 max_size (max_p
? TYPE_MAX_VALUE (type
) : TYPE_MIN_VALUE (type
), true);
3074 case tcc_comparison
:
3075 return max_p
? size_one_node
: size_zero_node
;
3078 if (code
== NON_LVALUE_EXPR
)
3079 return max_size (TREE_OPERAND (exp
, 0), max_p
);
3081 return fold_build1 (code
, type
,
3082 max_size (TREE_OPERAND (exp
, 0),
3083 code
== NEGATE_EXPR
? !max_p
: max_p
));
3087 tree lhs
= max_size (TREE_OPERAND (exp
, 0), max_p
);
3088 tree rhs
= max_size (TREE_OPERAND (exp
, 1),
3089 code
== MINUS_EXPR
? !max_p
: max_p
);
3091 /* Special-case wanting the maximum value of a MIN_EXPR.
3092 In that case, if one side overflows, return the other. */
3093 if (max_p
&& code
== MIN_EXPR
)
3095 if (TREE_CODE (rhs
) == INTEGER_CST
&& TREE_OVERFLOW (rhs
))
3098 if (TREE_CODE (lhs
) == INTEGER_CST
&& TREE_OVERFLOW (lhs
))
3102 /* Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
3103 overflowing and the RHS a variable. */
3104 if ((code
== MINUS_EXPR
|| code
== PLUS_EXPR
)
3105 && TREE_CODE (lhs
) == INTEGER_CST
3106 && TREE_OVERFLOW (lhs
)
3107 && !TREE_CONSTANT (rhs
))
3110 return size_binop (code
, lhs
, rhs
);
3113 case tcc_expression
:
3114 switch (TREE_CODE_LENGTH (code
))
3117 if (code
== SAVE_EXPR
)
3120 return fold_build1 (code
, type
,
3121 max_size (TREE_OPERAND (exp
, 0), max_p
));
3124 if (code
== COMPOUND_EXPR
)
3125 return max_size (TREE_OPERAND (exp
, 1), max_p
);
3127 return fold_build2 (code
, type
,
3128 max_size (TREE_OPERAND (exp
, 0), max_p
),
3129 max_size (TREE_OPERAND (exp
, 1), max_p
));
3132 if (code
== COND_EXPR
)
3133 return fold_build2 (max_p
? MAX_EXPR
: MIN_EXPR
, type
,
3134 max_size (TREE_OPERAND (exp
, 1), max_p
),
3135 max_size (TREE_OPERAND (exp
, 2), max_p
));
3141 /* Other tree classes cannot happen. */
3149 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
3150 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
3151 Return a constructor for the template. */
3154 build_template (tree template_type
, tree array_type
, tree expr
)
3156 vec
<constructor_elt
, va_gc
> *template_elts
= NULL
;
3157 tree bound_list
= NULL_TREE
;
3160 while (TREE_CODE (array_type
) == RECORD_TYPE
3161 && (TYPE_PADDING_P (array_type
)
3162 || TYPE_JUSTIFIED_MODULAR_P (array_type
)))
3163 array_type
= TREE_TYPE (TYPE_FIELDS (array_type
));
3165 if (TREE_CODE (array_type
) == ARRAY_TYPE
3166 || (TREE_CODE (array_type
) == INTEGER_TYPE
3167 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type
)))
3168 bound_list
= TYPE_ACTUAL_BOUNDS (array_type
);
3170 /* First make the list for a CONSTRUCTOR for the template. Go down the
3171 field list of the template instead of the type chain because this
3172 array might be an Ada array of arrays and we can't tell where the
3173 nested arrays stop being the underlying object. */
3175 for (field
= TYPE_FIELDS (template_type
); field
;
3177 ? (bound_list
= TREE_CHAIN (bound_list
))
3178 : (array_type
= TREE_TYPE (array_type
))),
3179 field
= DECL_CHAIN (DECL_CHAIN (field
)))
3181 tree bounds
, min
, max
;
3183 /* If we have a bound list, get the bounds from there. Likewise
3184 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
3185 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
3186 This will give us a maximum range. */
3188 bounds
= TREE_VALUE (bound_list
);
3189 else if (TREE_CODE (array_type
) == ARRAY_TYPE
)
3190 bounds
= TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type
));
3191 else if (expr
&& TREE_CODE (expr
) == PARM_DECL
3192 && DECL_BY_COMPONENT_PTR_P (expr
))
3193 bounds
= TREE_TYPE (field
);
3197 min
= convert (TREE_TYPE (field
), TYPE_MIN_VALUE (bounds
));
3198 max
= convert (TREE_TYPE (DECL_CHAIN (field
)), TYPE_MAX_VALUE (bounds
));
3200 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
3201 substitute it from OBJECT. */
3202 min
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (min
, expr
);
3203 max
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (max
, expr
);
3205 CONSTRUCTOR_APPEND_ELT (template_elts
, field
, min
);
3206 CONSTRUCTOR_APPEND_ELT (template_elts
, DECL_CHAIN (field
), max
);
3209 return gnat_build_constructor (template_type
, template_elts
);
3212 /* Helper routine to make a descriptor field. FIELD_LIST is the list of decls
3213 being built; the new decl is chained on to the front of the list. */
3216 make_descriptor_field (const char *name
, tree type
, tree rec_type
,
3217 tree initial
, tree field_list
)
3220 = create_field_decl (get_identifier (name
), type
, rec_type
, NULL_TREE
,
3223 DECL_INITIAL (field
) = initial
;
3224 DECL_CHAIN (field
) = field_list
;
3228 /* Build a 32-bit VMS descriptor from a Mechanism_Type, which must specify a
3229 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
3230 type contains in its DECL_INITIAL the expression to use when a constructor
3231 is made for the type. GNAT_ENTITY is an entity used to print out an error
3232 message if the mechanism cannot be applied to an object of that type and
3233 also for the name. */
3236 build_vms_descriptor32 (tree type
, Mechanism_Type mech
, Entity_Id gnat_entity
)
3238 tree record_type
= make_node (RECORD_TYPE
);
3239 tree pointer32_type
, pointer64_type
;
3240 tree field_list
= NULL_TREE
;
3241 int klass
, ndim
, i
, dtype
= 0;
3242 tree inner_type
, tem
;
3245 /* If TYPE is an unconstrained array, use the underlying array type. */
3246 if (TREE_CODE (type
) == UNCONSTRAINED_ARRAY_TYPE
)
3247 type
= TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type
))));
3249 /* If this is an array, compute the number of dimensions in the array,
3250 get the index types, and point to the inner type. */
3251 if (TREE_CODE (type
) != ARRAY_TYPE
)
3254 for (ndim
= 1, inner_type
= type
;
3255 TREE_CODE (TREE_TYPE (inner_type
)) == ARRAY_TYPE
3256 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type
));
3257 ndim
++, inner_type
= TREE_TYPE (inner_type
))
3260 idx_arr
= XALLOCAVEC (tree
, ndim
);
3262 if (mech
!= By_Descriptor_NCA
&& mech
!= By_Short_Descriptor_NCA
3263 && TREE_CODE (type
) == ARRAY_TYPE
&& TYPE_CONVENTION_FORTRAN_P (type
))
3264 for (i
= ndim
- 1, inner_type
= type
;
3266 i
--, inner_type
= TREE_TYPE (inner_type
))
3267 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
3269 for (i
= 0, inner_type
= type
;
3271 i
++, inner_type
= TREE_TYPE (inner_type
))
3272 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
3274 /* Now get the DTYPE value. */
3275 switch (TREE_CODE (type
))
3280 if (TYPE_VAX_FLOATING_POINT_P (type
))
3281 switch (tree_to_uhwi (TYPE_DIGITS_VALUE (type
)))
3294 switch (GET_MODE_BITSIZE (TYPE_MODE (type
)))
3297 dtype
= TYPE_UNSIGNED (type
) ? 2 : 6;
3300 dtype
= TYPE_UNSIGNED (type
) ? 3 : 7;
3303 dtype
= TYPE_UNSIGNED (type
) ? 4 : 8;
3306 dtype
= TYPE_UNSIGNED (type
) ? 5 : 9;
3309 dtype
= TYPE_UNSIGNED (type
) ? 25 : 26;
3315 dtype
= GET_MODE_BITSIZE (TYPE_MODE (type
)) == 32 ? 52 : 53;
3319 if (TREE_CODE (TREE_TYPE (type
)) == INTEGER_TYPE
3320 && TYPE_VAX_FLOATING_POINT_P (type
))
3321 switch (tree_to_uhwi (TYPE_DIGITS_VALUE (type
)))
3333 dtype
= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))) == 32 ? 54: 55;
3344 /* Get the CLASS value. */
3347 case By_Descriptor_A
:
3348 case By_Short_Descriptor_A
:
3351 case By_Descriptor_NCA
:
3352 case By_Short_Descriptor_NCA
:
3355 case By_Descriptor_SB
:
3356 case By_Short_Descriptor_SB
:
3360 case By_Short_Descriptor
:
3361 case By_Descriptor_S
:
3362 case By_Short_Descriptor_S
:
3368 /* Make the type for a descriptor for VMS. The first four fields are the
3369 same for all types. */
3371 = make_descriptor_field ("LENGTH", gnat_type_for_size (16, 1), record_type
,
3372 size_in_bytes ((mech
== By_Descriptor_A
3373 || mech
== By_Short_Descriptor_A
)
3374 ? inner_type
: type
),
3377 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1), record_type
,
3378 size_int (dtype
), field_list
);
3380 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1), record_type
,
3381 size_int (klass
), field_list
);
3383 pointer32_type
= build_pointer_type_for_mode (type
, SImode
, false);
3384 pointer64_type
= build_pointer_type_for_mode (type
, DImode
, false);
3386 /* Ensure that only 32-bit pointers are passed in 32-bit descriptors. Note
3387 that we cannot build a template call to the CE routine as it would get a
3388 wrong source location; instead we use a second placeholder for it. */
3389 tem
= build_unary_op (ADDR_EXPR
, pointer64_type
,
3390 build0 (PLACEHOLDER_EXPR
, type
));
3391 tem
= build3 (COND_EXPR
, pointer32_type
,
3393 ? build_binary_op (GE_EXPR
, boolean_type_node
, tem
,
3394 build_int_cstu (pointer64_type
, 0x80000000))
3395 : boolean_false_node
,
3396 build0 (PLACEHOLDER_EXPR
, void_type_node
),
3397 convert (pointer32_type
, tem
));
3400 = make_descriptor_field ("POINTER", pointer32_type
, record_type
, tem
,
3406 case By_Short_Descriptor
:
3407 case By_Descriptor_S
:
3408 case By_Short_Descriptor_S
:
3411 case By_Descriptor_SB
:
3412 case By_Short_Descriptor_SB
:
3414 = make_descriptor_field ("SB_L1", gnat_type_for_size (32, 1),
3416 (TREE_CODE (type
) == ARRAY_TYPE
3417 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type
))
3421 = make_descriptor_field ("SB_U1", gnat_type_for_size (32, 1),
3423 (TREE_CODE (type
) == ARRAY_TYPE
3424 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type
))
3429 case By_Descriptor_A
:
3430 case By_Short_Descriptor_A
:
3431 case By_Descriptor_NCA
:
3432 case By_Short_Descriptor_NCA
:
3434 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
3435 record_type
, size_zero_node
, field_list
);
3438 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
3439 record_type
, size_zero_node
, field_list
);
3442 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
3444 size_int ((mech
== By_Descriptor_NCA
3445 || mech
== By_Short_Descriptor_NCA
)
3447 /* Set FL_COLUMN, FL_COEFF, and
3449 : (TREE_CODE (type
) == ARRAY_TYPE
3450 && TYPE_CONVENTION_FORTRAN_P
3456 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
3457 record_type
, size_int (ndim
), field_list
);
3460 = make_descriptor_field ("ARSIZE", gnat_type_for_size (32, 1),
3461 record_type
, size_in_bytes (type
),
3464 /* Now build a pointer to the 0,0,0... element. */
3465 tem
= build0 (PLACEHOLDER_EXPR
, type
);
3466 for (i
= 0, inner_type
= type
; i
< ndim
;
3467 i
++, inner_type
= TREE_TYPE (inner_type
))
3468 tem
= build4 (ARRAY_REF
, TREE_TYPE (inner_type
), tem
,
3469 convert (TYPE_DOMAIN (inner_type
), size_zero_node
),
3470 NULL_TREE
, NULL_TREE
);
3473 = make_descriptor_field ("A0", pointer32_type
, record_type
,
3474 build1 (ADDR_EXPR
, pointer32_type
, tem
),
3477 /* Next come the addressing coefficients. */
3478 tem
= size_one_node
;
3479 for (i
= 0; i
< ndim
; i
++)
3483 = size_binop (MULT_EXPR
, tem
,
3484 size_binop (PLUS_EXPR
,
3485 size_binop (MINUS_EXPR
,
3486 TYPE_MAX_VALUE (idx_arr
[i
]),
3487 TYPE_MIN_VALUE (idx_arr
[i
])),
3490 fname
[0] = ((mech
== By_Descriptor_NCA
||
3491 mech
== By_Short_Descriptor_NCA
) ? 'S' : 'M');
3492 fname
[1] = '0' + i
, fname
[2] = 0;
3494 = make_descriptor_field (fname
, gnat_type_for_size (32, 1),
3495 record_type
, idx_length
, field_list
);
3497 if (mech
== By_Descriptor_NCA
|| mech
== By_Short_Descriptor_NCA
)
3501 /* Finally here are the bounds. */
3502 for (i
= 0; i
< ndim
; i
++)
3506 fname
[0] = 'L', fname
[1] = '0' + i
, fname
[2] = 0;
3508 = make_descriptor_field (fname
, gnat_type_for_size (32, 1),
3509 record_type
, TYPE_MIN_VALUE (idx_arr
[i
]),
3514 = make_descriptor_field (fname
, gnat_type_for_size (32, 1),
3515 record_type
, TYPE_MAX_VALUE (idx_arr
[i
]),
3521 post_error ("unsupported descriptor type for &", gnat_entity
);
3524 TYPE_NAME (record_type
) = create_concat_name (gnat_entity
, "DESC");
3525 finish_record_type (record_type
, nreverse (field_list
), 0, false);
3529 /* Build a 64-bit VMS descriptor from a Mechanism_Type, which must specify a
3530 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
3531 type contains in its DECL_INITIAL the expression to use when a constructor
3532 is made for the type. GNAT_ENTITY is an entity used to print out an error
3533 message if the mechanism cannot be applied to an object of that type and
3534 also for the name. */
3537 build_vms_descriptor (tree type
, Mechanism_Type mech
, Entity_Id gnat_entity
)
3539 tree record_type
= make_node (RECORD_TYPE
);
3540 tree pointer64_type
;
3541 tree field_list
= NULL_TREE
;
3542 int klass
, ndim
, i
, dtype
= 0;
3543 tree inner_type
, tem
;
3546 /* If TYPE is an unconstrained array, use the underlying array type. */
3547 if (TREE_CODE (type
) == UNCONSTRAINED_ARRAY_TYPE
)
3548 type
= TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type
))));
3550 /* If this is an array, compute the number of dimensions in the array,
3551 get the index types, and point to the inner type. */
3552 if (TREE_CODE (type
) != ARRAY_TYPE
)
3555 for (ndim
= 1, inner_type
= type
;
3556 TREE_CODE (TREE_TYPE (inner_type
)) == ARRAY_TYPE
3557 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type
));
3558 ndim
++, inner_type
= TREE_TYPE (inner_type
))
3561 idx_arr
= XALLOCAVEC (tree
, ndim
);
3563 if (mech
!= By_Descriptor_NCA
3564 && TREE_CODE (type
) == ARRAY_TYPE
&& TYPE_CONVENTION_FORTRAN_P (type
))
3565 for (i
= ndim
- 1, inner_type
= type
;
3567 i
--, inner_type
= TREE_TYPE (inner_type
))
3568 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
3570 for (i
= 0, inner_type
= type
;
3572 i
++, inner_type
= TREE_TYPE (inner_type
))
3573 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
3575 /* Now get the DTYPE value. */
3576 switch (TREE_CODE (type
))
3581 if (TYPE_VAX_FLOATING_POINT_P (type
))
3582 switch (tree_to_uhwi (TYPE_DIGITS_VALUE (type
)))
3595 switch (GET_MODE_BITSIZE (TYPE_MODE (type
)))
3598 dtype
= TYPE_UNSIGNED (type
) ? 2 : 6;
3601 dtype
= TYPE_UNSIGNED (type
) ? 3 : 7;
3604 dtype
= TYPE_UNSIGNED (type
) ? 4 : 8;
3607 dtype
= TYPE_UNSIGNED (type
) ? 5 : 9;
3610 dtype
= TYPE_UNSIGNED (type
) ? 25 : 26;
3616 dtype
= GET_MODE_BITSIZE (TYPE_MODE (type
)) == 32 ? 52 : 53;
3620 if (TREE_CODE (TREE_TYPE (type
)) == INTEGER_TYPE
3621 && TYPE_VAX_FLOATING_POINT_P (type
))
3622 switch (tree_to_uhwi (TYPE_DIGITS_VALUE (type
)))
3634 dtype
= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))) == 32 ? 54: 55;
3645 /* Get the CLASS value. */
3648 case By_Descriptor_A
:
3651 case By_Descriptor_NCA
:
3654 case By_Descriptor_SB
:
3658 case By_Descriptor_S
:
3664 /* Make the type for a 64-bit descriptor for VMS. The first six fields
3665 are the same for all types. */
3667 = make_descriptor_field ("MBO", gnat_type_for_size (16, 1),
3668 record_type
, size_int (1), field_list
);
3670 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1),
3671 record_type
, size_int (dtype
), field_list
);
3673 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1),
3674 record_type
, size_int (klass
), field_list
);
3676 = make_descriptor_field ("MBMO", gnat_type_for_size (32, 1),
3677 record_type
, size_int (-1), field_list
);
3679 = make_descriptor_field ("LENGTH", gnat_type_for_size (64, 1),
3681 size_in_bytes (mech
== By_Descriptor_A
3682 ? inner_type
: type
),
3685 pointer64_type
= build_pointer_type_for_mode (type
, DImode
, false);
3688 = make_descriptor_field ("POINTER", pointer64_type
, record_type
,
3689 build_unary_op (ADDR_EXPR
, pointer64_type
,
3690 build0 (PLACEHOLDER_EXPR
, type
)),
3696 case By_Descriptor_S
:
3699 case By_Descriptor_SB
:
3701 = make_descriptor_field ("SB_L1", gnat_type_for_size (64, 1),
3703 (TREE_CODE (type
) == ARRAY_TYPE
3704 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type
))
3708 = make_descriptor_field ("SB_U1", gnat_type_for_size (64, 1),
3710 (TREE_CODE (type
) == ARRAY_TYPE
3711 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type
))
3716 case By_Descriptor_A
:
3717 case By_Descriptor_NCA
:
3719 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
3720 record_type
, size_zero_node
, field_list
);
3723 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
3724 record_type
, size_zero_node
, field_list
);
3726 dtype
= (mech
== By_Descriptor_NCA
3728 /* Set FL_COLUMN, FL_COEFF, and
3730 : (TREE_CODE (type
) == ARRAY_TYPE
3731 && TYPE_CONVENTION_FORTRAN_P (type
)
3734 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
3735 record_type
, size_int (dtype
),
3739 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
3740 record_type
, size_int (ndim
), field_list
);
3743 = make_descriptor_field ("MBZ", gnat_type_for_size (32, 1),
3744 record_type
, size_int (0), field_list
);
3746 = make_descriptor_field ("ARSIZE", gnat_type_for_size (64, 1),
3747 record_type
, size_in_bytes (type
),
3750 /* Now build a pointer to the 0,0,0... element. */
3751 tem
= build0 (PLACEHOLDER_EXPR
, type
);
3752 for (i
= 0, inner_type
= type
; i
< ndim
;
3753 i
++, inner_type
= TREE_TYPE (inner_type
))
3754 tem
= build4 (ARRAY_REF
, TREE_TYPE (inner_type
), tem
,
3755 convert (TYPE_DOMAIN (inner_type
), size_zero_node
),
3756 NULL_TREE
, NULL_TREE
);
3759 = make_descriptor_field ("A0", pointer64_type
, record_type
,
3760 build1 (ADDR_EXPR
, pointer64_type
, tem
),
3763 /* Next come the addressing coefficients. */
3764 tem
= size_one_node
;
3765 for (i
= 0; i
< ndim
; i
++)
3769 = size_binop (MULT_EXPR
, tem
,
3770 size_binop (PLUS_EXPR
,
3771 size_binop (MINUS_EXPR
,
3772 TYPE_MAX_VALUE (idx_arr
[i
]),
3773 TYPE_MIN_VALUE (idx_arr
[i
])),
3776 fname
[0] = (mech
== By_Descriptor_NCA
? 'S' : 'M');
3777 fname
[1] = '0' + i
, fname
[2] = 0;
3779 = make_descriptor_field (fname
, gnat_type_for_size (64, 1),
3780 record_type
, idx_length
, field_list
);
3782 if (mech
== By_Descriptor_NCA
)
3786 /* Finally here are the bounds. */
3787 for (i
= 0; i
< ndim
; i
++)
3791 fname
[0] = 'L', fname
[1] = '0' + i
, fname
[2] = 0;
3793 = make_descriptor_field (fname
, gnat_type_for_size (64, 1),
3795 TYPE_MIN_VALUE (idx_arr
[i
]), field_list
);
3799 = make_descriptor_field (fname
, gnat_type_for_size (64, 1),
3801 TYPE_MAX_VALUE (idx_arr
[i
]), field_list
);
3806 post_error ("unsupported descriptor type for &", gnat_entity
);
3809 TYPE_NAME (record_type
) = create_concat_name (gnat_entity
, "DESC64");
3810 finish_record_type (record_type
, nreverse (field_list
), 0, false);
3814 /* Fill in a VMS descriptor of GNU_TYPE for GNU_EXPR and return the result.
3815 GNAT_ACTUAL is the actual parameter for which the descriptor is built. */
3818 fill_vms_descriptor (tree gnu_type
, tree gnu_expr
, Node_Id gnat_actual
)
3820 vec
<constructor_elt
, va_gc
> *v
= NULL
;
3823 gnu_expr
= maybe_unconstrained_array (gnu_expr
);
3824 gnu_expr
= gnat_protect_expr (gnu_expr
);
3825 gnat_mark_addressable (gnu_expr
);
3827 /* We may need to substitute both GNU_EXPR and a CALL_EXPR to the raise CE
3828 routine in case we have a 32-bit descriptor. */
3829 gnu_expr
= build2 (COMPOUND_EXPR
, void_type_node
,
3830 build_call_raise (CE_Range_Check_Failed
, gnat_actual
,
3831 N_Raise_Constraint_Error
),
3834 for (field
= TYPE_FIELDS (gnu_type
); field
; field
= DECL_CHAIN (field
))
3837 = convert (TREE_TYPE (field
),
3838 SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_INITIAL (field
),
3840 CONSTRUCTOR_APPEND_ELT (v
, field
, value
);
3843 return gnat_build_constructor (gnu_type
, v
);
3846 /* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
3847 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3848 which the VMS descriptor is passed. */
3851 convert_vms_descriptor64 (tree gnu_type
, tree gnu_expr
, Entity_Id gnat_subprog
)
3853 tree desc_type
= TREE_TYPE (TREE_TYPE (gnu_expr
));
3854 tree desc
= build1 (INDIRECT_REF
, desc_type
, gnu_expr
);
3855 /* The CLASS field is the 3rd field in the descriptor. */
3856 tree klass
= DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type
)));
3857 /* The POINTER field is the 6th field in the descriptor. */
3858 tree pointer
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (klass
)));
3860 /* Retrieve the value of the POINTER field. */
3862 = build3 (COMPONENT_REF
, TREE_TYPE (pointer
), desc
, pointer
, NULL_TREE
);
3864 if (POINTER_TYPE_P (gnu_type
))
3865 return convert (gnu_type
, gnu_expr64
);
3867 else if (TYPE_IS_FAT_POINTER_P (gnu_type
))
3869 tree p_array_type
= TREE_TYPE (TYPE_FIELDS (gnu_type
));
3870 tree p_bounds_type
= TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type
)));
3871 tree template_type
= TREE_TYPE (p_bounds_type
);
3872 tree min_field
= TYPE_FIELDS (template_type
);
3873 tree max_field
= DECL_CHAIN (TYPE_FIELDS (template_type
));
3874 tree template_tree
, template_addr
, aflags
, dimct
, t
, u
;
3875 /* See the head comment of build_vms_descriptor. */
3876 int iklass
= TREE_INT_CST_LOW (DECL_INITIAL (klass
));
3877 tree lfield
, ufield
;
3878 vec
<constructor_elt
, va_gc
> *v
;
3880 /* Convert POINTER to the pointer-to-array type. */
3881 gnu_expr64
= convert (p_array_type
, gnu_expr64
);
3885 case 1: /* Class S */
3886 case 15: /* Class SB */
3887 /* Build {1, LENGTH} template; LENGTH64 is the 5th field. */
3889 t
= DECL_CHAIN (DECL_CHAIN (klass
));
3890 t
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3891 CONSTRUCTOR_APPEND_ELT (v
, min_field
,
3892 convert (TREE_TYPE (min_field
),
3894 CONSTRUCTOR_APPEND_ELT (v
, max_field
,
3895 convert (TREE_TYPE (max_field
), t
));
3896 template_tree
= gnat_build_constructor (template_type
, v
);
3897 template_addr
= build_unary_op (ADDR_EXPR
, NULL_TREE
, template_tree
);
3899 /* For class S, we are done. */
3903 /* Test that we really have a SB descriptor, like DEC Ada. */
3904 t
= build3 (COMPONENT_REF
, TREE_TYPE (klass
), desc
, klass
, NULL
);
3905 u
= convert (TREE_TYPE (klass
), DECL_INITIAL (klass
));
3906 u
= build_binary_op (EQ_EXPR
, boolean_type_node
, t
, u
);
3907 /* If so, there is already a template in the descriptor and
3908 it is located right after the POINTER field. The fields are
3909 64bits so they must be repacked. */
3910 t
= DECL_CHAIN (pointer
);
3911 lfield
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3912 lfield
= convert (TREE_TYPE (TYPE_FIELDS (template_type
)), lfield
);
3915 ufield
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3917 (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type
))), ufield
);
3919 /* Build the template in the form of a constructor. */
3921 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (template_type
), lfield
);
3922 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (template_type
)),
3924 template_tree
= gnat_build_constructor (template_type
, v
);
3926 /* Otherwise use the {1, LENGTH} template we build above. */
3927 template_addr
= build3 (COND_EXPR
, p_bounds_type
, u
,
3928 build_unary_op (ADDR_EXPR
, p_bounds_type
,
3933 case 4: /* Class A */
3934 /* The AFLAGS field is the 3rd field after the pointer in the
3936 t
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer
)));
3937 aflags
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3938 /* The DIMCT field is the next field in the descriptor after
3941 dimct
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3942 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
3943 or FL_COEFF or FL_BOUNDS not set. */
3944 u
= build_int_cst (TREE_TYPE (aflags
), 192);
3945 u
= build_binary_op (TRUTH_OR_EXPR
, boolean_type_node
,
3946 build_binary_op (NE_EXPR
, boolean_type_node
,
3948 convert (TREE_TYPE (dimct
),
3950 build_binary_op (NE_EXPR
, boolean_type_node
,
3951 build2 (BIT_AND_EXPR
,
3955 /* There is already a template in the descriptor and it is located
3956 in block 3. The fields are 64bits so they must be repacked. */
3957 t
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN
3959 lfield
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3960 lfield
= convert (TREE_TYPE (TYPE_FIELDS (template_type
)), lfield
);
3963 ufield
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
3965 (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type
))), ufield
);
3967 /* Build the template in the form of a constructor. */
3969 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (template_type
), lfield
);
3970 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (template_type
)),
3972 template_tree
= gnat_build_constructor (template_type
, v
);
3973 template_tree
= build3 (COND_EXPR
, template_type
, u
,
3974 build_call_raise (CE_Length_Check_Failed
, Empty
,
3975 N_Raise_Constraint_Error
),
3978 = build_unary_op (ADDR_EXPR
, p_bounds_type
, template_tree
);
3981 case 10: /* Class NCA */
3983 post_error ("unsupported descriptor type for &", gnat_subprog
);
3984 template_addr
= integer_zero_node
;
3988 /* Build the fat pointer in the form of a constructor. */
3990 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (gnu_type
), gnu_expr64
);
3991 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (gnu_type
)),
3993 return gnat_build_constructor (gnu_type
, v
);
4000 /* Convert GNU_EXPR, a pointer to a 32bit VMS descriptor, to GNU_TYPE, a
4001 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
4002 which the VMS descriptor is passed. */
4005 convert_vms_descriptor32 (tree gnu_type
, tree gnu_expr
, Entity_Id gnat_subprog
)
4007 tree desc_type
= TREE_TYPE (TREE_TYPE (gnu_expr
));
4008 tree desc
= build1 (INDIRECT_REF
, desc_type
, gnu_expr
);
4009 /* The CLASS field is the 3rd field in the descriptor. */
4010 tree klass
= DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type
)));
4011 /* The POINTER field is the 4th field in the descriptor. */
4012 tree pointer
= DECL_CHAIN (klass
);
4014 /* Retrieve the value of the POINTER field. */
4016 = build3 (COMPONENT_REF
, TREE_TYPE (pointer
), desc
, pointer
, NULL_TREE
);
4018 if (POINTER_TYPE_P (gnu_type
))
4019 return convert (gnu_type
, gnu_expr32
);
4021 else if (TYPE_IS_FAT_POINTER_P (gnu_type
))
4023 tree p_array_type
= TREE_TYPE (TYPE_FIELDS (gnu_type
));
4024 tree p_bounds_type
= TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type
)));
4025 tree template_type
= TREE_TYPE (p_bounds_type
);
4026 tree min_field
= TYPE_FIELDS (template_type
);
4027 tree max_field
= DECL_CHAIN (TYPE_FIELDS (template_type
));
4028 tree template_tree
, template_addr
, aflags
, dimct
, t
, u
;
4029 /* See the head comment of build_vms_descriptor. */
4030 int iklass
= TREE_INT_CST_LOW (DECL_INITIAL (klass
));
4031 vec
<constructor_elt
, va_gc
> *v
;
4033 /* Convert POINTER to the pointer-to-array type. */
4034 gnu_expr32
= convert (p_array_type
, gnu_expr32
);
4038 case 1: /* Class S */
4039 case 15: /* Class SB */
4040 /* Build {1, LENGTH} template; LENGTH is the 1st field. */
4042 t
= TYPE_FIELDS (desc_type
);
4043 t
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
4044 CONSTRUCTOR_APPEND_ELT (v
, min_field
,
4045 convert (TREE_TYPE (min_field
),
4047 CONSTRUCTOR_APPEND_ELT (v
, max_field
,
4048 convert (TREE_TYPE (max_field
), t
));
4049 template_tree
= gnat_build_constructor (template_type
, v
);
4050 template_addr
= build_unary_op (ADDR_EXPR
, NULL_TREE
, template_tree
);
4052 /* For class S, we are done. */
4056 /* Test that we really have a SB descriptor, like DEC Ada. */
4057 t
= build3 (COMPONENT_REF
, TREE_TYPE (klass
), desc
, klass
, NULL
);
4058 u
= convert (TREE_TYPE (klass
), DECL_INITIAL (klass
));
4059 u
= build_binary_op (EQ_EXPR
, boolean_type_node
, t
, u
);
4060 /* If so, there is already a template in the descriptor and
4061 it is located right after the POINTER field. */
4062 t
= DECL_CHAIN (pointer
);
4064 = build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
4065 /* Otherwise use the {1, LENGTH} template we build above. */
4066 template_addr
= build3 (COND_EXPR
, p_bounds_type
, u
,
4067 build_unary_op (ADDR_EXPR
, p_bounds_type
,
4072 case 4: /* Class A */
4073 /* The AFLAGS field is the 7th field in the descriptor. */
4074 t
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer
)));
4075 aflags
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
4076 /* The DIMCT field is the 8th field in the descriptor. */
4078 dimct
= build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
4079 /* Raise CONSTRAINT_ERROR if either more than 1 dimension
4080 or FL_COEFF or FL_BOUNDS not set. */
4081 u
= build_int_cst (TREE_TYPE (aflags
), 192);
4082 u
= build_binary_op (TRUTH_OR_EXPR
, boolean_type_node
,
4083 build_binary_op (NE_EXPR
, boolean_type_node
,
4085 convert (TREE_TYPE (dimct
),
4087 build_binary_op (NE_EXPR
, boolean_type_node
,
4088 build2 (BIT_AND_EXPR
,
4092 /* There is already a template in the descriptor and it is
4093 located at the start of block 3 (12th field). */
4094 t
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (t
))));
4096 = build3 (COMPONENT_REF
, TREE_TYPE (t
), desc
, t
, NULL_TREE
);
4097 template_tree
= build3 (COND_EXPR
, TREE_TYPE (t
), u
,
4098 build_call_raise (CE_Length_Check_Failed
, Empty
,
4099 N_Raise_Constraint_Error
),
4102 = build_unary_op (ADDR_EXPR
, p_bounds_type
, template_tree
);
4105 case 10: /* Class NCA */
4107 post_error ("unsupported descriptor type for &", gnat_subprog
);
4108 template_addr
= integer_zero_node
;
4112 /* Build the fat pointer in the form of a constructor. */
4114 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (gnu_type
), gnu_expr32
);
4115 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (gnu_type
)),
4118 return gnat_build_constructor (gnu_type
, v
);
4125 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
4126 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
4127 pointer type of GNU_EXPR. GNAT_SUBPROG is the subprogram to which the
4128 descriptor is passed. */
4131 convert_vms_descriptor (tree gnu_type
, tree gnu_expr
, tree gnu_expr_alt_type
,
4132 Entity_Id gnat_subprog
)
4134 tree desc_type
= TREE_TYPE (TREE_TYPE (gnu_expr
));
4135 tree desc
= build1 (INDIRECT_REF
, desc_type
, gnu_expr
);
4136 tree mbo
= TYPE_FIELDS (desc_type
);
4137 const char *mbostr
= IDENTIFIER_POINTER (DECL_NAME (mbo
));
4138 tree mbmo
= DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (mbo
)));
4139 tree is64bit
, gnu_expr32
, gnu_expr64
;
4141 /* If the field name is not MBO, it must be 32-bit and no alternate.
4142 Otherwise primary must be 64-bit and alternate 32-bit. */
4143 if (strcmp (mbostr
, "MBO") != 0)
4145 tree ret
= convert_vms_descriptor32 (gnu_type
, gnu_expr
, gnat_subprog
);
4149 /* Build the test for 64-bit descriptor. */
4150 mbo
= build3 (COMPONENT_REF
, TREE_TYPE (mbo
), desc
, mbo
, NULL_TREE
);
4151 mbmo
= build3 (COMPONENT_REF
, TREE_TYPE (mbmo
), desc
, mbmo
, NULL_TREE
);
4153 = build_binary_op (TRUTH_ANDIF_EXPR
, boolean_type_node
,
4154 build_binary_op (EQ_EXPR
, boolean_type_node
,
4155 convert (integer_type_node
, mbo
),
4157 build_binary_op (EQ_EXPR
, boolean_type_node
,
4158 convert (integer_type_node
, mbmo
),
4159 integer_minus_one_node
));
4161 /* Build the 2 possible end results. */
4162 gnu_expr64
= convert_vms_descriptor64 (gnu_type
, gnu_expr
, gnat_subprog
);
4163 gnu_expr
= fold_convert (gnu_expr_alt_type
, gnu_expr
);
4164 gnu_expr32
= convert_vms_descriptor32 (gnu_type
, gnu_expr
, gnat_subprog
);
4165 return build3 (COND_EXPR
, gnu_type
, is64bit
, gnu_expr64
, gnu_expr32
);
4168 /* Build a type to be used to represent an aliased object whose nominal type
4169 is an unconstrained array. This consists of a RECORD_TYPE containing a
4170 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
4171 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
4172 an arbitrary unconstrained object. Use NAME as the name of the record.
4173 DEBUG_INFO_P is true if we need to write debug information for the type. */
4176 build_unc_object_type (tree template_type
, tree object_type
, tree name
,
4179 tree type
= make_node (RECORD_TYPE
);
4181 = create_field_decl (get_identifier ("BOUNDS"), template_type
, type
,
4182 NULL_TREE
, NULL_TREE
, 0, 1);
4184 = create_field_decl (get_identifier ("ARRAY"), object_type
, type
,
4185 NULL_TREE
, NULL_TREE
, 0, 1);
4187 TYPE_NAME (type
) = name
;
4188 TYPE_CONTAINS_TEMPLATE_P (type
) = 1;
4189 DECL_CHAIN (template_field
) = array_field
;
4190 finish_record_type (type
, template_field
, 0, true);
4192 /* Declare it now since it will never be declared otherwise. This is
4193 necessary to ensure that its subtrees are properly marked. */
4194 create_type_decl (name
, type
, true, debug_info_p
, Empty
);
4199 /* Same, taking a thin or fat pointer type instead of a template type. */
4202 build_unc_object_type_from_ptr (tree thin_fat_ptr_type
, tree object_type
,
4203 tree name
, bool debug_info_p
)
4207 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type
));
4210 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type
)
4211 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type
))))
4212 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type
))));
4215 build_unc_object_type (template_type
, object_type
, name
, debug_info_p
);
4218 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
4219 In the normal case this is just two adjustments, but we have more to
4220 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
4223 update_pointer_to (tree old_type
, tree new_type
)
4225 tree ptr
= TYPE_POINTER_TO (old_type
);
4226 tree ref
= TYPE_REFERENCE_TO (old_type
);
4229 /* If this is the main variant, process all the other variants first. */
4230 if (TYPE_MAIN_VARIANT (old_type
) == old_type
)
4231 for (t
= TYPE_NEXT_VARIANT (old_type
); t
; t
= TYPE_NEXT_VARIANT (t
))
4232 update_pointer_to (t
, new_type
);
4234 /* If no pointers and no references, we are done. */
4238 /* Merge the old type qualifiers in the new type.
4240 Each old variant has qualifiers for specific reasons, and the new
4241 designated type as well. Each set of qualifiers represents useful
4242 information grabbed at some point, and merging the two simply unifies
4243 these inputs into the final type description.
4245 Consider for instance a volatile type frozen after an access to constant
4246 type designating it; after the designated type's freeze, we get here with
4247 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
4248 when the access type was processed. We will make a volatile and readonly
4249 designated type, because that's what it really is.
4251 We might also get here for a non-dummy OLD_TYPE variant with different
4252 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
4253 to private record type elaboration (see the comments around the call to
4254 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
4255 the qualifiers in those cases too, to avoid accidentally discarding the
4256 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
4258 = build_qualified_type (new_type
,
4259 TYPE_QUALS (old_type
) | TYPE_QUALS (new_type
));
4261 /* If old type and new type are identical, there is nothing to do. */
4262 if (old_type
== new_type
)
4265 /* Otherwise, first handle the simple case. */
4266 if (TREE_CODE (new_type
) != UNCONSTRAINED_ARRAY_TYPE
)
4268 tree new_ptr
, new_ref
;
4270 /* If pointer or reference already points to new type, nothing to do.
4271 This can happen as update_pointer_to can be invoked multiple times
4272 on the same couple of types because of the type variants. */
4273 if ((ptr
&& TREE_TYPE (ptr
) == new_type
)
4274 || (ref
&& TREE_TYPE (ref
) == new_type
))
4277 /* Chain PTR and its variants at the end. */
4278 new_ptr
= TYPE_POINTER_TO (new_type
);
4281 while (TYPE_NEXT_PTR_TO (new_ptr
))
4282 new_ptr
= TYPE_NEXT_PTR_TO (new_ptr
);
4283 TYPE_NEXT_PTR_TO (new_ptr
) = ptr
;
4286 TYPE_POINTER_TO (new_type
) = ptr
;
4288 /* Now adjust them. */
4289 for (; ptr
; ptr
= TYPE_NEXT_PTR_TO (ptr
))
4290 for (t
= TYPE_MAIN_VARIANT (ptr
); t
; t
= TYPE_NEXT_VARIANT (t
))
4292 TREE_TYPE (t
) = new_type
;
4293 if (TYPE_NULL_BOUNDS (t
))
4294 TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t
), 0)) = new_type
;
4297 /* Chain REF and its variants at the end. */
4298 new_ref
= TYPE_REFERENCE_TO (new_type
);
4301 while (TYPE_NEXT_REF_TO (new_ref
))
4302 new_ref
= TYPE_NEXT_REF_TO (new_ref
);
4303 TYPE_NEXT_REF_TO (new_ref
) = ref
;
4306 TYPE_REFERENCE_TO (new_type
) = ref
;
4308 /* Now adjust them. */
4309 for (; ref
; ref
= TYPE_NEXT_REF_TO (ref
))
4310 for (t
= TYPE_MAIN_VARIANT (ref
); t
; t
= TYPE_NEXT_VARIANT (t
))
4311 TREE_TYPE (t
) = new_type
;
4313 TYPE_POINTER_TO (old_type
) = NULL_TREE
;
4314 TYPE_REFERENCE_TO (old_type
) = NULL_TREE
;
4317 /* Now deal with the unconstrained array case. In this case the pointer
4318 is actually a record where both fields are pointers to dummy nodes.
4319 Turn them into pointers to the correct types using update_pointer_to.
4320 Likewise for the pointer to the object record (thin pointer). */
4323 tree new_ptr
= TYPE_POINTER_TO (new_type
);
4325 gcc_assert (TYPE_IS_FAT_POINTER_P (ptr
));
4327 /* If PTR already points to NEW_TYPE, nothing to do. This can happen
4328 since update_pointer_to can be invoked multiple times on the same
4329 couple of types because of the type variants. */
4330 if (TYPE_UNCONSTRAINED_ARRAY (ptr
) == new_type
)
4334 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr
))),
4335 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr
))));
4338 (TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr
)))),
4339 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr
)))));
4341 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type
),
4342 TYPE_OBJECT_RECORD_TYPE (new_type
));
4344 TYPE_POINTER_TO (old_type
) = NULL_TREE
;
4348 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
4349 unconstrained one. This involves making or finding a template. */
4352 convert_to_fat_pointer (tree type
, tree expr
)
4354 tree template_type
= TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type
))));
4355 tree p_array_type
= TREE_TYPE (TYPE_FIELDS (type
));
4356 tree etype
= TREE_TYPE (expr
);
4358 vec
<constructor_elt
, va_gc
> *v
;
4361 /* If EXPR is null, make a fat pointer that contains a null pointer to the
4362 array (compare_fat_pointers ensures that this is the full discriminant)
4363 and a valid pointer to the bounds. This latter property is necessary
4364 since the compiler can hoist the load of the bounds done through it. */
4365 if (integer_zerop (expr
))
4367 tree ptr_template_type
= TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type
)));
4368 tree null_bounds
, t
;
4370 if (TYPE_NULL_BOUNDS (ptr_template_type
))
4371 null_bounds
= TYPE_NULL_BOUNDS (ptr_template_type
);
4374 /* The template type can still be dummy at this point so we build an
4375 empty constructor. The middle-end will fill it in with zeros. */
4376 t
= build_constructor (template_type
,
4378 TREE_CONSTANT (t
) = TREE_STATIC (t
) = 1;
4379 null_bounds
= build_unary_op (ADDR_EXPR
, NULL_TREE
, t
);
4380 SET_TYPE_NULL_BOUNDS (ptr_template_type
, null_bounds
);
4383 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (type
),
4384 fold_convert (p_array_type
, null_pointer_node
));
4385 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (type
)), null_bounds
);
4386 t
= build_constructor (type
, v
);
4387 /* Do not set TREE_CONSTANT so as to force T to static memory. */
4388 TREE_CONSTANT (t
) = 0;
4389 TREE_STATIC (t
) = 1;
4394 /* If EXPR is a thin pointer, make template and data from the record. */
4395 if (TYPE_IS_THIN_POINTER_P (etype
))
4397 tree field
= TYPE_FIELDS (TREE_TYPE (etype
));
4399 expr
= gnat_protect_expr (expr
);
4400 if (TREE_CODE (expr
) == ADDR_EXPR
)
4401 expr
= TREE_OPERAND (expr
, 0);
4404 /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
4405 the thin pointer value has been shifted so we first need to shift
4406 it back to get the template address. */
4407 if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype
)))
4409 = build_binary_op (POINTER_PLUS_EXPR
, etype
, expr
,
4410 fold_build1 (NEGATE_EXPR
, sizetype
,
4412 (DECL_CHAIN (field
))));
4413 expr
= build1 (INDIRECT_REF
, TREE_TYPE (etype
), expr
);
4416 template_tree
= build_component_ref (expr
, NULL_TREE
, field
, false);
4417 expr
= build_unary_op (ADDR_EXPR
, NULL_TREE
,
4418 build_component_ref (expr
, NULL_TREE
,
4419 DECL_CHAIN (field
), false));
4422 /* Otherwise, build the constructor for the template. */
4424 template_tree
= build_template (template_type
, TREE_TYPE (etype
), expr
);
4426 /* The final result is a constructor for the fat pointer.
4428 If EXPR is an argument of a foreign convention subprogram, the type it
4429 points to is directly the component type. In this case, the expression
4430 type may not match the corresponding FIELD_DECL type at this point, so we
4431 call "convert" here to fix that up if necessary. This type consistency is
4432 required, for instance because it ensures that possible later folding of
4433 COMPONENT_REFs against this constructor always yields something of the
4434 same type as the initial reference.
4436 Note that the call to "build_template" above is still fine because it
4437 will only refer to the provided TEMPLATE_TYPE in this case. */
4438 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (type
),
4439 convert (p_array_type
, expr
));
4440 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (type
)),
4441 build_unary_op (ADDR_EXPR
, NULL_TREE
,
4443 return gnat_build_constructor (type
, v
);
4446 /* Create an expression whose value is that of EXPR,
4447 converted to type TYPE. The TREE_TYPE of the value
4448 is always TYPE. This function implements all reasonable
4449 conversions; callers should filter out those that are
4450 not permitted by the language being compiled. */
4453 convert (tree type
, tree expr
)
4455 tree etype
= TREE_TYPE (expr
);
4456 enum tree_code ecode
= TREE_CODE (etype
);
4457 enum tree_code code
= TREE_CODE (type
);
4459 /* If the expression is already of the right type, we are done. */
4463 /* If both input and output have padding and are of variable size, do this
4464 as an unchecked conversion. Likewise if one is a mere variant of the
4465 other, so we avoid a pointless unpad/repad sequence. */
4466 else if (code
== RECORD_TYPE
&& ecode
== RECORD_TYPE
4467 && TYPE_PADDING_P (type
) && TYPE_PADDING_P (etype
)
4468 && (!TREE_CONSTANT (TYPE_SIZE (type
))
4469 || !TREE_CONSTANT (TYPE_SIZE (etype
))
4470 || TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (etype
)
4471 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type
)))
4472 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype
)))))
4475 /* If the output type has padding, convert to the inner type and make a
4476 constructor to build the record, unless a variable size is involved. */
4477 else if (code
== RECORD_TYPE
&& TYPE_PADDING_P (type
))
4479 vec
<constructor_elt
, va_gc
> *v
;
4481 /* If we previously converted from another type and our type is
4482 of variable size, remove the conversion to avoid the need for
4483 variable-sized temporaries. Likewise for a conversion between
4484 original and packable version. */
4485 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
4486 && (!TREE_CONSTANT (TYPE_SIZE (type
))
4487 || (ecode
== RECORD_TYPE
4488 && TYPE_NAME (etype
)
4489 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr
, 0))))))
4490 expr
= TREE_OPERAND (expr
, 0);
4492 /* If we are just removing the padding from expr, convert the original
4493 object if we have variable size in order to avoid the need for some
4494 variable-sized temporaries. Likewise if the padding is a variant
4495 of the other, so we avoid a pointless unpad/repad sequence. */
4496 if (TREE_CODE (expr
) == COMPONENT_REF
4497 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr
, 0)))
4498 && (!TREE_CONSTANT (TYPE_SIZE (type
))
4499 || TYPE_MAIN_VARIANT (type
)
4500 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr
, 0)))
4501 || (ecode
== RECORD_TYPE
4502 && TYPE_NAME (etype
)
4503 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type
))))))
4504 return convert (type
, TREE_OPERAND (expr
, 0));
4506 /* If the inner type is of self-referential size and the expression type
4507 is a record, do this as an unchecked conversion. But first pad the
4508 expression if possible to have the same size on both sides. */
4509 if (ecode
== RECORD_TYPE
4510 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type
))))
4512 if (TREE_CODE (TYPE_SIZE (etype
)) == INTEGER_CST
)
4513 expr
= convert (maybe_pad_type (etype
, TYPE_SIZE (type
), 0, Empty
,
4514 false, false, false, true),
4516 return unchecked_convert (type
, expr
, false);
4519 /* If we are converting between array types with variable size, do the
4520 final conversion as an unchecked conversion, again to avoid the need
4521 for some variable-sized temporaries. If valid, this conversion is
4522 very likely purely technical and without real effects. */
4523 if (ecode
== ARRAY_TYPE
4524 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type
))) == ARRAY_TYPE
4525 && !TREE_CONSTANT (TYPE_SIZE (etype
))
4526 && !TREE_CONSTANT (TYPE_SIZE (type
)))
4527 return unchecked_convert (type
,
4528 convert (TREE_TYPE (TYPE_FIELDS (type
)),
4533 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (type
),
4534 convert (TREE_TYPE (TYPE_FIELDS (type
)), expr
));
4535 return gnat_build_constructor (type
, v
);
4538 /* If the input type has padding, remove it and convert to the output type.
4539 The conditions ordering is arranged to ensure that the output type is not
4540 a padding type here, as it is not clear whether the conversion would
4541 always be correct if this was to happen. */
4542 else if (ecode
== RECORD_TYPE
&& TYPE_PADDING_P (etype
))
4546 /* If we have just converted to this padded type, just get the
4547 inner expression. */
4548 if (TREE_CODE (expr
) == CONSTRUCTOR
4549 && !vec_safe_is_empty (CONSTRUCTOR_ELTS (expr
))
4550 && (*CONSTRUCTOR_ELTS (expr
))[0].index
== TYPE_FIELDS (etype
))
4551 unpadded
= (*CONSTRUCTOR_ELTS (expr
))[0].value
;
4553 /* Otherwise, build an explicit component reference. */
4556 = build_component_ref (expr
, NULL_TREE
, TYPE_FIELDS (etype
), false);
4558 return convert (type
, unpadded
);
4561 /* If the input is a biased type, adjust first. */
4562 if (ecode
== INTEGER_TYPE
&& TYPE_BIASED_REPRESENTATION_P (etype
))
4563 return convert (type
, fold_build2 (PLUS_EXPR
, TREE_TYPE (etype
),
4564 fold_convert (TREE_TYPE (etype
),
4566 TYPE_MIN_VALUE (etype
)));
4568 /* If the input is a justified modular type, we need to extract the actual
4569 object before converting it to any other type with the exceptions of an
4570 unconstrained array or of a mere type variant. It is useful to avoid the
4571 extraction and conversion in the type variant case because it could end
4572 up replacing a VAR_DECL expr by a constructor and we might be about the
4573 take the address of the result. */
4574 if (ecode
== RECORD_TYPE
&& TYPE_JUSTIFIED_MODULAR_P (etype
)
4575 && code
!= UNCONSTRAINED_ARRAY_TYPE
4576 && TYPE_MAIN_VARIANT (type
) != TYPE_MAIN_VARIANT (etype
))
4577 return convert (type
, build_component_ref (expr
, NULL_TREE
,
4578 TYPE_FIELDS (etype
), false));
4580 /* If converting to a type that contains a template, convert to the data
4581 type and then build the template. */
4582 if (code
== RECORD_TYPE
&& TYPE_CONTAINS_TEMPLATE_P (type
))
4584 tree obj_type
= TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type
)));
4585 vec
<constructor_elt
, va_gc
> *v
;
4588 /* If the source already has a template, get a reference to the
4589 associated array only, as we are going to rebuild a template
4590 for the target type anyway. */
4591 expr
= maybe_unconstrained_array (expr
);
4593 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (type
),
4594 build_template (TREE_TYPE (TYPE_FIELDS (type
)),
4595 obj_type
, NULL_TREE
));
4596 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (type
)),
4597 convert (obj_type
, expr
));
4598 return gnat_build_constructor (type
, v
);
4601 /* There are some cases of expressions that we process specially. */
4602 switch (TREE_CODE (expr
))
4608 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4609 conversion in gnat_expand_expr. NULL_EXPR does not represent
4610 and actual value, so no conversion is needed. */
4611 expr
= copy_node (expr
);
4612 TREE_TYPE (expr
) = type
;
4616 /* If we are converting a STRING_CST to another constrained array type,
4617 just make a new one in the proper type. */
4618 if (code
== ecode
&& AGGREGATE_TYPE_P (etype
)
4619 && !(TREE_CODE (TYPE_SIZE (etype
)) == INTEGER_CST
4620 && TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
))
4622 expr
= copy_node (expr
);
4623 TREE_TYPE (expr
) = type
;
4629 /* If we are converting a VECTOR_CST to a mere variant type, just make
4630 a new one in the proper type. */
4631 if (code
== ecode
&& gnat_types_compatible_p (type
, etype
))
4633 expr
= copy_node (expr
);
4634 TREE_TYPE (expr
) = type
;
4639 /* If we are converting a CONSTRUCTOR to a mere variant type, just make
4640 a new one in the proper type. */
4641 if (code
== ecode
&& gnat_types_compatible_p (type
, etype
))
4643 expr
= copy_node (expr
);
4644 TREE_TYPE (expr
) = type
;
4645 CONSTRUCTOR_ELTS (expr
) = vec_safe_copy (CONSTRUCTOR_ELTS (expr
));
4649 /* Likewise for a conversion between original and packable version, or
4650 conversion between types of the same size and with the same list of
4651 fields, but we have to work harder to preserve type consistency. */
4653 && code
== RECORD_TYPE
4654 && (TYPE_NAME (type
) == TYPE_NAME (etype
)
4655 || tree_int_cst_equal (TYPE_SIZE (type
), TYPE_SIZE (etype
))))
4658 vec
<constructor_elt
, va_gc
> *e
= CONSTRUCTOR_ELTS (expr
);
4659 unsigned HOST_WIDE_INT len
= vec_safe_length (e
);
4660 vec
<constructor_elt
, va_gc
> *v
;
4662 tree efield
= TYPE_FIELDS (etype
), field
= TYPE_FIELDS (type
);
4663 unsigned HOST_WIDE_INT idx
;
4666 /* Whether we need to clear TREE_CONSTANT et al. on the output
4667 constructor when we convert in place. */
4668 bool clear_constant
= false;
4670 FOR_EACH_CONSTRUCTOR_ELT(e
, idx
, index
, value
)
4672 /* We expect only simple constructors. */
4673 if (!SAME_FIELD_P (index
, efield
))
4675 /* The field must be the same. */
4676 if (!SAME_FIELD_P (efield
, field
))
4678 constructor_elt elt
= {field
, convert (TREE_TYPE (field
), value
)};
4679 v
->quick_push (elt
);
4681 /* If packing has made this field a bitfield and the input
4682 value couldn't be emitted statically any more, we need to
4683 clear TREE_CONSTANT on our output. */
4685 && TREE_CONSTANT (expr
)
4686 && !CONSTRUCTOR_BITFIELD_P (efield
)
4687 && CONSTRUCTOR_BITFIELD_P (field
)
4688 && !initializer_constant_valid_for_bitfield_p (value
))
4689 clear_constant
= true;
4691 efield
= DECL_CHAIN (efield
);
4692 field
= DECL_CHAIN (field
);
4695 /* If we have been able to match and convert all the input fields
4696 to their output type, convert in place now. We'll fallback to a
4697 view conversion downstream otherwise. */
4700 expr
= copy_node (expr
);
4701 TREE_TYPE (expr
) = type
;
4702 CONSTRUCTOR_ELTS (expr
) = v
;
4704 TREE_CONSTANT (expr
) = TREE_STATIC (expr
) = 0;
4709 /* Likewise for a conversion between array type and vector type with a
4710 compatible representative array. */
4711 else if (code
== VECTOR_TYPE
4712 && ecode
== ARRAY_TYPE
4713 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type
),
4716 vec
<constructor_elt
, va_gc
> *e
= CONSTRUCTOR_ELTS (expr
);
4717 unsigned HOST_WIDE_INT len
= vec_safe_length (e
);
4718 vec
<constructor_elt
, va_gc
> *v
;
4719 unsigned HOST_WIDE_INT ix
;
4722 /* Build a VECTOR_CST from a *constant* array constructor. */
4723 if (TREE_CONSTANT (expr
))
4725 bool constant_p
= true;
4727 /* Iterate through elements and check if all constructor
4728 elements are *_CSTs. */
4729 FOR_EACH_CONSTRUCTOR_VALUE (e
, ix
, value
)
4730 if (!CONSTANT_CLASS_P (value
))
4737 return build_vector_from_ctor (type
,
4738 CONSTRUCTOR_ELTS (expr
));
4741 /* Otherwise, build a regular vector constructor. */
4743 FOR_EACH_CONSTRUCTOR_VALUE (e
, ix
, value
)
4745 constructor_elt elt
= {NULL_TREE
, value
};
4746 v
->quick_push (elt
);
4748 expr
= copy_node (expr
);
4749 TREE_TYPE (expr
) = type
;
4750 CONSTRUCTOR_ELTS (expr
) = v
;
4755 case UNCONSTRAINED_ARRAY_REF
:
4756 /* First retrieve the underlying array. */
4757 expr
= maybe_unconstrained_array (expr
);
4758 etype
= TREE_TYPE (expr
);
4759 ecode
= TREE_CODE (etype
);
4762 case VIEW_CONVERT_EXPR
:
4764 /* GCC 4.x is very sensitive to type consistency overall, and view
4765 conversions thus are very frequent. Even though just "convert"ing
4766 the inner operand to the output type is fine in most cases, it
4767 might expose unexpected input/output type mismatches in special
4768 circumstances so we avoid such recursive calls when we can. */
4769 tree op0
= TREE_OPERAND (expr
, 0);
4771 /* If we are converting back to the original type, we can just
4772 lift the input conversion. This is a common occurrence with
4773 switches back-and-forth amongst type variants. */
4774 if (type
== TREE_TYPE (op0
))
4777 /* Otherwise, if we're converting between two aggregate or vector
4778 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4779 target type in place or to just convert the inner expression. */
4780 if ((AGGREGATE_TYPE_P (type
) && AGGREGATE_TYPE_P (etype
))
4781 || (VECTOR_TYPE_P (type
) && VECTOR_TYPE_P (etype
)))
4783 /* If we are converting between mere variants, we can just
4784 substitute the VIEW_CONVERT_EXPR in place. */
4785 if (gnat_types_compatible_p (type
, etype
))
4786 return build1 (VIEW_CONVERT_EXPR
, type
, op0
);
4788 /* Otherwise, we may just bypass the input view conversion unless
4789 one of the types is a fat pointer, which is handled by
4790 specialized code below which relies on exact type matching. */
4791 else if (!TYPE_IS_FAT_POINTER_P (type
)
4792 && !TYPE_IS_FAT_POINTER_P (etype
))
4793 return convert (type
, op0
);
4803 /* Check for converting to a pointer to an unconstrained array. */
4804 if (TYPE_IS_FAT_POINTER_P (type
) && !TYPE_IS_FAT_POINTER_P (etype
))
4805 return convert_to_fat_pointer (type
, expr
);
4807 /* If we are converting between two aggregate or vector types that are mere
4808 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4809 to a vector type from its representative array type. */
4810 else if ((code
== ecode
4811 && (AGGREGATE_TYPE_P (type
) || VECTOR_TYPE_P (type
))
4812 && gnat_types_compatible_p (type
, etype
))
4813 || (code
== VECTOR_TYPE
4814 && ecode
== ARRAY_TYPE
4815 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type
),
4817 return build1 (VIEW_CONVERT_EXPR
, type
, expr
);
4819 /* If we are converting between tagged types, try to upcast properly. */
4820 else if (ecode
== RECORD_TYPE
&& code
== RECORD_TYPE
4821 && TYPE_ALIGN_OK (etype
) && TYPE_ALIGN_OK (type
))
4823 tree child_etype
= etype
;
4825 tree field
= TYPE_FIELDS (child_etype
);
4826 if (DECL_NAME (field
) == parent_name_id
&& TREE_TYPE (field
) == type
)
4827 return build_component_ref (expr
, NULL_TREE
, field
, false);
4828 child_etype
= TREE_TYPE (field
);
4829 } while (TREE_CODE (child_etype
) == RECORD_TYPE
);
4832 /* If we are converting from a smaller form of record type back to it, just
4833 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4834 size on both sides. */
4835 else if (ecode
== RECORD_TYPE
&& code
== RECORD_TYPE
4836 && smaller_form_type_p (etype
, type
))
4838 expr
= convert (maybe_pad_type (etype
, TYPE_SIZE (type
), 0, Empty
,
4839 false, false, false, true),
4841 return build1 (VIEW_CONVERT_EXPR
, type
, expr
);
4844 /* In all other cases of related types, make a NOP_EXPR. */
4845 else if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (etype
))
4846 return fold_convert (type
, expr
);
4851 return fold_build1 (CONVERT_EXPR
, type
, expr
);
4854 if (TYPE_HAS_ACTUAL_BOUNDS_P (type
)
4855 && (ecode
== ARRAY_TYPE
|| ecode
== UNCONSTRAINED_ARRAY_TYPE
4856 || (ecode
== RECORD_TYPE
&& TYPE_CONTAINS_TEMPLATE_P (etype
))))
4857 return unchecked_convert (type
, expr
, false);
4858 else if (TYPE_BIASED_REPRESENTATION_P (type
))
4859 return fold_convert (type
,
4860 fold_build2 (MINUS_EXPR
, TREE_TYPE (type
),
4861 convert (TREE_TYPE (type
), expr
),
4862 TYPE_MIN_VALUE (type
)));
4864 /* ... fall through ... */
4868 /* If we are converting an additive expression to an integer type
4869 with lower precision, be wary of the optimization that can be
4870 applied by convert_to_integer. There are 2 problematic cases:
4871 - if the first operand was originally of a biased type,
4872 because we could be recursively called to convert it
4873 to an intermediate type and thus rematerialize the
4874 additive operator endlessly,
4875 - if the expression contains a placeholder, because an
4876 intermediate conversion that changes the sign could
4877 be inserted and thus introduce an artificial overflow
4878 at compile time when the placeholder is substituted. */
4879 if (code
== INTEGER_TYPE
4880 && ecode
== INTEGER_TYPE
4881 && TYPE_PRECISION (type
) < TYPE_PRECISION (etype
)
4882 && (TREE_CODE (expr
) == PLUS_EXPR
|| TREE_CODE (expr
) == MINUS_EXPR
))
4884 tree op0
= get_unwidened (TREE_OPERAND (expr
, 0), type
);
4886 if ((TREE_CODE (TREE_TYPE (op0
)) == INTEGER_TYPE
4887 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0
)))
4888 || CONTAINS_PLACEHOLDER_P (expr
))
4889 return build1 (NOP_EXPR
, type
, expr
);
4892 return fold (convert_to_integer (type
, expr
));
4895 case REFERENCE_TYPE
:
4896 /* If converting between two thin pointers, adjust if needed to account
4897 for differing offsets from the base pointer, depending on whether
4898 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
4899 if (TYPE_IS_THIN_POINTER_P (etype
) && TYPE_IS_THIN_POINTER_P (type
))
4902 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype
)) != NULL_TREE
4903 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype
))))
4906 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type
)) != NULL_TREE
4907 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type
))))
4909 tree byte_diff
= size_diffop (type_pos
, etype_pos
);
4911 expr
= build1 (NOP_EXPR
, type
, expr
);
4912 if (integer_zerop (byte_diff
))
4915 return build_binary_op (POINTER_PLUS_EXPR
, type
, expr
,
4916 fold_convert (sizetype
, byte_diff
));
4919 /* If converting fat pointer to normal or thin pointer, get the pointer
4920 to the array and then convert it. */
4921 if (TYPE_IS_FAT_POINTER_P (etype
))
4923 = build_component_ref (expr
, NULL_TREE
, TYPE_FIELDS (etype
), false);
4925 return fold (convert_to_pointer (type
, expr
));
4928 return fold (convert_to_real (type
, expr
));
4931 if (TYPE_JUSTIFIED_MODULAR_P (type
) && !AGGREGATE_TYPE_P (etype
))
4933 vec
<constructor_elt
, va_gc
> *v
;
4936 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (type
),
4937 convert (TREE_TYPE (TYPE_FIELDS (type
)),
4939 return gnat_build_constructor (type
, v
);
4942 /* ... fall through ... */
4945 /* In these cases, assume the front-end has validated the conversion.
4946 If the conversion is valid, it will be a bit-wise conversion, so
4947 it can be viewed as an unchecked conversion. */
4948 return unchecked_convert (type
, expr
, false);
4951 /* This is a either a conversion between a tagged type and some
4952 subtype, which we have to mark as a UNION_TYPE because of
4953 overlapping fields or a conversion of an Unchecked_Union. */
4954 return unchecked_convert (type
, expr
, false);
4956 case UNCONSTRAINED_ARRAY_TYPE
:
4957 /* If the input is a VECTOR_TYPE, convert to the representative
4958 array type first. */
4959 if (ecode
== VECTOR_TYPE
)
4961 expr
= convert (TYPE_REPRESENTATIVE_ARRAY (etype
), expr
);
4962 etype
= TREE_TYPE (expr
);
4963 ecode
= TREE_CODE (etype
);
4966 /* If EXPR is a constrained array, take its address, convert it to a
4967 fat pointer, and then dereference it. Likewise if EXPR is a
4968 record containing both a template and a constrained array.
4969 Note that a record representing a justified modular type
4970 always represents a packed constrained array. */
4971 if (ecode
== ARRAY_TYPE
4972 || (ecode
== INTEGER_TYPE
&& TYPE_HAS_ACTUAL_BOUNDS_P (etype
))
4973 || (ecode
== RECORD_TYPE
&& TYPE_CONTAINS_TEMPLATE_P (etype
))
4974 || (ecode
== RECORD_TYPE
&& TYPE_JUSTIFIED_MODULAR_P (etype
)))
4977 (INDIRECT_REF
, NULL_TREE
,
4978 convert_to_fat_pointer (TREE_TYPE (type
),
4979 build_unary_op (ADDR_EXPR
,
4982 /* Do something very similar for converting one unconstrained
4983 array to another. */
4984 else if (ecode
== UNCONSTRAINED_ARRAY_TYPE
)
4986 build_unary_op (INDIRECT_REF
, NULL_TREE
,
4987 convert (TREE_TYPE (type
),
4988 build_unary_op (ADDR_EXPR
,
4994 return fold (convert_to_complex (type
, expr
));
5001 /* Create an expression whose value is that of EXPR converted to the common
5002 index type, which is sizetype. EXPR is supposed to be in the base type
5003 of the GNAT index type. Calling it is equivalent to doing
5005 convert (sizetype, expr)
5007 but we try to distribute the type conversion with the knowledge that EXPR
5008 cannot overflow in its type. This is a best-effort approach and we fall
5009 back to the above expression as soon as difficulties are encountered.
5011 This is necessary to overcome issues that arise when the GNAT base index
5012 type and the GCC common index type (sizetype) don't have the same size,
5013 which is quite frequent on 64-bit architectures. In this case, and if
5014 the GNAT base index type is signed but the iteration type of the loop has
5015 been forced to unsigned, the loop scalar evolution engine cannot compute
5016 a simple evolution for the general induction variables associated with the
5017 array indices, because it will preserve the wrap-around semantics in the
5018 unsigned type of their "inner" part. As a result, many loop optimizations
5021 The solution is to use a special (basic) induction variable that is at
5022 least as large as sizetype, and to express the aforementioned general
5023 induction variables in terms of this induction variable, eliminating
5024 the problematic intermediate truncation to the GNAT base index type.
5025 This is possible as long as the original expression doesn't overflow
5026 and if the middle-end hasn't introduced artificial overflows in the
5027 course of the various simplification it can make to the expression. */
5030 convert_to_index_type (tree expr
)
5032 enum tree_code code
= TREE_CODE (expr
);
5033 tree type
= TREE_TYPE (expr
);
5035 /* If the type is unsigned, overflow is allowed so we cannot be sure that
5036 EXPR doesn't overflow. Keep it simple if optimization is disabled. */
5037 if (TYPE_UNSIGNED (type
) || !optimize
)
5038 return convert (sizetype
, expr
);
5043 /* The main effect of the function: replace a loop parameter with its
5044 associated special induction variable. */
5045 if (DECL_LOOP_PARM_P (expr
) && DECL_INDUCTION_VAR (expr
))
5046 expr
= DECL_INDUCTION_VAR (expr
);
5051 tree otype
= TREE_TYPE (TREE_OPERAND (expr
, 0));
5052 /* Bail out as soon as we suspect some sort of type frobbing. */
5053 if (TYPE_PRECISION (type
) != TYPE_PRECISION (otype
)
5054 || TYPE_UNSIGNED (type
) != TYPE_UNSIGNED (otype
))
5058 /* ... fall through ... */
5060 case NON_LVALUE_EXPR
:
5061 return fold_build1 (code
, sizetype
,
5062 convert_to_index_type (TREE_OPERAND (expr
, 0)));
5067 return fold_build2 (code
, sizetype
,
5068 convert_to_index_type (TREE_OPERAND (expr
, 0)),
5069 convert_to_index_type (TREE_OPERAND (expr
, 1)));
5072 return fold_build2 (code
, sizetype
, TREE_OPERAND (expr
, 0),
5073 convert_to_index_type (TREE_OPERAND (expr
, 1)));
5076 return fold_build3 (code
, sizetype
, TREE_OPERAND (expr
, 0),
5077 convert_to_index_type (TREE_OPERAND (expr
, 1)),
5078 convert_to_index_type (TREE_OPERAND (expr
, 2)));
5084 return convert (sizetype
, expr
);
5087 /* Remove all conversions that are done in EXP. This includes converting
5088 from a padded type or to a justified modular type. If TRUE_ADDRESS
5089 is true, always return the address of the containing object even if
5090 the address is not bit-aligned. */
5093 remove_conversions (tree exp
, bool true_address
)
5095 switch (TREE_CODE (exp
))
5099 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
5100 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp
)))
5102 remove_conversions ((*CONSTRUCTOR_ELTS (exp
))[0].value
, true);
5106 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp
, 0))))
5107 return remove_conversions (TREE_OPERAND (exp
, 0), true_address
);
5111 case VIEW_CONVERT_EXPR
:
5112 case NON_LVALUE_EXPR
:
5113 return remove_conversions (TREE_OPERAND (exp
, 0), true_address
);
5122 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
5123 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
5124 likewise return an expression pointing to the underlying array. */
5127 maybe_unconstrained_array (tree exp
)
5129 enum tree_code code
= TREE_CODE (exp
);
5130 tree type
= TREE_TYPE (exp
);
5132 switch (TREE_CODE (type
))
5134 case UNCONSTRAINED_ARRAY_TYPE
:
5135 if (code
== UNCONSTRAINED_ARRAY_REF
)
5137 const bool read_only
= TREE_READONLY (exp
);
5138 const bool no_trap
= TREE_THIS_NOTRAP (exp
);
5140 exp
= TREE_OPERAND (exp
, 0);
5141 type
= TREE_TYPE (exp
);
5143 if (TREE_CODE (exp
) == COND_EXPR
)
5146 = build_unary_op (INDIRECT_REF
, NULL_TREE
,
5147 build_component_ref (TREE_OPERAND (exp
, 1),
5152 = build_unary_op (INDIRECT_REF
, NULL_TREE
,
5153 build_component_ref (TREE_OPERAND (exp
, 2),
5158 exp
= build3 (COND_EXPR
,
5159 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type
))),
5160 TREE_OPERAND (exp
, 0), op1
, op2
);
5164 exp
= build_unary_op (INDIRECT_REF
, NULL_TREE
,
5165 build_component_ref (exp
, NULL_TREE
,
5168 TREE_READONLY (exp
) = read_only
;
5169 TREE_THIS_NOTRAP (exp
) = no_trap
;
5173 else if (code
== NULL_EXPR
)
5174 exp
= build1 (NULL_EXPR
,
5175 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type
)))),
5176 TREE_OPERAND (exp
, 0));
5180 /* If this is a padded type and it contains a template, convert to the
5181 unpadded type first. */
5182 if (TYPE_PADDING_P (type
)
5183 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type
))) == RECORD_TYPE
5184 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type
))))
5186 exp
= convert (TREE_TYPE (TYPE_FIELDS (type
)), exp
);
5187 type
= TREE_TYPE (exp
);
5190 if (TYPE_CONTAINS_TEMPLATE_P (type
))
5192 exp
= build_component_ref (exp
, NULL_TREE
,
5193 DECL_CHAIN (TYPE_FIELDS (type
)),
5195 type
= TREE_TYPE (exp
);
5197 /* If the array type is padded, convert to the unpadded type. */
5198 if (TYPE_IS_PADDING_P (type
))
5199 exp
= convert (TREE_TYPE (TYPE_FIELDS (type
)), exp
);
5210 /* Return true if EXPR is an expression that can be folded as an operand
5211 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
5214 can_fold_for_view_convert_p (tree expr
)
5218 /* The folder will fold NOP_EXPRs between integral types with the same
5219 precision (in the middle-end's sense). We cannot allow it if the
5220 types don't have the same precision in the Ada sense as well. */
5221 if (TREE_CODE (expr
) != NOP_EXPR
)
5224 t1
= TREE_TYPE (expr
);
5225 t2
= TREE_TYPE (TREE_OPERAND (expr
, 0));
5227 /* Defer to the folder for non-integral conversions. */
5228 if (!(INTEGRAL_TYPE_P (t1
) && INTEGRAL_TYPE_P (t2
)))
5231 /* Only fold conversions that preserve both precisions. */
5232 if (TYPE_PRECISION (t1
) == TYPE_PRECISION (t2
)
5233 && operand_equal_p (rm_size (t1
), rm_size (t2
), 0))
5239 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
5240 If NOTRUNC_P is true, truncation operations should be suppressed.
5242 Special care is required with (source or target) integral types whose
5243 precision is not equal to their size, to make sure we fetch or assign
5244 the value bits whose location might depend on the endianness, e.g.
5246 Rmsize : constant := 8;
5247 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
5249 type Bit_Array is array (1 .. Rmsize) of Boolean;
5250 pragma Pack (Bit_Array);
5252 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
5254 Value : Int := 2#1000_0001#;
5255 Vbits : Bit_Array := To_Bit_Array (Value);
5257 we expect the 8 bits at Vbits'Address to always contain Value, while
5258 their original location depends on the endianness, at Value'Address
5259 on a little-endian architecture but not on a big-endian one. */
5262 unchecked_convert (tree type
, tree expr
, bool notrunc_p
)
5264 tree etype
= TREE_TYPE (expr
);
5265 enum tree_code ecode
= TREE_CODE (etype
);
5266 enum tree_code code
= TREE_CODE (type
);
5269 /* If the expression is already of the right type, we are done. */
5273 /* If both types types are integral just do a normal conversion.
5274 Likewise for a conversion to an unconstrained array. */
5275 if ((((INTEGRAL_TYPE_P (type
)
5276 && !(code
== INTEGER_TYPE
&& TYPE_VAX_FLOATING_POINT_P (type
)))
5277 || (POINTER_TYPE_P (type
) && !TYPE_IS_THIN_POINTER_P (type
))
5278 || (code
== RECORD_TYPE
&& TYPE_JUSTIFIED_MODULAR_P (type
)))
5279 && ((INTEGRAL_TYPE_P (etype
)
5280 && !(ecode
== INTEGER_TYPE
&& TYPE_VAX_FLOATING_POINT_P (etype
)))
5281 || (POINTER_TYPE_P (etype
) && !TYPE_IS_THIN_POINTER_P (etype
))
5282 || (ecode
== RECORD_TYPE
&& TYPE_JUSTIFIED_MODULAR_P (etype
))))
5283 || code
== UNCONSTRAINED_ARRAY_TYPE
)
5285 if (ecode
== INTEGER_TYPE
&& TYPE_BIASED_REPRESENTATION_P (etype
))
5287 tree ntype
= copy_type (etype
);
5288 TYPE_BIASED_REPRESENTATION_P (ntype
) = 0;
5289 TYPE_MAIN_VARIANT (ntype
) = ntype
;
5290 expr
= build1 (NOP_EXPR
, ntype
, expr
);
5293 if (code
== INTEGER_TYPE
&& TYPE_BIASED_REPRESENTATION_P (type
))
5295 tree rtype
= copy_type (type
);
5296 TYPE_BIASED_REPRESENTATION_P (rtype
) = 0;
5297 TYPE_MAIN_VARIANT (rtype
) = rtype
;
5298 expr
= convert (rtype
, expr
);
5299 expr
= build1 (NOP_EXPR
, type
, expr
);
5302 expr
= convert (type
, expr
);
5305 /* If we are converting to an integral type whose precision is not equal
5306 to its size, first unchecked convert to a record type that contains an
5307 field of the given precision. Then extract the field. */
5308 else if (INTEGRAL_TYPE_P (type
)
5309 && TYPE_RM_SIZE (type
)
5310 && 0 != compare_tree_int (TYPE_RM_SIZE (type
),
5311 GET_MODE_BITSIZE (TYPE_MODE (type
))))
5313 tree rec_type
= make_node (RECORD_TYPE
);
5314 unsigned HOST_WIDE_INT prec
= TREE_INT_CST_LOW (TYPE_RM_SIZE (type
));
5315 tree field_type
, field
;
5317 if (TYPE_UNSIGNED (type
))
5318 field_type
= make_unsigned_type (prec
);
5320 field_type
= make_signed_type (prec
);
5321 SET_TYPE_RM_SIZE (field_type
, TYPE_RM_SIZE (type
));
5323 field
= create_field_decl (get_identifier ("OBJ"), field_type
, rec_type
,
5324 NULL_TREE
, NULL_TREE
, 1, 0);
5326 TYPE_FIELDS (rec_type
) = field
;
5327 layout_type (rec_type
);
5329 expr
= unchecked_convert (rec_type
, expr
, notrunc_p
);
5330 expr
= build_component_ref (expr
, NULL_TREE
, field
, false);
5331 expr
= fold_build1 (NOP_EXPR
, type
, expr
);
5334 /* Similarly if we are converting from an integral type whose precision is
5335 not equal to its size, first copy into a field of the given precision
5336 and unchecked convert the record type. */
5337 else if (INTEGRAL_TYPE_P (etype
)
5338 && TYPE_RM_SIZE (etype
)
5339 && 0 != compare_tree_int (TYPE_RM_SIZE (etype
),
5340 GET_MODE_BITSIZE (TYPE_MODE (etype
))))
5342 tree rec_type
= make_node (RECORD_TYPE
);
5343 unsigned HOST_WIDE_INT prec
= TREE_INT_CST_LOW (TYPE_RM_SIZE (etype
));
5344 vec
<constructor_elt
, va_gc
> *v
;
5346 tree field_type
, field
;
5348 if (TYPE_UNSIGNED (etype
))
5349 field_type
= make_unsigned_type (prec
);
5351 field_type
= make_signed_type (prec
);
5352 SET_TYPE_RM_SIZE (field_type
, TYPE_RM_SIZE (etype
));
5354 field
= create_field_decl (get_identifier ("OBJ"), field_type
, rec_type
,
5355 NULL_TREE
, NULL_TREE
, 1, 0);
5357 TYPE_FIELDS (rec_type
) = field
;
5358 layout_type (rec_type
);
5360 expr
= fold_build1 (NOP_EXPR
, field_type
, expr
);
5361 CONSTRUCTOR_APPEND_ELT (v
, field
, expr
);
5362 expr
= gnat_build_constructor (rec_type
, v
);
5363 expr
= unchecked_convert (type
, expr
, notrunc_p
);
5366 /* If we are converting from a scalar type to a type with a different size,
5367 we need to pad to have the same size on both sides.
5369 ??? We cannot do it unconditionally because unchecked conversions are
5370 used liberally by the front-end to implement polymorphism, e.g. in:
5372 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
5373 return p___size__4 (p__object!(S191s.all));
5375 so we skip all expressions that are references. */
5376 else if (!REFERENCE_CLASS_P (expr
)
5377 && !AGGREGATE_TYPE_P (etype
)
5378 && TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
5379 && (c
= tree_int_cst_compare (TYPE_SIZE (etype
), TYPE_SIZE (type
))))
5383 expr
= convert (maybe_pad_type (etype
, TYPE_SIZE (type
), 0, Empty
,
5384 false, false, false, true),
5386 expr
= unchecked_convert (type
, expr
, notrunc_p
);
5390 tree rec_type
= maybe_pad_type (type
, TYPE_SIZE (etype
), 0, Empty
,
5391 false, false, false, true);
5392 expr
= unchecked_convert (rec_type
, expr
, notrunc_p
);
5393 expr
= build_component_ref (expr
, NULL_TREE
, TYPE_FIELDS (rec_type
),
5398 /* We have a special case when we are converting between two unconstrained
5399 array types. In that case, take the address, convert the fat pointer
5400 types, and dereference. */
5401 else if (ecode
== code
&& code
== UNCONSTRAINED_ARRAY_TYPE
)
5402 expr
= build_unary_op (INDIRECT_REF
, NULL_TREE
,
5403 build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (type
),
5404 build_unary_op (ADDR_EXPR
, NULL_TREE
,
5407 /* Another special case is when we are converting to a vector type from its
5408 representative array type; this a regular conversion. */
5409 else if (code
== VECTOR_TYPE
5410 && ecode
== ARRAY_TYPE
5411 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type
),
5413 expr
= convert (type
, expr
);
5417 expr
= maybe_unconstrained_array (expr
);
5418 etype
= TREE_TYPE (expr
);
5419 ecode
= TREE_CODE (etype
);
5420 if (can_fold_for_view_convert_p (expr
))
5421 expr
= fold_build1 (VIEW_CONVERT_EXPR
, type
, expr
);
5423 expr
= build1 (VIEW_CONVERT_EXPR
, type
, expr
);
5426 /* If the result is an integral type whose precision is not equal to its
5427 size, sign- or zero-extend the result. We need not do this if the input
5428 is an integral type of the same precision and signedness or if the output
5429 is a biased type or if both the input and output are unsigned. */
5431 && INTEGRAL_TYPE_P (type
) && TYPE_RM_SIZE (type
)
5432 && !(code
== INTEGER_TYPE
&& TYPE_BIASED_REPRESENTATION_P (type
))
5433 && 0 != compare_tree_int (TYPE_RM_SIZE (type
),
5434 GET_MODE_BITSIZE (TYPE_MODE (type
)))
5435 && !(INTEGRAL_TYPE_P (etype
)
5436 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (etype
)
5437 && operand_equal_p (TYPE_RM_SIZE (type
),
5438 (TYPE_RM_SIZE (etype
) != 0
5439 ? TYPE_RM_SIZE (etype
) : TYPE_SIZE (etype
)),
5441 && !(TYPE_UNSIGNED (type
) && TYPE_UNSIGNED (etype
)))
5444 = gnat_type_for_mode (TYPE_MODE (type
), TYPE_UNSIGNED (type
));
5446 = convert (base_type
,
5447 size_binop (MINUS_EXPR
,
5449 (GET_MODE_BITSIZE (TYPE_MODE (type
))),
5450 TYPE_RM_SIZE (type
)));
5453 build_binary_op (RSHIFT_EXPR
, base_type
,
5454 build_binary_op (LSHIFT_EXPR
, base_type
,
5455 convert (base_type
, expr
),
5460 /* An unchecked conversion should never raise Constraint_Error. The code
5461 below assumes that GCC's conversion routines overflow the same way that
5462 the underlying hardware does. This is probably true. In the rare case
5463 when it is false, we can rely on the fact that such conversions are
5464 erroneous anyway. */
5465 if (TREE_CODE (expr
) == INTEGER_CST
)
5466 TREE_OVERFLOW (expr
) = 0;
5468 /* If the sizes of the types differ and this is an VIEW_CONVERT_EXPR,
5469 show no longer constant. */
5470 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
5471 && !operand_equal_p (TYPE_SIZE_UNIT (type
), TYPE_SIZE_UNIT (etype
),
5473 TREE_CONSTANT (expr
) = 0;
5478 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
5479 the latter being a record type as predicated by Is_Record_Type. */
5482 tree_code_for_record_type (Entity_Id gnat_type
)
5484 Node_Id component_list
, component
;
5486 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5487 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
5488 if (!Is_Unchecked_Union (gnat_type
))
5491 gnat_type
= Implementation_Base_Type (gnat_type
);
5493 = Component_List (Type_Definition (Declaration_Node (gnat_type
)));
5495 for (component
= First_Non_Pragma (Component_Items (component_list
));
5496 Present (component
);
5497 component
= Next_Non_Pragma (component
))
5498 if (Ekind (Defining_Entity (component
)) == E_Component
)
5504 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5505 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5506 according to the presence of an alignment clause on the type or, if it
5507 is an array, on the component type. */
5510 is_double_float_or_array (Entity_Id gnat_type
, bool *align_clause
)
5512 gnat_type
= Underlying_Type (gnat_type
);
5514 *align_clause
= Present (Alignment_Clause (gnat_type
));
5516 if (Is_Array_Type (gnat_type
))
5518 gnat_type
= Underlying_Type (Component_Type (gnat_type
));
5519 if (Present (Alignment_Clause (gnat_type
)))
5520 *align_clause
= true;
5523 if (!Is_Floating_Point_Type (gnat_type
))
5526 if (UI_To_Int (Esize (gnat_type
)) != 64)
5532 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5533 size is greater or equal to 64 bits, or an array of such a type. Set
5534 ALIGN_CLAUSE according to the presence of an alignment clause on the
5535 type or, if it is an array, on the component type. */
5538 is_double_scalar_or_array (Entity_Id gnat_type
, bool *align_clause
)
5540 gnat_type
= Underlying_Type (gnat_type
);
5542 *align_clause
= Present (Alignment_Clause (gnat_type
));
5544 if (Is_Array_Type (gnat_type
))
5546 gnat_type
= Underlying_Type (Component_Type (gnat_type
));
5547 if (Present (Alignment_Clause (gnat_type
)))
5548 *align_clause
= true;
5551 if (!Is_Scalar_Type (gnat_type
))
5554 if (UI_To_Int (Esize (gnat_type
)) < 64)
5560 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
5561 component of an aggregate type. */
5564 type_for_nonaliased_component_p (tree gnu_type
)
5566 /* If the type is passed by reference, we may have pointers to the
5567 component so it cannot be made non-aliased. */
5568 if (must_pass_by_ref (gnu_type
) || default_pass_by_ref (gnu_type
))
5571 /* We used to say that any component of aggregate type is aliased
5572 because the front-end may take 'Reference of it. The front-end
5573 has been enhanced in the meantime so as to use a renaming instead
5574 in most cases, but the back-end can probably take the address of
5575 such a component too so we go for the conservative stance.
5577 For instance, we might need the address of any array type, even
5578 if normally passed by copy, to construct a fat pointer if the
5579 component is used as an actual for an unconstrained formal.
5581 Likewise for record types: even if a specific record subtype is
5582 passed by copy, the parent type might be passed by ref (e.g. if
5583 it's of variable size) and we might take the address of a child
5584 component to pass to a parent formal. We have no way to check
5585 for such conditions here. */
5586 if (AGGREGATE_TYPE_P (gnu_type
))
5592 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
5595 smaller_form_type_p (tree type
, tree orig_type
)
5599 /* We're not interested in variants here. */
5600 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (orig_type
))
5603 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5604 if (TYPE_NAME (type
) != TYPE_NAME (orig_type
))
5607 size
= TYPE_SIZE (type
);
5608 osize
= TYPE_SIZE (orig_type
);
5610 if (!(TREE_CODE (size
) == INTEGER_CST
&& TREE_CODE (osize
) == INTEGER_CST
))
5613 return tree_int_cst_lt (size
, osize
) != 0;
5616 /* Perform final processing on global variables. */
5618 static GTY (()) tree dummy_global
;
5621 gnat_write_global_declarations (void)
5626 /* If we have declared types as used at the global level, insert them in
5627 the global hash table. We use a dummy variable for this purpose. */
5628 if (types_used_by_cur_var_decl
&& !types_used_by_cur_var_decl
->is_empty ())
5630 struct varpool_node
*node
;
5633 ASM_FORMAT_PRIVATE_NAME (label
, first_global_object_name
, 0);
5635 = build_decl (BUILTINS_LOCATION
, VAR_DECL
, get_identifier (label
),
5637 TREE_STATIC (dummy_global
) = 1;
5638 TREE_ASM_WRITTEN (dummy_global
) = 1;
5639 node
= varpool_node_for_decl (dummy_global
);
5640 node
->force_output
= 1;
5642 while (!types_used_by_cur_var_decl
->is_empty ())
5644 tree t
= types_used_by_cur_var_decl
->pop ();
5645 types_used_by_var_decl_insert (t
, dummy_global
);
5649 /* Output debug information for all global type declarations first. This
5650 ensures that global types whose compilation hasn't been finalized yet,
5651 for example pointers to Taft amendment types, have their compilation
5652 finalized in the right context. */
5653 FOR_EACH_VEC_SAFE_ELT (global_decls
, i
, iter
)
5654 if (TREE_CODE (iter
) == TYPE_DECL
)
5655 debug_hooks
->global_decl (iter
);
5657 /* Proceed to optimize and emit assembly. */
5658 finalize_compilation_unit ();
5660 /* After cgraph has had a chance to emit everything that's going to
5661 be emitted, output debug information for the rest of globals. */
5664 timevar_push (TV_SYMOUT
);
5665 FOR_EACH_VEC_SAFE_ELT (global_decls
, i
, iter
)
5666 if (TREE_CODE (iter
) != TYPE_DECL
)
5667 debug_hooks
->global_decl (iter
);
5668 timevar_pop (TV_SYMOUT
);
5672 /* ************************************************************************
5673 * * GCC builtins support *
5674 * ************************************************************************ */
5676 /* The general scheme is fairly simple:
5678 For each builtin function/type to be declared, gnat_install_builtins calls
5679 internal facilities which eventually get to gnat_push_decl, which in turn
5680 tracks the so declared builtin function decls in the 'builtin_decls' global
5681 datastructure. When an Intrinsic subprogram declaration is processed, we
5682 search this global datastructure to retrieve the associated BUILT_IN DECL
5685 /* Search the chain of currently available builtin declarations for a node
5686 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5687 found, if any, or NULL_TREE otherwise. */
5689 builtin_decl_for (tree name
)
5694 FOR_EACH_VEC_SAFE_ELT (builtin_decls
, i
, decl
)
5695 if (DECL_NAME (decl
) == name
)
5701 /* The code below eventually exposes gnat_install_builtins, which declares
5702 the builtin types and functions we might need, either internally or as
5703 user accessible facilities.
5705 ??? This is a first implementation shot, still in rough shape. It is
5706 heavily inspired from the "C" family implementation, with chunks copied
5707 verbatim from there.
5709 Two obvious TODO candidates are
5710 o Use a more efficient name/decl mapping scheme
5711 o Devise a middle-end infrastructure to avoid having to copy
5712 pieces between front-ends. */
5714 /* ----------------------------------------------------------------------- *
5715 * BUILTIN ELEMENTARY TYPES *
5716 * ----------------------------------------------------------------------- */
5718 /* Standard data types to be used in builtin argument declarations. */
5722 CTI_SIGNED_SIZE_TYPE
, /* For format checking only. */
5724 CTI_CONST_STRING_TYPE
,
5729 static tree c_global_trees
[CTI_MAX
];
5731 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
5732 #define string_type_node c_global_trees[CTI_STRING_TYPE]
5733 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
5735 /* ??? In addition some attribute handlers, we currently don't support a
5736 (small) number of builtin-types, which in turns inhibits support for a
5737 number of builtin functions. */
5738 #define wint_type_node void_type_node
5739 #define intmax_type_node void_type_node
5740 #define uintmax_type_node void_type_node
5742 /* Build the void_list_node (void_type_node having been created). */
5745 build_void_list_node (void)
5747 tree t
= build_tree_list (NULL_TREE
, void_type_node
);
5751 /* Used to help initialize the builtin-types.def table. When a type of
5752 the correct size doesn't exist, use error_mark_node instead of NULL.
5753 The later results in segfaults even when a decl using the type doesn't
5757 builtin_type_for_size (int size
, bool unsignedp
)
5759 tree type
= gnat_type_for_size (size
, unsignedp
);
5760 return type
? type
: error_mark_node
;
5763 /* Build/push the elementary type decls that builtin functions/types
5767 install_builtin_elementary_types (void)
5769 signed_size_type_node
= gnat_signed_type (size_type_node
);
5770 pid_type_node
= integer_type_node
;
5771 void_list_node
= build_void_list_node ();
5773 string_type_node
= build_pointer_type (char_type_node
);
5774 const_string_type_node
5775 = build_pointer_type (build_qualified_type
5776 (char_type_node
, TYPE_QUAL_CONST
));
5779 /* ----------------------------------------------------------------------- *
5780 * BUILTIN FUNCTION TYPES *
5781 * ----------------------------------------------------------------------- */
5783 /* Now, builtin function types per se. */
5787 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
5788 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
5789 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
5790 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
5791 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5792 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5793 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
5794 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
5795 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
5796 #define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7, ARG8) NAME,
5797 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
5798 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
5799 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
5800 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5801 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5802 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG6) \
5804 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
5805 #include "builtin-types.def"
5806 #undef DEF_PRIMITIVE_TYPE
5807 #undef DEF_FUNCTION_TYPE_0
5808 #undef DEF_FUNCTION_TYPE_1
5809 #undef DEF_FUNCTION_TYPE_2
5810 #undef DEF_FUNCTION_TYPE_3
5811 #undef DEF_FUNCTION_TYPE_4
5812 #undef DEF_FUNCTION_TYPE_5
5813 #undef DEF_FUNCTION_TYPE_6
5814 #undef DEF_FUNCTION_TYPE_7
5815 #undef DEF_FUNCTION_TYPE_8
5816 #undef DEF_FUNCTION_TYPE_VAR_0
5817 #undef DEF_FUNCTION_TYPE_VAR_1
5818 #undef DEF_FUNCTION_TYPE_VAR_2
5819 #undef DEF_FUNCTION_TYPE_VAR_3
5820 #undef DEF_FUNCTION_TYPE_VAR_4
5821 #undef DEF_FUNCTION_TYPE_VAR_5
5822 #undef DEF_POINTER_TYPE
5826 typedef enum c_builtin_type builtin_type
;
5828 /* A temporary array used in communication with def_fn_type. */
5829 static GTY(()) tree builtin_types
[(int) BT_LAST
+ 1];
5831 /* A helper function for install_builtin_types. Build function type
5832 for DEF with return type RET and N arguments. If VAR is true, then the
5833 function should be variadic after those N arguments.
5835 Takes special care not to ICE if any of the types involved are
5836 error_mark_node, which indicates that said type is not in fact available
5837 (see builtin_type_for_size). In which case the function type as a whole
5838 should be error_mark_node. */
5841 def_fn_type (builtin_type def
, builtin_type ret
, bool var
, int n
, ...)
5844 tree
*args
= XALLOCAVEC (tree
, n
);
5849 for (i
= 0; i
< n
; ++i
)
5851 builtin_type a
= (builtin_type
) va_arg (list
, int);
5852 t
= builtin_types
[a
];
5853 if (t
== error_mark_node
)
5858 t
= builtin_types
[ret
];
5859 if (t
== error_mark_node
)
5862 t
= build_varargs_function_type_array (t
, n
, args
);
5864 t
= build_function_type_array (t
, n
, args
);
5867 builtin_types
[def
] = t
;
5871 /* Build the builtin function types and install them in the builtin_types
5872 array for later use in builtin function decls. */
5875 install_builtin_function_types (void)
5877 tree va_list_ref_type_node
;
5878 tree va_list_arg_type_node
;
5880 if (TREE_CODE (va_list_type_node
) == ARRAY_TYPE
)
5882 va_list_arg_type_node
= va_list_ref_type_node
=
5883 build_pointer_type (TREE_TYPE (va_list_type_node
));
5887 va_list_arg_type_node
= va_list_type_node
;
5888 va_list_ref_type_node
= build_reference_type (va_list_type_node
);
5891 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5892 builtin_types[ENUM] = VALUE;
5893 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5894 def_fn_type (ENUM, RETURN, 0, 0);
5895 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5896 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5897 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5898 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5899 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5900 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5901 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5902 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5903 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5904 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5905 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5907 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5908 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5910 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5911 #define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5913 def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5915 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5916 def_fn_type (ENUM, RETURN, 1, 0);
5917 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5918 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5919 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5920 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5921 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5922 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5923 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5924 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5925 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5926 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5927 #define DEF_POINTER_TYPE(ENUM, TYPE) \
5928 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5930 #include "builtin-types.def"
5932 #undef DEF_PRIMITIVE_TYPE
5933 #undef DEF_FUNCTION_TYPE_1
5934 #undef DEF_FUNCTION_TYPE_2
5935 #undef DEF_FUNCTION_TYPE_3
5936 #undef DEF_FUNCTION_TYPE_4
5937 #undef DEF_FUNCTION_TYPE_5
5938 #undef DEF_FUNCTION_TYPE_6
5939 #undef DEF_FUNCTION_TYPE_VAR_0
5940 #undef DEF_FUNCTION_TYPE_VAR_1
5941 #undef DEF_FUNCTION_TYPE_VAR_2
5942 #undef DEF_FUNCTION_TYPE_VAR_3
5943 #undef DEF_FUNCTION_TYPE_VAR_4
5944 #undef DEF_FUNCTION_TYPE_VAR_5
5945 #undef DEF_POINTER_TYPE
5946 builtin_types
[(int) BT_LAST
] = NULL_TREE
;
5949 /* ----------------------------------------------------------------------- *
5950 * BUILTIN ATTRIBUTES *
5951 * ----------------------------------------------------------------------- */
5953 enum built_in_attribute
5955 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
5956 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
5957 #define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
5958 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
5959 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
5960 #include "builtin-attrs.def"
5961 #undef DEF_ATTR_NULL_TREE
5963 #undef DEF_ATTR_STRING
5964 #undef DEF_ATTR_IDENT
5965 #undef DEF_ATTR_TREE_LIST
5969 static GTY(()) tree built_in_attributes
[(int) ATTR_LAST
];
5972 install_builtin_attributes (void)
5974 /* Fill in the built_in_attributes array. */
5975 #define DEF_ATTR_NULL_TREE(ENUM) \
5976 built_in_attributes[(int) ENUM] = NULL_TREE;
5977 #define DEF_ATTR_INT(ENUM, VALUE) \
5978 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
5979 #define DEF_ATTR_STRING(ENUM, VALUE) \
5980 built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
5981 #define DEF_ATTR_IDENT(ENUM, STRING) \
5982 built_in_attributes[(int) ENUM] = get_identifier (STRING);
5983 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
5984 built_in_attributes[(int) ENUM] \
5985 = tree_cons (built_in_attributes[(int) PURPOSE], \
5986 built_in_attributes[(int) VALUE], \
5987 built_in_attributes[(int) CHAIN]);
5988 #include "builtin-attrs.def"
5989 #undef DEF_ATTR_NULL_TREE
5991 #undef DEF_ATTR_STRING
5992 #undef DEF_ATTR_IDENT
5993 #undef DEF_ATTR_TREE_LIST
5996 /* Handle a "const" attribute; arguments as in
5997 struct attribute_spec.handler. */
6000 handle_const_attribute (tree
*node
, tree
ARG_UNUSED (name
),
6001 tree
ARG_UNUSED (args
), int ARG_UNUSED (flags
),
6004 if (TREE_CODE (*node
) == FUNCTION_DECL
)
6005 TREE_READONLY (*node
) = 1;
6007 *no_add_attrs
= true;
6012 /* Handle a "nothrow" attribute; arguments as in
6013 struct attribute_spec.handler. */
6016 handle_nothrow_attribute (tree
*node
, tree
ARG_UNUSED (name
),
6017 tree
ARG_UNUSED (args
), int ARG_UNUSED (flags
),
6020 if (TREE_CODE (*node
) == FUNCTION_DECL
)
6021 TREE_NOTHROW (*node
) = 1;
6023 *no_add_attrs
= true;
6028 /* Handle a "pure" attribute; arguments as in
6029 struct attribute_spec.handler. */
6032 handle_pure_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
6033 int ARG_UNUSED (flags
), bool *no_add_attrs
)
6035 if (TREE_CODE (*node
) == FUNCTION_DECL
)
6036 DECL_PURE_P (*node
) = 1;
6037 /* ??? TODO: Support types. */
6040 warning (OPT_Wattributes
, "%qs attribute ignored",
6041 IDENTIFIER_POINTER (name
));
6042 *no_add_attrs
= true;
6048 /* Handle a "no vops" attribute; arguments as in
6049 struct attribute_spec.handler. */
6052 handle_novops_attribute (tree
*node
, tree
ARG_UNUSED (name
),
6053 tree
ARG_UNUSED (args
), int ARG_UNUSED (flags
),
6054 bool *ARG_UNUSED (no_add_attrs
))
6056 gcc_assert (TREE_CODE (*node
) == FUNCTION_DECL
);
6057 DECL_IS_NOVOPS (*node
) = 1;
6061 /* Helper for nonnull attribute handling; fetch the operand number
6062 from the attribute argument list. */
6065 get_nonnull_operand (tree arg_num_expr
, unsigned HOST_WIDE_INT
*valp
)
6067 /* Verify the arg number is a constant. */
6068 if (TREE_CODE (arg_num_expr
) != INTEGER_CST
6069 || TREE_INT_CST_HIGH (arg_num_expr
) != 0)
6072 *valp
= TREE_INT_CST_LOW (arg_num_expr
);
6076 /* Handle the "nonnull" attribute. */
6078 handle_nonnull_attribute (tree
*node
, tree
ARG_UNUSED (name
),
6079 tree args
, int ARG_UNUSED (flags
),
6083 unsigned HOST_WIDE_INT attr_arg_num
;
6085 /* If no arguments are specified, all pointer arguments should be
6086 non-null. Verify a full prototype is given so that the arguments
6087 will have the correct types when we actually check them later. */
6090 if (!prototype_p (type
))
6092 error ("nonnull attribute without arguments on a non-prototype");
6093 *no_add_attrs
= true;
6098 /* Argument list specified. Verify that each argument number references
6099 a pointer argument. */
6100 for (attr_arg_num
= 1; args
; args
= TREE_CHAIN (args
))
6102 unsigned HOST_WIDE_INT arg_num
= 0, ck_num
;
6104 if (!get_nonnull_operand (TREE_VALUE (args
), &arg_num
))
6106 error ("nonnull argument has invalid operand number (argument %lu)",
6107 (unsigned long) attr_arg_num
);
6108 *no_add_attrs
= true;
6112 if (prototype_p (type
))
6114 function_args_iterator iter
;
6117 function_args_iter_init (&iter
, type
);
6118 for (ck_num
= 1; ; ck_num
++, function_args_iter_next (&iter
))
6120 argument
= function_args_iter_cond (&iter
);
6121 if (!argument
|| ck_num
== arg_num
)
6126 || TREE_CODE (argument
) == VOID_TYPE
)
6128 error ("nonnull argument with out-of-range operand number "
6129 "(argument %lu, operand %lu)",
6130 (unsigned long) attr_arg_num
, (unsigned long) arg_num
);
6131 *no_add_attrs
= true;
6135 if (TREE_CODE (argument
) != POINTER_TYPE
)
6137 error ("nonnull argument references non-pointer operand "
6138 "(argument %lu, operand %lu)",
6139 (unsigned long) attr_arg_num
, (unsigned long) arg_num
);
6140 *no_add_attrs
= true;
6149 /* Handle a "sentinel" attribute. */
6152 handle_sentinel_attribute (tree
*node
, tree name
, tree args
,
6153 int ARG_UNUSED (flags
), bool *no_add_attrs
)
6155 if (!prototype_p (*node
))
6157 warning (OPT_Wattributes
,
6158 "%qs attribute requires prototypes with named arguments",
6159 IDENTIFIER_POINTER (name
));
6160 *no_add_attrs
= true;
6164 if (!stdarg_p (*node
))
6166 warning (OPT_Wattributes
,
6167 "%qs attribute only applies to variadic functions",
6168 IDENTIFIER_POINTER (name
));
6169 *no_add_attrs
= true;
6175 tree position
= TREE_VALUE (args
);
6177 if (TREE_CODE (position
) != INTEGER_CST
)
6179 warning (0, "requested position is not an integer constant");
6180 *no_add_attrs
= true;
6184 if (tree_int_cst_lt (position
, integer_zero_node
))
6186 warning (0, "requested position is less than zero");
6187 *no_add_attrs
= true;
6195 /* Handle a "noreturn" attribute; arguments as in
6196 struct attribute_spec.handler. */
6199 handle_noreturn_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
6200 int ARG_UNUSED (flags
), bool *no_add_attrs
)
6202 tree type
= TREE_TYPE (*node
);
6204 /* See FIXME comment in c_common_attribute_table. */
6205 if (TREE_CODE (*node
) == FUNCTION_DECL
)
6206 TREE_THIS_VOLATILE (*node
) = 1;
6207 else if (TREE_CODE (type
) == POINTER_TYPE
6208 && TREE_CODE (TREE_TYPE (type
)) == FUNCTION_TYPE
)
6210 = build_pointer_type
6211 (build_type_variant (TREE_TYPE (type
),
6212 TYPE_READONLY (TREE_TYPE (type
)), 1));
6215 warning (OPT_Wattributes
, "%qs attribute ignored",
6216 IDENTIFIER_POINTER (name
));
6217 *no_add_attrs
= true;
6223 /* Handle a "leaf" attribute; arguments as in
6224 struct attribute_spec.handler. */
6227 handle_leaf_attribute (tree
*node
, tree name
,
6228 tree
ARG_UNUSED (args
),
6229 int ARG_UNUSED (flags
), bool *no_add_attrs
)
6231 if (TREE_CODE (*node
) != FUNCTION_DECL
)
6233 warning (OPT_Wattributes
, "%qE attribute ignored", name
);
6234 *no_add_attrs
= true;
6236 if (!TREE_PUBLIC (*node
))
6238 warning (OPT_Wattributes
, "%qE attribute has no effect", name
);
6239 *no_add_attrs
= true;
6245 /* Handle a "malloc" attribute; arguments as in
6246 struct attribute_spec.handler. */
6249 handle_malloc_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
6250 int ARG_UNUSED (flags
), bool *no_add_attrs
)
6252 if (TREE_CODE (*node
) == FUNCTION_DECL
6253 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node
))))
6254 DECL_IS_MALLOC (*node
) = 1;
6257 warning (OPT_Wattributes
, "%qs attribute ignored",
6258 IDENTIFIER_POINTER (name
));
6259 *no_add_attrs
= true;
6265 /* Fake handler for attributes we don't properly support. */
6268 fake_attribute_handler (tree
* ARG_UNUSED (node
),
6269 tree
ARG_UNUSED (name
),
6270 tree
ARG_UNUSED (args
),
6271 int ARG_UNUSED (flags
),
6272 bool * ARG_UNUSED (no_add_attrs
))
6277 /* Handle a "type_generic" attribute. */
6280 handle_type_generic_attribute (tree
*node
, tree
ARG_UNUSED (name
),
6281 tree
ARG_UNUSED (args
), int ARG_UNUSED (flags
),
6282 bool * ARG_UNUSED (no_add_attrs
))
6284 /* Ensure we have a function type. */
6285 gcc_assert (TREE_CODE (*node
) == FUNCTION_TYPE
);
6287 /* Ensure we have a variadic function. */
6288 gcc_assert (!prototype_p (*node
) || stdarg_p (*node
));
6293 /* Handle a "vector_size" attribute; arguments as in
6294 struct attribute_spec.handler. */
6297 handle_vector_size_attribute (tree
*node
, tree name
, tree args
,
6298 int ARG_UNUSED (flags
),
6301 unsigned HOST_WIDE_INT vecsize
, nunits
;
6302 enum machine_mode orig_mode
;
6303 tree type
= *node
, new_type
, size
;
6305 *no_add_attrs
= true;
6307 size
= TREE_VALUE (args
);
6309 if (!tree_fits_uhwi_p (size
))
6311 warning (OPT_Wattributes
, "%qs attribute ignored",
6312 IDENTIFIER_POINTER (name
));
6316 /* Get the vector size (in bytes). */
6317 vecsize
= tree_to_uhwi (size
);
6319 /* We need to provide for vector pointers, vector arrays, and
6320 functions returning vectors. For example:
6322 __attribute__((vector_size(16))) short *foo;
6324 In this case, the mode is SI, but the type being modified is
6325 HI, so we need to look further. */
6327 while (POINTER_TYPE_P (type
)
6328 || TREE_CODE (type
) == FUNCTION_TYPE
6329 || TREE_CODE (type
) == ARRAY_TYPE
)
6330 type
= TREE_TYPE (type
);
6332 /* Get the mode of the type being modified. */
6333 orig_mode
= TYPE_MODE (type
);
6335 if ((!INTEGRAL_TYPE_P (type
)
6336 && !SCALAR_FLOAT_TYPE_P (type
)
6337 && !FIXED_POINT_TYPE_P (type
))
6338 || (!SCALAR_FLOAT_MODE_P (orig_mode
)
6339 && GET_MODE_CLASS (orig_mode
) != MODE_INT
6340 && !ALL_SCALAR_FIXED_POINT_MODE_P (orig_mode
))
6341 || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type
))
6342 || TREE_CODE (type
) == BOOLEAN_TYPE
)
6344 error ("invalid vector type for attribute %qs",
6345 IDENTIFIER_POINTER (name
));
6349 if (vecsize
% tree_to_uhwi (TYPE_SIZE_UNIT (type
)))
6351 error ("vector size not an integral multiple of component size");
6357 error ("zero vector size");
6361 /* Calculate how many units fit in the vector. */
6362 nunits
= vecsize
/ tree_to_uhwi (TYPE_SIZE_UNIT (type
));
6363 if (nunits
& (nunits
- 1))
6365 error ("number of components of the vector not a power of two");
6369 new_type
= build_vector_type (type
, nunits
);
6371 /* Build back pointers if needed. */
6372 *node
= reconstruct_complex_type (*node
, new_type
);
6377 /* Handle a "vector_type" attribute; arguments as in
6378 struct attribute_spec.handler. */
6381 handle_vector_type_attribute (tree
*node
, tree name
, tree
ARG_UNUSED (args
),
6382 int ARG_UNUSED (flags
),
6385 /* Vector representative type and size. */
6386 tree rep_type
= *node
;
6387 tree rep_size
= TYPE_SIZE_UNIT (rep_type
);
6389 /* Vector size in bytes and number of units. */
6390 unsigned HOST_WIDE_INT vec_bytes
, vec_units
;
6392 /* Vector element type and mode. */
6394 enum machine_mode elem_mode
;
6396 *no_add_attrs
= true;
6398 if (TREE_CODE (rep_type
) != ARRAY_TYPE
)
6400 error ("attribute %qs applies to array types only",
6401 IDENTIFIER_POINTER (name
));
6405 /* Silently punt on variable sizes. We can't make vector types for them,
6406 need to ignore them on front-end generated subtypes of unconstrained
6407 bases, and this attribute is for binding implementors, not end-users, so
6408 we should never get there from legitimate explicit uses. */
6410 if (!tree_fits_uhwi_p (rep_size
))
6413 /* Get the element type/mode and check this is something we know
6414 how to make vectors of. */
6416 elem_type
= TREE_TYPE (rep_type
);
6417 elem_mode
= TYPE_MODE (elem_type
);
6419 if ((!INTEGRAL_TYPE_P (elem_type
)
6420 && !SCALAR_FLOAT_TYPE_P (elem_type
)
6421 && !FIXED_POINT_TYPE_P (elem_type
))
6422 || (!SCALAR_FLOAT_MODE_P (elem_mode
)
6423 && GET_MODE_CLASS (elem_mode
) != MODE_INT
6424 && !ALL_SCALAR_FIXED_POINT_MODE_P (elem_mode
))
6425 || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (elem_type
)))
6427 error ("invalid element type for attribute %qs",
6428 IDENTIFIER_POINTER (name
));
6432 /* Sanity check the vector size and element type consistency. */
6434 vec_bytes
= tree_to_uhwi (rep_size
);
6436 if (vec_bytes
% tree_to_uhwi (TYPE_SIZE_UNIT (elem_type
)))
6438 error ("vector size not an integral multiple of component size");
6444 error ("zero vector size");
6448 vec_units
= vec_bytes
/ tree_to_uhwi (TYPE_SIZE_UNIT (elem_type
));
6449 if (vec_units
& (vec_units
- 1))
6451 error ("number of components of the vector not a power of two");
6455 /* Build the vector type and replace. */
6457 *node
= build_vector_type (elem_type
, vec_units
);
6458 TYPE_REPRESENTATIVE_ARRAY (*node
) = rep_type
;
6463 /* ----------------------------------------------------------------------- *
6464 * BUILTIN FUNCTIONS *
6465 * ----------------------------------------------------------------------- */
6467 /* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
6468 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
6469 if nonansi_p and flag_no_nonansi_builtin. */
6472 def_builtin_1 (enum built_in_function fncode
,
6474 enum built_in_class fnclass
,
6475 tree fntype
, tree libtype
,
6476 bool both_p
, bool fallback_p
,
6477 bool nonansi_p ATTRIBUTE_UNUSED
,
6478 tree fnattrs
, bool implicit_p
)
6481 const char *libname
;
6483 /* Preserve an already installed decl. It most likely was setup in advance
6484 (e.g. as part of the internal builtins) for specific reasons. */
6485 if (builtin_decl_explicit (fncode
) != NULL_TREE
)
6488 gcc_assert ((!both_p
&& !fallback_p
)
6489 || !strncmp (name
, "__builtin_",
6490 strlen ("__builtin_")));
6492 libname
= name
+ strlen ("__builtin_");
6493 decl
= add_builtin_function (name
, fntype
, fncode
, fnclass
,
6494 (fallback_p
? libname
: NULL
),
6497 /* ??? This is normally further controlled by command-line options
6498 like -fno-builtin, but we don't have them for Ada. */
6499 add_builtin_function (libname
, libtype
, fncode
, fnclass
,
6502 set_builtin_decl (fncode
, decl
, implicit_p
);
/* ISO C dialect flags referenced by the COND expressions in builtins.def.
   Ada has no notion of C dialects, so both stay 0 (builtins gated on them
   are never installed).  */
static int flag_isoc94 = 0;
static int flag_isoc99 = 0;
6508 /* Install what the common builtins.def offers. */
6511 install_builtin_functions (void)
6513 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
6514 NONANSI_P, ATTRS, IMPLICIT, COND) \
6516 def_builtin_1 (ENUM, NAME, CLASS, \
6517 builtin_types[(int) TYPE], \
6518 builtin_types[(int) LIBTYPE], \
6519 BOTH_P, FALLBACK_P, NONANSI_P, \
6520 built_in_attributes[(int) ATTRS], IMPLICIT);
6521 #include "builtins.def"
6525 /* ----------------------------------------------------------------------- *
6526 * BUILTIN FUNCTIONS *
6527 * ----------------------------------------------------------------------- */
6529 /* Install the builtin functions we might need. */
6532 gnat_install_builtins (void)
6534 install_builtin_elementary_types ();
6535 install_builtin_function_types ();
6536 install_builtin_attributes ();
6538 /* Install builtins used by generic middle-end pieces first. Some of these
6539 know about internal specificities and control attributes accordingly, for
6540 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
6541 the generic definition from builtins.def. */
6542 build_common_builtin_nodes ();
6544 /* Now, install the target specific builtins, such as the AltiVec family on
6545 ppc, and the common set as exposed by builtins.def. */
6546 targetm
.init_builtins ();
6547 install_builtin_functions ();
6550 #include "gt-ada-utils.h"
6551 #include "gtype-ada.h"