1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2009, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 ****************************************************************************/
26 /* We have attribute handlers using C specific format specifiers in warning
27 messages. Make sure they are properly recognized. */
28 #define GCC_DIAG_STYLE __gcc_cdiag__
32 #include "coretypes.h"
44 #include "langhooks.h"
45 #include "pointer-set.h"
47 #include "tree-dump.h"
48 #include "tree-inline.h"
49 #include "tree-iterator.h"
66 #ifndef MAX_FIXED_MODE_SIZE
67 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
70 #ifndef MAX_BITS_PER_WORD
71 #define MAX_BITS_PER_WORD BITS_PER_WORD
74 /* If nonzero, pretend we are allocating at global level. */
77 /* Tree nodes for the various types and decls we create. */
78 tree gnat_std_decls
[(int) ADT_LAST
];
80 /* Functions to call for each of the possible raise reasons. */
81 tree gnat_raise_decls
[(int) LAST_REASON_CODE
+ 1];
83 /* Forward declarations for handlers of attributes. */
84 static tree
handle_const_attribute (tree
*, tree
, tree
, int, bool *);
85 static tree
handle_nothrow_attribute (tree
*, tree
, tree
, int, bool *);
86 static tree
handle_pure_attribute (tree
*, tree
, tree
, int, bool *);
87 static tree
handle_novops_attribute (tree
*, tree
, tree
, int, bool *);
88 static tree
handle_nonnull_attribute (tree
*, tree
, tree
, int, bool *);
89 static tree
handle_sentinel_attribute (tree
*, tree
, tree
, int, bool *);
90 static tree
handle_noreturn_attribute (tree
*, tree
, tree
, int, bool *);
91 static tree
handle_malloc_attribute (tree
*, tree
, tree
, int, bool *);
92 static tree
handle_type_generic_attribute (tree
*, tree
, tree
, int, bool *);
94 /* Fake handler for attributes we don't properly support, typically because
95 they'd require dragging a lot of the common-c front-end circuitry. */
96 static tree
fake_attribute_handler (tree
*, tree
, tree
, int, bool *);
98 /* Table of machine-independent internal attributes for Ada. We support
99 this minimal set of attributes to accommodate the needs of builtins. */
100 const struct attribute_spec gnat_internal_attribute_table
[] =
102 /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
103 { "const", 0, 0, true, false, false, handle_const_attribute
},
104 { "nothrow", 0, 0, true, false, false, handle_nothrow_attribute
},
105 { "pure", 0, 0, true, false, false, handle_pure_attribute
},
106 { "no vops", 0, 0, true, false, false, handle_novops_attribute
},
107 { "nonnull", 0, -1, false, true, true, handle_nonnull_attribute
},
108 { "sentinel", 0, 1, false, true, true, handle_sentinel_attribute
},
109 { "noreturn", 0, 0, true, false, false, handle_noreturn_attribute
},
110 { "malloc", 0, 0, true, false, false, handle_malloc_attribute
},
111 { "type generic", 0, 0, false, true, true, handle_type_generic_attribute
},
113 /* ??? format and format_arg are heavy and not supported, which actually
114 prevents support for stdio builtins, which we however declare as part
115 of the common builtins.def contents. */
116 { "format", 3, 3, false, true, true, fake_attribute_handler
},
117 { "format_arg", 1, 1, false, true, true, fake_attribute_handler
},
119 { NULL
, 0, 0, false, false, false, NULL
}
122 /* Associates a GNAT tree node to a GCC tree node. It is used in
123 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
124 of `save_gnu_tree' for more info. */
125 static GTY((length ("max_gnat_nodes"))) tree
*associate_gnat_to_gnu
;
127 #define GET_GNU_TREE(GNAT_ENTITY) \
128 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
130 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
131 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
133 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
134 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
136 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
137 static GTY((length ("max_gnat_nodes"))) tree
*dummy_node_table
;
139 #define GET_DUMMY_NODE(GNAT_ENTITY) \
140 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
142 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
143 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
145 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
146 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
148 /* This variable keeps a table for types for each precision so that we only
149 allocate each of them once. Signed and unsigned types are kept separate.
151 Note that these types are only used when fold-const requests something
152 special. Perhaps we should NOT share these types; we'll see how it
154 static GTY(()) tree signed_and_unsigned_types
[2 * MAX_BITS_PER_WORD
+ 1][2];
156 /* Likewise for float types, but record these by mode. */
157 static GTY(()) tree float_types
[NUM_MACHINE_MODES
];
159 /* For each binding contour we allocate a binding_level structure to indicate
160 the binding depth. */
162 struct GTY((chain_next ("%h.chain"))) gnat_binding_level
{
163 /* The binding level containing this one (the enclosing binding level). */
164 struct gnat_binding_level
*chain
;
165 /* The BLOCK node for this level. */
167 /* If nonzero, the setjmp buffer that needs to be updated for any
168 variable-sized definition within this context. */
172 /* The binding level currently in effect. */
173 static GTY(()) struct gnat_binding_level
*current_binding_level
;
175 /* A chain of gnat_binding_level structures awaiting reuse. */
176 static GTY((deletable
)) struct gnat_binding_level
*free_binding_level
;
178 /* An array of global declarations. */
179 static GTY(()) VEC(tree
,gc
) *global_decls
;
181 /* An array of builtin function declarations. */
182 static GTY(()) VEC(tree
,gc
) *builtin_decls
;
184 /* An array of global renaming pointers. */
185 static GTY(()) VEC(tree
,gc
) *global_renaming_pointers
;
187 /* A chain of unused BLOCK nodes. */
188 static GTY((deletable
)) tree free_block_chain
;
190 static tree
merge_sizes (tree
, tree
, tree
, bool, bool);
191 static tree
compute_related_constant (tree
, tree
);
192 static tree
split_plus (tree
, tree
*);
193 static void gnat_gimplify_function (tree
);
194 static tree
float_type_for_precision (int, enum machine_mode
);
195 static tree
convert_to_fat_pointer (tree
, tree
);
196 static tree
convert_to_thin_pointer (tree
, tree
);
197 static tree
make_descriptor_field (const char *,tree
, tree
, tree
);
198 static bool potential_alignment_gap (tree
, tree
, tree
);
200 /* Initialize the association of GNAT nodes to GCC trees. */
203 init_gnat_to_gnu (void)
205 associate_gnat_to_gnu
206 = (tree
*) ggc_alloc_cleared (max_gnat_nodes
* sizeof (tree
));
209 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
210 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
211 a ..._DECL node. If NO_CHECK is true, the latter check is suppressed.
213 If GNU_DECL is zero, a previous association is to be reset. */
216 save_gnu_tree (Entity_Id gnat_entity
, tree gnu_decl
, bool no_check
)
218 /* Check that GNAT_ENTITY is not already defined and that it is being set
219 to something which is a decl. Raise gigi 401 if not. Usually, this
220 means GNAT_ENTITY is defined twice, but occasionally is due to some
222 gcc_assert (!(gnu_decl
223 && (PRESENT_GNU_TREE (gnat_entity
)
224 || (!no_check
&& !DECL_P (gnu_decl
)))));
226 SET_GNU_TREE (gnat_entity
, gnu_decl
);
229 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
230 Return the ..._DECL node that was associated with it. If there is no tree
231 node associated with GNAT_ENTITY, abort.
233 In some cases, such as delayed elaboration or expressions that need to
234 be elaborated only once, GNAT_ENTITY is really not an entity. */
237 get_gnu_tree (Entity_Id gnat_entity
)
239 gcc_assert (PRESENT_GNU_TREE (gnat_entity
));
240 return GET_GNU_TREE (gnat_entity
);
243 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
246 present_gnu_tree (Entity_Id gnat_entity
)
248 return PRESENT_GNU_TREE (gnat_entity
);
251 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
254 init_dummy_type (void)
257 = (tree
*) ggc_alloc_cleared (max_gnat_nodes
* sizeof (tree
));
260 /* Make a dummy type corresponding to GNAT_TYPE. */
263 make_dummy_type (Entity_Id gnat_type
)
265 Entity_Id gnat_underlying
= Gigi_Equivalent_Type (gnat_type
);
268 /* If there is an equivalent type, get its underlying type. */
269 if (Present (gnat_underlying
))
270 gnat_underlying
= Underlying_Type (gnat_underlying
);
272 /* If there was no equivalent type (can only happen when just annotating
273 types) or underlying type, go back to the original type. */
274 if (No (gnat_underlying
))
275 gnat_underlying
= gnat_type
;
277 /* If it there already a dummy type, use that one. Else make one. */
278 if (PRESENT_DUMMY_NODE (gnat_underlying
))
279 return GET_DUMMY_NODE (gnat_underlying
);
281 /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
283 gnu_type
= make_node (Is_Record_Type (gnat_underlying
)
284 ? tree_code_for_record_type (gnat_underlying
)
286 TYPE_NAME (gnu_type
) = get_entity_name (gnat_type
);
287 TYPE_DUMMY_P (gnu_type
) = 1;
288 TYPE_STUB_DECL (gnu_type
)
289 = create_type_stub_decl (TYPE_NAME (gnu_type
), gnu_type
);
290 if (AGGREGATE_TYPE_P (gnu_type
))
291 TYPE_BY_REFERENCE_P (gnu_type
) = Is_By_Reference_Type (gnat_type
);
293 SET_DUMMY_NODE (gnat_underlying
, gnu_type
);
298 /* Return nonzero if we are currently in the global binding level. */
301 global_bindings_p (void)
303 return ((force_global
|| !current_function_decl
) ? -1 : 0);
306 /* Enter a new binding level. */
311 struct gnat_binding_level
*newlevel
= NULL
;
313 /* Reuse a struct for this binding level, if there is one. */
314 if (free_binding_level
)
316 newlevel
= free_binding_level
;
317 free_binding_level
= free_binding_level
->chain
;
321 = (struct gnat_binding_level
*)
322 ggc_alloc (sizeof (struct gnat_binding_level
));
324 /* Use a free BLOCK, if any; otherwise, allocate one. */
325 if (free_block_chain
)
327 newlevel
->block
= free_block_chain
;
328 free_block_chain
= BLOCK_CHAIN (free_block_chain
);
329 BLOCK_CHAIN (newlevel
->block
) = NULL_TREE
;
332 newlevel
->block
= make_node (BLOCK
);
334 /* Point the BLOCK we just made to its parent. */
335 if (current_binding_level
)
336 BLOCK_SUPERCONTEXT (newlevel
->block
) = current_binding_level
->block
;
338 BLOCK_VARS (newlevel
->block
) = BLOCK_SUBBLOCKS (newlevel
->block
) = NULL_TREE
;
339 TREE_USED (newlevel
->block
) = 1;
341 /* Add this level to the front of the chain (stack) of levels that are
343 newlevel
->chain
= current_binding_level
;
344 newlevel
->jmpbuf_decl
= NULL_TREE
;
345 current_binding_level
= newlevel
;
348 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
349 and point FNDECL to this BLOCK. */
352 set_current_block_context (tree fndecl
)
354 BLOCK_SUPERCONTEXT (current_binding_level
->block
) = fndecl
;
355 DECL_INITIAL (fndecl
) = current_binding_level
->block
;
358 /* Set the jmpbuf_decl for the current binding level to DECL. */
361 set_block_jmpbuf_decl (tree decl
)
363 current_binding_level
->jmpbuf_decl
= decl
;
366 /* Get the jmpbuf_decl, if any, for the current binding level. */
369 get_block_jmpbuf_decl ()
371 return current_binding_level
->jmpbuf_decl
;
374 /* Exit a binding level. Set any BLOCK into the current code group. */
379 struct gnat_binding_level
*level
= current_binding_level
;
380 tree block
= level
->block
;
382 BLOCK_VARS (block
) = nreverse (BLOCK_VARS (block
));
383 BLOCK_SUBBLOCKS (block
) = nreverse (BLOCK_SUBBLOCKS (block
));
385 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
386 are no variables free the block and merge its subblocks into those of its
387 parent block. Otherwise, add it to the list of its parent. */
388 if (TREE_CODE (BLOCK_SUPERCONTEXT (block
)) == FUNCTION_DECL
)
390 else if (BLOCK_VARS (block
) == NULL_TREE
)
392 BLOCK_SUBBLOCKS (level
->chain
->block
)
393 = chainon (BLOCK_SUBBLOCKS (block
),
394 BLOCK_SUBBLOCKS (level
->chain
->block
));
395 BLOCK_CHAIN (block
) = free_block_chain
;
396 free_block_chain
= block
;
400 BLOCK_CHAIN (block
) = BLOCK_SUBBLOCKS (level
->chain
->block
);
401 BLOCK_SUBBLOCKS (level
->chain
->block
) = block
;
402 TREE_USED (block
) = 1;
403 set_block_for_group (block
);
406 /* Free this binding structure. */
407 current_binding_level
= level
->chain
;
408 level
->chain
= free_binding_level
;
409 free_binding_level
= level
;
413 /* Records a ..._DECL node DECL as belonging to the current lexical scope
414 and uses GNAT_NODE for location information and propagating flags. */
417 gnat_pushdecl (tree decl
, Node_Id gnat_node
)
419 /* If this decl is public external or at toplevel, there is no context.
420 But PARM_DECLs always go in the level of its function. */
421 if (TREE_CODE (decl
) != PARM_DECL
422 && ((DECL_EXTERNAL (decl
) && TREE_PUBLIC (decl
))
423 || global_bindings_p ()))
424 DECL_CONTEXT (decl
) = 0;
427 DECL_CONTEXT (decl
) = current_function_decl
;
429 /* Functions imported in another function are not really nested. */
430 if (TREE_CODE (decl
) == FUNCTION_DECL
&& TREE_PUBLIC (decl
))
431 DECL_NO_STATIC_CHAIN (decl
) = 1;
434 TREE_NO_WARNING (decl
) = (gnat_node
== Empty
|| Warnings_Off (gnat_node
));
436 /* Set the location of DECL and emit a declaration for it. */
437 if (Present (gnat_node
))
438 Sloc_to_locus (Sloc (gnat_node
), &DECL_SOURCE_LOCATION (decl
));
439 add_decl_expr (decl
, gnat_node
);
441 /* Put the declaration on the list. The list of declarations is in reverse
442 order. The list will be reversed later. Put global variables in the
443 globals list and builtin functions in a dedicated list to speed up
444 further lookups. Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
445 the list, as they will cause trouble with the debugger and aren't needed
447 if (TREE_CODE (decl
) != TYPE_DECL
448 || TREE_CODE (TREE_TYPE (decl
)) != UNCONSTRAINED_ARRAY_TYPE
)
450 if (global_bindings_p ())
452 VEC_safe_push (tree
, gc
, global_decls
, decl
);
454 if (TREE_CODE (decl
) == FUNCTION_DECL
&& DECL_BUILT_IN (decl
))
455 VEC_safe_push (tree
, gc
, builtin_decls
, decl
);
459 TREE_CHAIN (decl
) = BLOCK_VARS (current_binding_level
->block
);
460 BLOCK_VARS (current_binding_level
->block
) = decl
;
464 /* For the declaration of a type, set its name if it either is not already
465 set or if the previous type name was not derived from a source name.
466 We'd rather have the type named with a real name and all the pointer
467 types to the same object have the same POINTER_TYPE node. Code in the
468 equivalent function of c-decl.c makes a copy of the type node here, but
469 that may cause us trouble with incomplete types. We make an exception
470 for fat pointer types because the compiler automatically builds them
471 for unconstrained array types and the debugger uses them to represent
472 both these and pointers to these. */
473 if (TREE_CODE (decl
) == TYPE_DECL
&& DECL_NAME (decl
))
475 tree t
= TREE_TYPE (decl
);
477 if (!(TYPE_NAME (t
) && TREE_CODE (TYPE_NAME (t
)) == TYPE_DECL
))
479 else if (TYPE_FAT_POINTER_P (t
))
481 tree tt
= build_variant_type_copy (t
);
482 TYPE_NAME (tt
) = decl
;
483 TREE_USED (tt
) = TREE_USED (t
);
484 TREE_TYPE (decl
) = tt
;
485 DECL_ORIGINAL_TYPE (decl
) = t
;
488 else if (DECL_ARTIFICIAL (TYPE_NAME (t
)) && !DECL_ARTIFICIAL (decl
))
493 /* Propagate the name to all the variants. This is needed for
494 the type qualifiers machinery to work properly. */
496 for (t
= TYPE_MAIN_VARIANT (t
); t
; t
= TYPE_NEXT_VARIANT (t
))
497 TYPE_NAME (t
) = decl
;
501 /* Do little here. Set up the standard declarations later after the
502 front end has been run. */
505 gnat_init_decl_processing (void)
507 /* Make the binding_level structure for global names. */
508 current_function_decl
= 0;
509 current_binding_level
= 0;
510 free_binding_level
= 0;
513 build_common_tree_nodes (true, true);
515 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
516 corresponding to the width of Pmode. In most cases when ptr_mode
517 and Pmode differ, C will use the width of ptr_mode for SIZETYPE.
518 But we get far better code using the width of Pmode. */
519 size_type_node
= gnat_type_for_mode (Pmode
, 0);
520 set_sizetype (size_type_node
);
522 /* In Ada, we use an unsigned 8-bit type for the default boolean type. */
523 boolean_type_node
= make_node (BOOLEAN_TYPE
);
524 TYPE_PRECISION (boolean_type_node
) = 1;
525 fixup_unsigned_type (boolean_type_node
);
526 TYPE_RM_SIZE (boolean_type_node
) = bitsize_int (1);
528 build_common_tree_nodes_2 (0);
530 ptr_void_type_node
= build_pointer_type (void_type_node
);
533 /* Record TYPE as a builtin type for Ada. NAME is the name of the type. */
536 record_builtin_type (const char *name
, tree type
)
538 tree type_decl
= build_decl (TYPE_DECL
, get_identifier (name
), type
);
540 gnat_pushdecl (type_decl
, Empty
);
542 if (debug_hooks
->type_decl
)
543 debug_hooks
->type_decl (type_decl
, false);
546 /* Given a record type RECORD_TYPE and a chain of FIELD_DECL nodes FIELDLIST,
547 finish constructing the record or union type. If REP_LEVEL is zero, this
548 record has no representation clause and so will be entirely laid out here.
549 If REP_LEVEL is one, this record has a representation clause and has been
550 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
551 this record is derived from a parent record and thus inherits its layout;
552 only make a pass on the fields to finalize them. If DO_NOT_FINALIZE is
553 true, the record type is expected to be modified afterwards so it will
554 not be sent to the back-end for finalization. */
557 finish_record_type (tree record_type
, tree fieldlist
, int rep_level
,
558 bool do_not_finalize
)
560 enum tree_code code
= TREE_CODE (record_type
);
561 tree name
= TYPE_NAME (record_type
);
562 tree ada_size
= bitsize_zero_node
;
563 tree size
= bitsize_zero_node
;
564 bool had_size
= TYPE_SIZE (record_type
) != 0;
565 bool had_size_unit
= TYPE_SIZE_UNIT (record_type
) != 0;
566 bool had_align
= TYPE_ALIGN (record_type
) != 0;
569 TYPE_FIELDS (record_type
) = fieldlist
;
571 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
572 generate debug info and have a parallel type. */
573 if (name
&& TREE_CODE (name
) == TYPE_DECL
)
574 name
= DECL_NAME (name
);
575 TYPE_STUB_DECL (record_type
) = create_type_stub_decl (name
, record_type
);
577 /* Globally initialize the record first. If this is a rep'ed record,
578 that just means some initializations; otherwise, layout the record. */
581 TYPE_ALIGN (record_type
) = MAX (BITS_PER_UNIT
, TYPE_ALIGN (record_type
));
582 SET_TYPE_MODE (record_type
, BLKmode
);
585 TYPE_SIZE_UNIT (record_type
) = size_zero_node
;
587 TYPE_SIZE (record_type
) = bitsize_zero_node
;
589 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
590 out just like a UNION_TYPE, since the size will be fixed. */
591 else if (code
== QUAL_UNION_TYPE
)
596 /* Ensure there isn't a size already set. There can be in an error
597 case where there is a rep clause but all fields have errors and
598 no longer have a position. */
599 TYPE_SIZE (record_type
) = 0;
600 layout_type (record_type
);
603 /* At this point, the position and size of each field is known. It was
604 either set before entry by a rep clause, or by laying out the type above.
606 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
607 to compute the Ada size; the GCC size and alignment (for rep'ed records
608 that are not padding types); and the mode (for rep'ed records). We also
609 clear the DECL_BIT_FIELD indication for the cases we know have not been
610 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
612 if (code
== QUAL_UNION_TYPE
)
613 fieldlist
= nreverse (fieldlist
);
615 for (field
= fieldlist
; field
; field
= TREE_CHAIN (field
))
617 tree type
= TREE_TYPE (field
);
618 tree pos
= bit_position (field
);
619 tree this_size
= DECL_SIZE (field
);
622 if ((TREE_CODE (type
) == RECORD_TYPE
623 || TREE_CODE (type
) == UNION_TYPE
624 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
625 && !TYPE_IS_FAT_POINTER_P (type
)
626 && !TYPE_CONTAINS_TEMPLATE_P (type
)
627 && TYPE_ADA_SIZE (type
))
628 this_ada_size
= TYPE_ADA_SIZE (type
);
630 this_ada_size
= this_size
;
632 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
633 if (DECL_BIT_FIELD (field
)
634 && operand_equal_p (this_size
, TYPE_SIZE (type
), 0))
636 unsigned int align
= TYPE_ALIGN (type
);
638 /* In the general case, type alignment is required. */
639 if (value_factor_p (pos
, align
))
641 /* The enclosing record type must be sufficiently aligned.
642 Otherwise, if no alignment was specified for it and it
643 has been laid out already, bump its alignment to the
644 desired one if this is compatible with its size. */
645 if (TYPE_ALIGN (record_type
) >= align
)
647 DECL_ALIGN (field
) = MAX (DECL_ALIGN (field
), align
);
648 DECL_BIT_FIELD (field
) = 0;
652 && value_factor_p (TYPE_SIZE (record_type
), align
))
654 TYPE_ALIGN (record_type
) = align
;
655 DECL_ALIGN (field
) = MAX (DECL_ALIGN (field
), align
);
656 DECL_BIT_FIELD (field
) = 0;
660 /* In the non-strict alignment case, only byte alignment is. */
661 if (!STRICT_ALIGNMENT
662 && DECL_BIT_FIELD (field
)
663 && value_factor_p (pos
, BITS_PER_UNIT
))
664 DECL_BIT_FIELD (field
) = 0;
667 /* If we still have DECL_BIT_FIELD set at this point, we know that the
668 field is technically not addressable. Except that it can actually
669 be addressed if it is BLKmode and happens to be properly aligned. */
670 if (DECL_BIT_FIELD (field
)
671 && !(DECL_MODE (field
) == BLKmode
672 && value_factor_p (pos
, BITS_PER_UNIT
)))
673 DECL_NONADDRESSABLE_P (field
) = 1;
675 /* A type must be as aligned as its most aligned field that is not
676 a bit-field. But this is already enforced by layout_type. */
677 if (rep_level
> 0 && !DECL_BIT_FIELD (field
))
678 TYPE_ALIGN (record_type
)
679 = MAX (TYPE_ALIGN (record_type
), DECL_ALIGN (field
));
684 ada_size
= size_binop (MAX_EXPR
, ada_size
, this_ada_size
);
685 size
= size_binop (MAX_EXPR
, size
, this_size
);
688 case QUAL_UNION_TYPE
:
690 = fold_build3 (COND_EXPR
, bitsizetype
, DECL_QUALIFIER (field
),
691 this_ada_size
, ada_size
);
692 size
= fold_build3 (COND_EXPR
, bitsizetype
, DECL_QUALIFIER (field
),
697 /* Since we know here that all fields are sorted in order of
698 increasing bit position, the size of the record is one
699 higher than the ending bit of the last field processed
700 unless we have a rep clause, since in that case we might
701 have a field outside a QUAL_UNION_TYPE that has a higher ending
702 position. So use a MAX in that case. Also, if this field is a
703 QUAL_UNION_TYPE, we need to take into account the previous size in
704 the case of empty variants. */
706 = merge_sizes (ada_size
, pos
, this_ada_size
,
707 TREE_CODE (type
) == QUAL_UNION_TYPE
, rep_level
> 0);
709 = merge_sizes (size
, pos
, this_size
,
710 TREE_CODE (type
) == QUAL_UNION_TYPE
, rep_level
> 0);
718 if (code
== QUAL_UNION_TYPE
)
719 nreverse (fieldlist
);
721 /* If the type is discriminated, it can be used to access all its
722 constrained subtypes, so force structural equality checks. */
723 if (CONTAINS_PLACEHOLDER_P (size
))
724 SET_TYPE_STRUCTURAL_EQUALITY (record_type
);
728 /* If this is a padding record, we never want to make the size smaller
729 than what was specified in it, if any. */
730 if (TREE_CODE (record_type
) == RECORD_TYPE
731 && TYPE_IS_PADDING_P (record_type
) && TYPE_SIZE (record_type
))
732 size
= TYPE_SIZE (record_type
);
734 /* Now set any of the values we've just computed that apply. */
735 if (!TYPE_IS_FAT_POINTER_P (record_type
)
736 && !TYPE_CONTAINS_TEMPLATE_P (record_type
))
737 SET_TYPE_ADA_SIZE (record_type
, ada_size
);
741 tree size_unit
= had_size_unit
742 ? TYPE_SIZE_UNIT (record_type
)
744 size_binop (CEIL_DIV_EXPR
, size
,
746 unsigned int align
= TYPE_ALIGN (record_type
);
748 TYPE_SIZE (record_type
) = variable_size (round_up (size
, align
));
749 TYPE_SIZE_UNIT (record_type
)
750 = variable_size (round_up (size_unit
, align
/ BITS_PER_UNIT
));
752 compute_record_mode (record_type
);
756 if (!do_not_finalize
)
757 rest_of_record_type_compilation (record_type
);
760 /* Wrap up compilation of RECORD_TYPE, i.e. most notably output all
761 the debug information associated with it. It need not be invoked
762 directly in most cases since finish_record_type takes care of doing
763 so, unless explicitly requested not to through DO_NOT_FINALIZE. */
766 rest_of_record_type_compilation (tree record_type
)
768 tree fieldlist
= TYPE_FIELDS (record_type
);
770 enum tree_code code
= TREE_CODE (record_type
);
771 bool var_size
= false;
773 for (field
= fieldlist
; field
; field
= TREE_CHAIN (field
))
775 /* We need to make an XVE/XVU record if any field has variable size,
776 whether or not the record does. For example, if we have a union,
777 it may be that all fields, rounded up to the alignment, have the
778 same size, in which case we'll use that size. But the debug
779 output routines (except Dwarf2) won't be able to output the fields,
780 so we need to make the special record. */
781 if (TREE_CODE (DECL_SIZE (field
)) != INTEGER_CST
782 /* If a field has a non-constant qualifier, the record will have
783 variable size too. */
784 || (code
== QUAL_UNION_TYPE
785 && TREE_CODE (DECL_QUALIFIER (field
)) != INTEGER_CST
))
792 /* If this record is of variable size, rename it so that the
793 debugger knows it is and make a new, parallel, record
794 that tells the debugger how the record is laid out. See
795 exp_dbug.ads. But don't do this for records that are padding
796 since they confuse GDB. */
798 && !(TREE_CODE (record_type
) == RECORD_TYPE
799 && TYPE_IS_PADDING_P (record_type
)))
802 = make_node (TREE_CODE (record_type
) == QUAL_UNION_TYPE
803 ? UNION_TYPE
: TREE_CODE (record_type
));
804 tree orig_name
= TYPE_NAME (record_type
), new_name
;
805 tree last_pos
= bitsize_zero_node
;
806 tree old_field
, prev_old_field
= NULL_TREE
;
808 if (TREE_CODE (orig_name
) == TYPE_DECL
)
809 orig_name
= DECL_NAME (orig_name
);
812 = concat_name (orig_name
, TREE_CODE (record_type
) == QUAL_UNION_TYPE
814 TYPE_NAME (new_record_type
) = new_name
;
815 TYPE_ALIGN (new_record_type
) = BIGGEST_ALIGNMENT
;
816 TYPE_STUB_DECL (new_record_type
)
817 = create_type_stub_decl (new_name
, new_record_type
);
818 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type
))
819 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type
));
820 TYPE_SIZE (new_record_type
) = size_int (TYPE_ALIGN (record_type
));
821 TYPE_SIZE_UNIT (new_record_type
)
822 = size_int (TYPE_ALIGN (record_type
) / BITS_PER_UNIT
);
824 add_parallel_type (TYPE_STUB_DECL (record_type
), new_record_type
);
826 /* Now scan all the fields, replacing each field with a new
827 field corresponding to the new encoding. */
828 for (old_field
= TYPE_FIELDS (record_type
); old_field
;
829 old_field
= TREE_CHAIN (old_field
))
831 tree field_type
= TREE_TYPE (old_field
);
832 tree field_name
= DECL_NAME (old_field
);
834 tree curpos
= bit_position (old_field
);
836 unsigned int align
= 0;
839 /* See how the position was modified from the last position.
841 There are two basic cases we support: a value was added
842 to the last position or the last position was rounded to
843 a boundary and they something was added. Check for the
844 first case first. If not, see if there is any evidence
845 of rounding. If so, round the last position and try
848 If this is a union, the position can be taken as zero. */
850 /* Some computations depend on the shape of the position expression,
851 so strip conversions to make sure it's exposed. */
852 curpos
= remove_conversions (curpos
, true);
854 if (TREE_CODE (new_record_type
) == UNION_TYPE
)
855 pos
= bitsize_zero_node
, align
= 0;
857 pos
= compute_related_constant (curpos
, last_pos
);
859 if (!pos
&& TREE_CODE (curpos
) == MULT_EXPR
860 && host_integerp (TREE_OPERAND (curpos
, 1), 1))
862 tree offset
= TREE_OPERAND (curpos
, 0);
863 align
= tree_low_cst (TREE_OPERAND (curpos
, 1), 1);
865 /* An offset which is a bitwise AND with a negative power of 2
866 means an alignment corresponding to this power of 2. */
867 offset
= remove_conversions (offset
, true);
868 if (TREE_CODE (offset
) == BIT_AND_EXPR
869 && host_integerp (TREE_OPERAND (offset
, 1), 0)
870 && tree_int_cst_sgn (TREE_OPERAND (offset
, 1)) < 0)
873 = - tree_low_cst (TREE_OPERAND (offset
, 1), 0);
874 if (exact_log2 (pow
) > 0)
878 pos
= compute_related_constant (curpos
,
879 round_up (last_pos
, align
));
881 else if (!pos
&& TREE_CODE (curpos
) == PLUS_EXPR
882 && TREE_CODE (TREE_OPERAND (curpos
, 1)) == INTEGER_CST
883 && TREE_CODE (TREE_OPERAND (curpos
, 0)) == MULT_EXPR
884 && host_integerp (TREE_OPERAND
885 (TREE_OPERAND (curpos
, 0), 1),
890 (TREE_OPERAND (TREE_OPERAND (curpos
, 0), 1), 1);
891 pos
= compute_related_constant (curpos
,
892 round_up (last_pos
, align
));
894 else if (potential_alignment_gap (prev_old_field
, old_field
,
897 align
= TYPE_ALIGN (field_type
);
898 pos
= compute_related_constant (curpos
,
899 round_up (last_pos
, align
));
902 /* If we can't compute a position, set it to zero.
904 ??? We really should abort here, but it's too much work
905 to get this correct for all cases. */
908 pos
= bitsize_zero_node
;
910 /* See if this type is variable-sized and make a pointer type
911 and indicate the indirection if so. Beware that the debug
912 back-end may adjust the position computed above according
913 to the alignment of the field type, i.e. the pointer type
914 in this case, if we don't preventively counter that. */
915 if (TREE_CODE (DECL_SIZE (old_field
)) != INTEGER_CST
)
917 field_type
= build_pointer_type (field_type
);
918 if (align
!= 0 && TYPE_ALIGN (field_type
) > align
)
920 field_type
= copy_node (field_type
);
921 TYPE_ALIGN (field_type
) = align
;
926 /* Make a new field name, if necessary. */
927 if (var
|| align
!= 0)
932 sprintf (suffix
, "XV%c%u", var
? 'L' : 'A',
933 align
/ BITS_PER_UNIT
);
935 strcpy (suffix
, "XVL");
937 field_name
= concat_name (field_name
, suffix
);
940 new_field
= create_field_decl (field_name
, field_type
,
942 DECL_SIZE (old_field
), pos
, 0);
943 TREE_CHAIN (new_field
) = TYPE_FIELDS (new_record_type
);
944 TYPE_FIELDS (new_record_type
) = new_field
;
946 /* If old_field is a QUAL_UNION_TYPE, take its size as being
947 zero. The only time it's not the last field of the record
948 is when there are other components at fixed positions after
949 it (meaning there was a rep clause for every field) and we
950 want to be able to encode them. */
951 last_pos
= size_binop (PLUS_EXPR
, bit_position (old_field
),
952 (TREE_CODE (TREE_TYPE (old_field
))
955 : DECL_SIZE (old_field
));
956 prev_old_field
= old_field
;
959 TYPE_FIELDS (new_record_type
)
960 = nreverse (TYPE_FIELDS (new_record_type
));
962 rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type
));
965 rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type
));
968 /* Append PARALLEL_TYPE on the chain of parallel types for decl. */
971 add_parallel_type (tree decl
, tree parallel_type
)
975 while (DECL_PARALLEL_TYPE (d
))
976 d
= TYPE_STUB_DECL (DECL_PARALLEL_TYPE (d
));
978 SET_DECL_PARALLEL_TYPE (d
, parallel_type
);
981 /* Return the parallel type associated to a type, if any. */
984 get_parallel_type (tree type
)
986 if (TYPE_STUB_DECL (type
))
987 return DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type
));
992 /* Utility function of above to merge LAST_SIZE, the previous size of a record
993 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
994 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
995 replace a value of zero with the old size. If HAS_REP is true, we take the
996 MAX of the end position of this field with LAST_SIZE. In all other cases,
997 we use FIRST_BIT plus SIZE. Return an expression for the size. */
1000 merge_sizes (tree last_size
, tree first_bit
, tree size
, bool special
,
1003 tree type
= TREE_TYPE (last_size
);
1006 if (!special
|| TREE_CODE (size
) != COND_EXPR
)
1008 new = size_binop (PLUS_EXPR
, first_bit
, size
);
1010 new = size_binop (MAX_EXPR
, last_size
, new);
1014 new = fold_build3 (COND_EXPR
, type
, TREE_OPERAND (size
, 0),
1015 integer_zerop (TREE_OPERAND (size
, 1))
1016 ? last_size
: merge_sizes (last_size
, first_bit
,
1017 TREE_OPERAND (size
, 1),
1019 integer_zerop (TREE_OPERAND (size
, 2))
1020 ? last_size
: merge_sizes (last_size
, first_bit
,
1021 TREE_OPERAND (size
, 2),
1024 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
1025 when fed through substitute_in_expr) into thinking that a constant
1026 size is not constant. */
1027 while (TREE_CODE (new) == NON_LVALUE_EXPR
)
1028 new = TREE_OPERAND (new, 0);
1033 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1034 related by the addition of a constant. Return that constant if so. */
1037 compute_related_constant (tree op0
, tree op1
)
1039 tree op0_var
, op1_var
;
1040 tree op0_con
= split_plus (op0
, &op0_var
);
1041 tree op1_con
= split_plus (op1
, &op1_var
);
1042 tree result
= size_binop (MINUS_EXPR
, op0_con
, op1_con
);
1044 if (operand_equal_p (op0_var
, op1_var
, 0))
1046 else if (operand_equal_p (op0
, size_binop (PLUS_EXPR
, op1_var
, result
), 0))
1052 /* Utility function of above to split a tree OP which may be a sum, into a
1053 constant part, which is returned, and a variable part, which is stored
1054 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1058 split_plus (tree in
, tree
*pvar
)
1060 /* Strip NOPS in order to ease the tree traversal and maximize the
1061 potential for constant or plus/minus discovery. We need to be careful
1062 to always return and set *pvar to bitsizetype trees, but it's worth
1066 *pvar
= convert (bitsizetype
, in
);
1068 if (TREE_CODE (in
) == INTEGER_CST
)
1070 *pvar
= bitsize_zero_node
;
1071 return convert (bitsizetype
, in
);
1073 else if (TREE_CODE (in
) == PLUS_EXPR
|| TREE_CODE (in
) == MINUS_EXPR
)
1075 tree lhs_var
, rhs_var
;
1076 tree lhs_con
= split_plus (TREE_OPERAND (in
, 0), &lhs_var
);
1077 tree rhs_con
= split_plus (TREE_OPERAND (in
, 1), &rhs_var
);
1079 if (lhs_var
== TREE_OPERAND (in
, 0)
1080 && rhs_var
== TREE_OPERAND (in
, 1))
1081 return bitsize_zero_node
;
1083 *pvar
= size_binop (TREE_CODE (in
), lhs_var
, rhs_var
);
1084 return size_binop (TREE_CODE (in
), lhs_con
, rhs_con
);
1087 return bitsize_zero_node
;
1090 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1091 subprogram. If it is void_type_node, then we are dealing with a procedure,
1092 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1093 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1094 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1095 RETURNS_UNCONSTRAINED is true if the function returns an unconstrained
1096 object. RETURNS_BY_REF is true if the function returns by reference.
1097 RETURNS_BY_TARGET_PTR is true if the function is to be passed (as its
1098 first parameter) the address of the place to copy its result. */
1101 create_subprog_type (tree return_type
, tree param_decl_list
, tree cico_list
,
1102 bool returns_unconstrained
, bool returns_by_ref
,
1103 bool returns_by_target_ptr
)
1105 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1106 the subprogram formal parameters. This list is generated by traversing the
1107 input list of PARM_DECL nodes. */
1108 tree param_type_list
= NULL
;
1112 for (param_decl
= param_decl_list
; param_decl
;
1113 param_decl
= TREE_CHAIN (param_decl
))
1114 param_type_list
= tree_cons (NULL_TREE
, TREE_TYPE (param_decl
),
1117 /* The list of the function parameter types has to be terminated by the void
1118 type to signal to the back-end that we are not dealing with a variable
1119 parameter subprogram, but that the subprogram has a fixed number of
1121 param_type_list
= tree_cons (NULL_TREE
, void_type_node
, param_type_list
);
1123 /* The list of argument types has been created in reverse
1125 param_type_list
= nreverse (param_type_list
);
1127 type
= build_function_type (return_type
, param_type_list
);
1129 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1130 or the new type should, make a copy of TYPE. Likewise for
1131 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1132 if (TYPE_CI_CO_LIST (type
) || cico_list
1133 || TYPE_RETURNS_UNCONSTRAINED_P (type
) != returns_unconstrained
1134 || TYPE_RETURNS_BY_REF_P (type
) != returns_by_ref
1135 || TYPE_RETURNS_BY_TARGET_PTR_P (type
) != returns_by_target_ptr
)
1136 type
= copy_type (type
);
1138 TYPE_CI_CO_LIST (type
) = cico_list
;
1139 TYPE_RETURNS_UNCONSTRAINED_P (type
) = returns_unconstrained
;
1140 TYPE_RETURNS_BY_REF_P (type
) = returns_by_ref
;
1141 TYPE_RETURNS_BY_TARGET_PTR_P (type
) = returns_by_target_ptr
;
1145 /* Return a copy of TYPE but safe to modify in any way. */
1148 copy_type (tree type
)
1150 tree
new = copy_node (type
);
1152 /* copy_node clears this field instead of copying it, because it is
1153 aliased with TREE_CHAIN. */
1154 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type
);
1156 TYPE_POINTER_TO (new) = 0;
1157 TYPE_REFERENCE_TO (new) = 0;
1158 TYPE_MAIN_VARIANT (new) = new;
1159 TYPE_NEXT_VARIANT (new) = 0;
1164 /* Return a subtype of sizetype with range MIN to MAX and whose
1165 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
1166 of the associated TYPE_DECL. */
1169 create_index_type (tree min
, tree max
, tree index
, Node_Id gnat_node
)
1171 /* First build a type for the desired range. */
1172 tree type
= build_index_2_type (min
, max
);
1174 /* If this type has the TYPE_INDEX_TYPE we want, return it. */
1175 if (TYPE_INDEX_TYPE (type
) == index
)
1178 /* Otherwise, if TYPE_INDEX_TYPE is set, make a copy. Note that we have
1179 no way of sharing these types, but that's only a small hole. */
1180 if (TYPE_INDEX_TYPE (type
))
1181 type
= copy_type (type
);
1183 SET_TYPE_INDEX_TYPE (type
, index
);
1184 create_type_decl (NULL_TREE
, type
, NULL
, true, false, gnat_node
);
1189 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
1190 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
1194 create_type_stub_decl (tree type_name
, tree type
)
1196 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
1197 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
1198 emitted in DWARF. */
1199 tree type_decl
= build_decl (TYPE_DECL
, type_name
, type
);
1200 DECL_ARTIFICIAL (type_decl
) = 1;
1204 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
1205 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
1206 is a declaration that was generated by the compiler. DEBUG_INFO_P is
1207 true if we need to write debug information about this type. GNAT_NODE
1208 is used for the position of the decl. */
1211 create_type_decl (tree type_name
, tree type
, struct attrib
*attr_list
,
1212 bool artificial_p
, bool debug_info_p
, Node_Id gnat_node
)
1214 enum tree_code code
= TREE_CODE (type
);
1215 bool named
= TYPE_NAME (type
) && TREE_CODE (TYPE_NAME (type
)) == TYPE_DECL
;
1218 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
1219 gcc_assert (!TYPE_IS_DUMMY_P (type
));
1221 /* If the type hasn't been named yet, we're naming it; preserve an existing
1222 TYPE_STUB_DECL that has been attached to it for some purpose. */
1223 if (!named
&& TYPE_STUB_DECL (type
))
1225 type_decl
= TYPE_STUB_DECL (type
);
1226 DECL_NAME (type_decl
) = type_name
;
1229 type_decl
= build_decl (TYPE_DECL
, type_name
, type
);
1231 DECL_ARTIFICIAL (type_decl
) = artificial_p
;
1232 gnat_pushdecl (type_decl
, gnat_node
);
1233 process_attributes (type_decl
, attr_list
);
1235 /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
1236 This causes the name to be also viewed as a "tag" by the debug
1237 back-end, with the advantage that no DW_TAG_typedef is emitted
1238 for artificial "tagged" types in DWARF. */
1240 TYPE_STUB_DECL (type
) = type_decl
;
1242 /* Pass the type declaration to the debug back-end unless this is an
1243 UNCONSTRAINED_ARRAY_TYPE that the back-end does not support, or a
1244 type for which debugging information was not requested, or else an
1245 ENUMERAL_TYPE or RECORD_TYPE (except for fat pointers) which are
1246 handled separately. And do not pass dummy types either. */
1247 if (code
== UNCONSTRAINED_ARRAY_TYPE
|| !debug_info_p
)
1248 DECL_IGNORED_P (type_decl
) = 1;
1249 else if (code
!= ENUMERAL_TYPE
1250 && (code
!= RECORD_TYPE
|| TYPE_IS_FAT_POINTER_P (type
))
1251 && !((code
== POINTER_TYPE
|| code
== REFERENCE_TYPE
)
1252 && TYPE_IS_DUMMY_P (TREE_TYPE (type
)))
1253 && !(code
== RECORD_TYPE
1255 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type
))))))
1256 rest_of_type_decl_compilation (type_decl
);
1261 /* Return a VAR_DECL or CONST_DECL node.
1263 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
1264 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
1265 the GCC tree for an optional initial expression; NULL_TREE if none.
1267 CONST_FLAG is true if this variable is constant, in which case we might
1268 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
1270 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
1271 definition to be made visible outside of the current compilation unit, for
1272 instance variable definitions in a package specification.
1274 EXTERN_FLAG is true when processing an external variable declaration (as
1275 opposed to a definition: no storage is to be allocated for the variable).
1277 STATIC_FLAG is only relevant when not at top level. In that case
1278 it indicates whether to always allocate storage to the variable.
1280 GNAT_NODE is used for the position of the decl. */
1283 create_var_decl_1 (tree var_name
, tree asm_name
, tree type
, tree var_init
,
1284 bool const_flag
, bool public_flag
, bool extern_flag
,
1285 bool static_flag
, bool const_decl_allowed_p
,
1286 struct attrib
*attr_list
, Node_Id gnat_node
)
1290 && gnat_types_compatible_p (type
, TREE_TYPE (var_init
))
1291 && (global_bindings_p () || static_flag
1292 ? initializer_constant_valid_p (var_init
, TREE_TYPE (var_init
)) != 0
1293 : TREE_CONSTANT (var_init
)));
1295 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
1296 case the initializer may be used in-lieu of the DECL node (as done in
1297 Identifier_to_gnu). This is useful to prevent the need of elaboration
1298 code when an identifier for which such a decl is made is in turn used as
1299 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
1300 but extra constraints apply to this choice (see below) and are not
1301 relevant to the distinction we wish to make. */
1302 bool constant_p
= const_flag
&& init_const
;
1304 /* The actual DECL node. CONST_DECL was initially intended for enumerals
1305 and may be used for scalars in general but not for aggregates. */
1307 = build_decl ((constant_p
&& const_decl_allowed_p
1308 && !AGGREGATE_TYPE_P (type
)) ? CONST_DECL
: VAR_DECL
,
1311 /* If this is external, throw away any initializations (they will be done
1312 elsewhere) unless this is a constant for which we would like to remain
1313 able to get the initializer. If we are defining a global here, leave a
1314 constant initialization and save any variable elaborations for the
1315 elaboration routine. If we are just annotating types, throw away the
1316 initialization if it isn't a constant. */
1317 if ((extern_flag
&& !constant_p
)
1318 || (type_annotate_only
&& var_init
&& !TREE_CONSTANT (var_init
)))
1319 var_init
= NULL_TREE
;
1321 /* At the global level, an initializer requiring code to be generated
1322 produces elaboration statements. Check that such statements are allowed,
1323 that is, not violating a No_Elaboration_Code restriction. */
1324 if (global_bindings_p () && var_init
!= 0 && ! init_const
)
1325 Check_Elaboration_Code_Allowed (gnat_node
);
1327 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
1328 try to fiddle with DECL_COMMON. However, on platforms that don't
1329 support global BSS sections, uninitialized global variables would
1330 go in DATA instead, thus increasing the size of the executable. */
1332 && TREE_CODE (var_decl
) == VAR_DECL
1333 && !have_global_bss_p ())
1334 DECL_COMMON (var_decl
) = 1;
1335 DECL_INITIAL (var_decl
) = var_init
;
1336 TREE_READONLY (var_decl
) = const_flag
;
1337 DECL_EXTERNAL (var_decl
) = extern_flag
;
1338 TREE_PUBLIC (var_decl
) = public_flag
|| extern_flag
;
1339 TREE_CONSTANT (var_decl
) = constant_p
;
1340 TREE_THIS_VOLATILE (var_decl
) = TREE_SIDE_EFFECTS (var_decl
)
1341 = TYPE_VOLATILE (type
);
1343 /* If it's public and not external, always allocate storage for it.
1344 At the global binding level we need to allocate static storage for the
1345 variable if and only if it's not external. If we are not at the top level
1346 we allocate automatic storage unless requested not to. */
1347 TREE_STATIC (var_decl
)
1348 = !extern_flag
&& (public_flag
|| static_flag
|| global_bindings_p ());
1350 /* For an external constant whose initializer is not absolute, do not emit
1351 debug info. In DWARF this would mean a global relocation in a read-only
1352 section which runs afoul of the PE-COFF runtime relocation mechanism. */
1355 && initializer_constant_valid_p (var_init
, TREE_TYPE (var_init
))
1356 != null_pointer_node
)
1357 DECL_IGNORED_P (var_decl
) = 1;
1359 if (asm_name
&& VAR_OR_FUNCTION_DECL_P (var_decl
))
1360 SET_DECL_ASSEMBLER_NAME (var_decl
, asm_name
);
1362 process_attributes (var_decl
, attr_list
);
1364 /* Add this decl to the current binding level. */
1365 gnat_pushdecl (var_decl
, gnat_node
);
1367 if (TREE_SIDE_EFFECTS (var_decl
))
1368 TREE_ADDRESSABLE (var_decl
) = 1;
1370 if (TREE_CODE (var_decl
) != CONST_DECL
)
1372 if (global_bindings_p ())
1373 rest_of_decl_compilation (var_decl
, true, 0);
1376 expand_decl (var_decl
);
1381 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
1384 aggregate_type_contains_array_p (tree type
)
1386 switch (TREE_CODE (type
))
1390 case QUAL_UNION_TYPE
:
1393 for (field
= TYPE_FIELDS (type
); field
; field
= TREE_CHAIN (field
))
1394 if (AGGREGATE_TYPE_P (TREE_TYPE (field
))
1395 && aggregate_type_contains_array_p (TREE_TYPE (field
)))
1408 /* Return a FIELD_DECL node. FIELD_NAME the field name, FIELD_TYPE is its
1409 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1410 this field is in a record type with a "pragma pack". If SIZE is nonzero
1411 it is the specified size for this field. If POS is nonzero, it is the bit
1412 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1413 the address of this field for aliasing purposes. If it is negative, we
1414 should not make a bitfield, which is used by make_aligning_type. */
1417 create_field_decl (tree field_name
, tree field_type
, tree record_type
,
1418 int packed
, tree size
, tree pos
, int addressable
)
1420 tree field_decl
= build_decl (FIELD_DECL
, field_name
, field_type
);
1422 DECL_CONTEXT (field_decl
) = record_type
;
1423 TREE_READONLY (field_decl
) = TYPE_READONLY (field_type
);
1425 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
1426 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
1427 Likewise for an aggregate without specified position that contains an
1428 array, because in this case slices of variable length of this array
1429 must be handled by GCC and variable-sized objects need to be aligned
1430 to at least a byte boundary. */
1431 if (packed
&& (TYPE_MODE (field_type
) == BLKmode
1433 && AGGREGATE_TYPE_P (field_type
)
1434 && aggregate_type_contains_array_p (field_type
))))
1435 DECL_ALIGN (field_decl
) = BITS_PER_UNIT
;
1437 /* If a size is specified, use it. Otherwise, if the record type is packed
1438 compute a size to use, which may differ from the object's natural size.
1439 We always set a size in this case to trigger the checks for bitfield
1440 creation below, which is typically required when no position has been
1443 size
= convert (bitsizetype
, size
);
1444 else if (packed
== 1)
1446 size
= rm_size (field_type
);
1448 /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
1450 if (TREE_CODE (size
) == INTEGER_CST
1451 && compare_tree_int (size
, MAX_FIXED_MODE_SIZE
) > 0)
1452 size
= round_up (size
, BITS_PER_UNIT
);
1455 /* If we may, according to ADDRESSABLE, make a bitfield if a size is
1456 specified for two reasons: first if the size differs from the natural
1457 size. Second, if the alignment is insufficient. There are a number of
1458 ways the latter can be true.
1460 We never make a bitfield if the type of the field has a nonconstant size,
1461 because no such entity requiring bitfield operations should reach here.
1463 We do *preventively* make a bitfield when there might be the need for it
1464 but we don't have all the necessary information to decide, as is the case
1465 of a field with no specified position in a packed record.
1467 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
1468 in layout_decl or finish_record_type to clear the bit_field indication if
1469 it is in fact not needed. */
1470 if (addressable
>= 0
1472 && TREE_CODE (size
) == INTEGER_CST
1473 && TREE_CODE (TYPE_SIZE (field_type
)) == INTEGER_CST
1474 && (!tree_int_cst_equal (size
, TYPE_SIZE (field_type
))
1475 || (pos
&& !value_factor_p (pos
, TYPE_ALIGN (field_type
)))
1477 || (TYPE_ALIGN (record_type
) != 0
1478 && TYPE_ALIGN (record_type
) < TYPE_ALIGN (field_type
))))
1480 DECL_BIT_FIELD (field_decl
) = 1;
1481 DECL_SIZE (field_decl
) = size
;
1482 if (!packed
&& !pos
)
1484 if (TYPE_ALIGN (record_type
) != 0
1485 && TYPE_ALIGN (record_type
) < TYPE_ALIGN (field_type
))
1486 DECL_ALIGN (field_decl
) = TYPE_ALIGN (record_type
);
1488 DECL_ALIGN (field_decl
) = TYPE_ALIGN (field_type
);
1492 DECL_PACKED (field_decl
) = pos
? DECL_BIT_FIELD (field_decl
) : packed
;
1494 /* Bump the alignment if need be, either for bitfield/packing purposes or
1495 to satisfy the type requirements if no such consideration applies. When
1496 we get the alignment from the type, indicate if this is from an explicit
1497 user request, which prevents stor-layout from lowering it later on. */
1499 unsigned int bit_align
1500 = (DECL_BIT_FIELD (field_decl
) ? 1
1501 : packed
&& TYPE_MODE (field_type
) != BLKmode
? BITS_PER_UNIT
: 0);
1503 if (bit_align
> DECL_ALIGN (field_decl
))
1504 DECL_ALIGN (field_decl
) = bit_align
;
1505 else if (!bit_align
&& TYPE_ALIGN (field_type
) > DECL_ALIGN (field_decl
))
1507 DECL_ALIGN (field_decl
) = TYPE_ALIGN (field_type
);
1508 DECL_USER_ALIGN (field_decl
) = TYPE_USER_ALIGN (field_type
);
1514 /* We need to pass in the alignment the DECL is known to have.
1515 This is the lowest-order bit set in POS, but no more than
1516 the alignment of the record, if one is specified. Note
1517 that an alignment of 0 is taken as infinite. */
1518 unsigned int known_align
;
1520 if (host_integerp (pos
, 1))
1521 known_align
= tree_low_cst (pos
, 1) & - tree_low_cst (pos
, 1);
1523 known_align
= BITS_PER_UNIT
;
1525 if (TYPE_ALIGN (record_type
)
1526 && (known_align
== 0 || known_align
> TYPE_ALIGN (record_type
)))
1527 known_align
= TYPE_ALIGN (record_type
);
1529 layout_decl (field_decl
, known_align
);
1530 SET_DECL_OFFSET_ALIGN (field_decl
,
1531 host_integerp (pos
, 1) ? BIGGEST_ALIGNMENT
1533 pos_from_bit (&DECL_FIELD_OFFSET (field_decl
),
1534 &DECL_FIELD_BIT_OFFSET (field_decl
),
1535 DECL_OFFSET_ALIGN (field_decl
), pos
);
1538 /* In addition to what our caller says, claim the field is addressable if we
1539 know that its type is not suitable.
1541 The field may also be "technically" nonaddressable, meaning that even if
1542 we attempt to take the field's address we will actually get the address
1543 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
1544 value we have at this point is not accurate enough, so we don't account
1545 for this here and let finish_record_type decide. */
1546 if (!addressable
&& !type_for_nonaliased_component_p (field_type
))
1549 DECL_NONADDRESSABLE_P (field_decl
) = !addressable
;
1554 /* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
1555 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
1556 (either an In parameter or an address of a pass-by-ref parameter). */
1559 create_param_decl (tree param_name
, tree param_type
, bool readonly
)
1561 tree param_decl
= build_decl (PARM_DECL
, param_name
, param_type
);
1563 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
1564 can lead to various ABI violations. */
1565 if (targetm
.calls
.promote_prototypes (NULL_TREE
)
1566 && INTEGRAL_TYPE_P (param_type
)
1567 && TYPE_PRECISION (param_type
) < TYPE_PRECISION (integer_type_node
))
1569 /* We have to be careful about biased types here. Make a subtype
1570 of integer_type_node with the proper biasing. */
1571 if (TREE_CODE (param_type
) == INTEGER_TYPE
1572 && TYPE_BIASED_REPRESENTATION_P (param_type
))
1574 tree subtype
= make_node (INTEGER_TYPE
);
1575 TREE_TYPE (subtype
) = integer_type_node
;
1576 TYPE_BIASED_REPRESENTATION_P (subtype
) = 1;
1578 TYPE_UNSIGNED (subtype
) = 1;
1579 TYPE_PRECISION (subtype
) = TYPE_PRECISION (integer_type_node
);
1580 TYPE_MIN_VALUE (subtype
) = TYPE_MIN_VALUE (param_type
);
1581 TYPE_MAX_VALUE (subtype
) = TYPE_MAX_VALUE (param_type
);
1582 layout_type (subtype
);
1584 param_type
= subtype
;
1587 param_type
= integer_type_node
;
1590 DECL_ARG_TYPE (param_decl
) = param_type
;
1591 TREE_READONLY (param_decl
) = readonly
;
1595 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1598 process_attributes (tree decl
, struct attrib
*attr_list
)
1600 for (; attr_list
; attr_list
= attr_list
->next
)
1601 switch (attr_list
->type
)
1603 case ATTR_MACHINE_ATTRIBUTE
:
1604 decl_attributes (&decl
, tree_cons (attr_list
->name
, attr_list
->args
,
1606 ATTR_FLAG_TYPE_IN_PLACE
);
1609 case ATTR_LINK_ALIAS
:
1610 if (! DECL_EXTERNAL (decl
))
1612 TREE_STATIC (decl
) = 1;
1613 assemble_alias (decl
, attr_list
->name
);
1617 case ATTR_WEAK_EXTERNAL
:
1619 declare_weak (decl
);
1621 post_error ("?weak declarations not supported on this target",
1622 attr_list
->error_point
);
1625 case ATTR_LINK_SECTION
:
1626 if (targetm
.have_named_sections
)
1628 DECL_SECTION_NAME (decl
)
1629 = build_string (IDENTIFIER_LENGTH (attr_list
->name
),
1630 IDENTIFIER_POINTER (attr_list
->name
));
1631 DECL_COMMON (decl
) = 0;
1634 post_error ("?section attributes are not supported for this target",
1635 attr_list
->error_point
);
1638 case ATTR_LINK_CONSTRUCTOR
:
1639 DECL_STATIC_CONSTRUCTOR (decl
) = 1;
1640 TREE_USED (decl
) = 1;
1643 case ATTR_LINK_DESTRUCTOR
:
1644 DECL_STATIC_DESTRUCTOR (decl
) = 1;
1645 TREE_USED (decl
) = 1;
1648 case ATTR_THREAD_LOCAL_STORAGE
:
1649 DECL_TLS_MODEL (decl
) = decl_default_tls_model (decl
);
1650 DECL_COMMON (decl
) = 0;
1655 /* Record DECL as a global renaming pointer. */
1658 record_global_renaming_pointer (tree decl
)
1660 gcc_assert (DECL_RENAMED_OBJECT (decl
));
1661 VEC_safe_push (tree
, gc
, global_renaming_pointers
, decl
);
1664 /* Invalidate the global renaming pointers. */
1667 invalidate_global_renaming_pointers (void)
1672 for (i
= 0; VEC_iterate(tree
, global_renaming_pointers
, i
, iter
); i
++)
1673 SET_DECL_RENAMED_OBJECT (iter
, NULL_TREE
);
1675 VEC_free (tree
, gc
, global_renaming_pointers
);
/* Return true if VALUE is known to be a multiple of FACTOR, which must be
   a power of 2.  */

static bool
value_factor_p (tree value, HOST_WIDE_INT factor)
{
  if (host_integerp (value, 1))
    return tree_low_cst (value, 1) % factor == 0;

  if (TREE_CODE (value) == MULT_EXPR)
    return (value_factor_p (TREE_OPERAND (value, 0), factor)
            || value_factor_p (TREE_OPERAND (value, 1), factor));

  return false;
}
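/* E.g. a constant size of 48 bits is a multiple of 16, and so is a
   MULT_EXPR such as <n> * 32, since one of its operands already is.  */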
/* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
   unless we can prove these 2 fields are laid out in such a way that no gap
   exists between the end of PREV_FIELD and the beginning of CURR_FIELD.
   OFFSET is the distance in bits between the end of PREV_FIELD and the
   starting position of CURR_FIELD.  It is ignored if null.  */

static bool
potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
{
  /* If this is the first field of the record, there cannot be any gap.  */
  if (!prev_field)
    return false;

  /* If the previous field is a union type, then return false: the only
     time when such a field is not the last field of the record is when
     there are other components at fixed positions after it (meaning there
     was a rep clause for every field), in which case we don't want the
     alignment constraint to override them.  */
  if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
    return false;

  /* If the distance between the end of prev_field and the beginning of
     curr_field is constant, then there is a gap if the value of this
     constant is not zero.  */
  if (offset && host_integerp (offset, 1))
    return !integer_zerop (offset);

  /* If the size and position of the previous field are constant,
     then check the sum of this size and position.  There will be a gap
     iff it is not a multiple of the current field alignment.  */
  if (host_integerp (DECL_SIZE (prev_field), 1)
      && host_integerp (bit_position (prev_field), 1))
    return ((tree_low_cst (bit_position (prev_field), 1)
             + tree_low_cst (DECL_SIZE (prev_field), 1))
            % DECL_ALIGN (curr_field) != 0);

  /* If both the position and size of the previous field are multiples
     of the current field alignment, there cannot be any gap.  */
  if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
      && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
    return false;

  /* Fallback, return that there may be a potential gap.  */
  return true;
}
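/* As an example, an 8-bit PREV_FIELD placed at bit 0 followed by a
   CURR_FIELD requiring 16-bit alignment gives (0 + 8) % 16 != 0, so a
   potential gap is reported; were PREV_FIELD 16 bits wide, it would not
   be.  (Sizes here are illustrative only.)  */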
/* Returns a LABEL_DECL node for LABEL_NAME.  */

tree
create_label_decl (tree label_name)
{
  tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);

  DECL_CONTEXT (label_decl) = current_function_decl;
  DECL_MODE (label_decl) = VOIDmode;
  DECL_SOURCE_LOCATION (label_decl) = input_location;

  return label_decl;
}
/* Returns a FUNCTION_DECL node.  SUBPROG_NAME is the name of the subprogram,
   ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
   node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
   PARM_DECL nodes chained through the TREE_CHAIN field).

   INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
   appropriate fields in the FUNCTION_DECL.  GNAT_NODE gives the location.  */

tree
create_subprog_decl (tree subprog_name, tree asm_name,
                     tree subprog_type, tree param_decl_list, bool inline_flag,
                     bool public_flag, bool extern_flag,
                     struct attrib *attr_list, Node_Id gnat_node)
{
  tree return_type = TREE_TYPE (subprog_type);
  tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);

  /* If this is a non-inline function nested inside an inlined external
     function, we cannot honor both requests without cloning the nested
     function in the current unit since it is private to the other unit.
     We could inline the nested function as well but it's probably better
     to err on the side of too little inlining.  */
  if (!inline_flag
      && current_function_decl
      && DECL_DECLARED_INLINE_P (current_function_decl)
      && DECL_EXTERNAL (current_function_decl))
    DECL_DECLARED_INLINE_P (current_function_decl) = 0;

  DECL_EXTERNAL (subprog_decl)  = extern_flag;
  TREE_PUBLIC (subprog_decl)    = public_flag;
  TREE_STATIC (subprog_decl)    = 1;
  TREE_READONLY (subprog_decl)  = TYPE_READONLY (subprog_type);
  TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
  TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
  DECL_DECLARED_INLINE_P (subprog_decl) = inline_flag;
  DECL_ARGUMENTS (subprog_decl) = param_decl_list;
  DECL_RESULT (subprog_decl)    = build_decl (RESULT_DECL, 0, return_type);
  DECL_ARTIFICIAL (DECL_RESULT (subprog_decl)) = 1;
  DECL_IGNORED_P (DECL_RESULT (subprog_decl)) = 1;

  /* TREE_ADDRESSABLE is set on the result type to request the use of the
     target by-reference return mechanism.  This is not supported all the
     way down to RTL expansion with GCC 4, which ICEs on temporary creation
     attempts with such a type and expects DECL_BY_REFERENCE to be set on
     the RESULT_DECL instead - see gnat_genericize for more details.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (subprog_decl))))
    {
      tree result_decl = DECL_RESULT (subprog_decl);

      TREE_ADDRESSABLE (TREE_TYPE (result_decl)) = 0;
      DECL_BY_REFERENCE (result_decl) = 1;
    }

  if (asm_name)
    {
      SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);

      /* The expand_main_function circuitry expects "main_identifier_node" to
         designate the DECL_NAME of the 'main' entry point, in turn expected
         to be declared as the "main" function literally by default.  Ada
         program entry points are typically declared with a different name
         within the binder generated file, exported as 'main' to satisfy the
         system expectations.  Redirect main_identifier_node in this case.  */
      if (asm_name == main_identifier_node)
        main_identifier_node = DECL_NAME (subprog_decl);
    }

  process_attributes (subprog_decl, attr_list);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (subprog_decl, gnat_node);

  /* Output the assembler code and/or RTL for the declaration.  */
  rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);

  return subprog_decl;
}
/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
   body.  This routine needs to be invoked before processing the declarations
   appearing in the subprogram.  */

void
begin_subprog_body (tree subprog_decl)
{
  tree param_decl;

  current_function_decl = subprog_decl;
  announce_function (subprog_decl);

  /* Enter a new binding level and show that all the parameters belong to
     this function.  */
  gnat_pushlevel ();
  for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
       param_decl = TREE_CHAIN (param_decl))
    DECL_CONTEXT (param_decl) = subprog_decl;

  make_decl_rtl (subprog_decl);

  /* We handle pending sizes via the elaboration of types, so we don't need to
     save them.  This causes them to be marked as part of the outer function
     and then discarded.  */
  get_pending_sizes ();
}
/* Helper for the genericization callback.  Return a dereference of VAL
   if it is of a reference type.  */

static tree
convert_from_reference (tree val)
{
  tree value_type, ref;

  if (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE)
    return val;

  value_type = TREE_TYPE (TREE_TYPE (val));
  ref = build1 (INDIRECT_REF, value_type, val);

  /* See if what we reference is CONST or VOLATILE, which requires
     looking into array types to get to the component type.  */
  while (TREE_CODE (value_type) == ARRAY_TYPE)
    value_type = TREE_TYPE (value_type);

  TREE_READONLY (ref)
    = (TYPE_QUALS (value_type) & TYPE_QUAL_CONST);
  TREE_THIS_VOLATILE (ref)
    = (TYPE_QUALS (value_type) & TYPE_QUAL_VOLATILE);

  TREE_SIDE_EFFECTS (ref)
    = (TREE_THIS_VOLATILE (ref) || TREE_SIDE_EFFECTS (val));

  return ref;
}
/* Helper for the genericization callback.  Returns true if T denotes
   a RESULT_DECL with DECL_BY_REFERENCE set.  */

static bool
is_byref_result (tree t)
{
  return (TREE_CODE (t) == RESULT_DECL && DECL_BY_REFERENCE (t));
}
/* Tree walking callback for gnat_genericize.  Currently ...

   o Adjust references to the function's DECL_RESULT if it is marked
     DECL_BY_REFERENCE and so has had its type turned into a reference
     type at the end of the function compilation.  */

static tree
gnat_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  /* This implementation is modeled after what the C++ front-end is
     doing, basis of the downstream passes behavior.  */

  tree stmt = *stmt_p;
  struct pointer_set_t *p_set = (struct pointer_set_t *) data;

  /* If we have a direct mention of the result decl, dereference.  */
  if (is_byref_result (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  /* Otherwise, no need to walk the same tree twice.  */
  if (pointer_set_contains (p_set, stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  /* If we are taking the address of what now is a reference, just get the
     reference value.  */
  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_byref_result (TREE_OPERAND (stmt, 0)))
    {
      *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }

  /* Don't dereference a by-reference RESULT_DECL inside a RETURN_EXPR.  */
  else if (TREE_CODE (stmt) == RETURN_EXPR
           && TREE_OPERAND (stmt, 0)
           && is_byref_result (TREE_OPERAND (stmt, 0)))
    *walk_subtrees = 0;

  /* Don't look inside trees that cannot embed references of interest.  */
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  pointer_set_insert (p_set, *stmt_p);

  return NULL_TREE;
}
/* Perform lowering of Ada trees to GENERIC.  In particular:

   o Turn a DECL_BY_REFERENCE RESULT_DECL into a real by-reference decl
     and adjust all the references to this decl accordingly.  */

static void
gnat_genericize (tree fndecl)
{
  /* Prior to GCC 4, an explicit By_Reference result mechanism for a function
     was handled by simply setting TREE_ADDRESSABLE on the result type.
     Everything required to actually pass by invisible ref using the target
     mechanism (e.g. extra parameter) was handled at RTL expansion time.

     This doesn't work with GCC 4 any more for several reasons.  First, the
     gimplification process might need the creation of temporaries of this
     type, and the gimplifier ICEs on such attempts.  Second, the middle-end
     now relies on a different attribute for such cases (DECL_BY_REFERENCE on
     RESULT/PARM_DECLs), and expects the user invisible by-reference-ness to
     be explicitly accounted for by the front-end in the function body.

     We achieve the complete transformation in two steps:

     1/ create_subprog_decl performs early attribute tweaks: it clears
        TREE_ADDRESSABLE from the result type and sets DECL_BY_REFERENCE on
        the result decl.  The former ensures that the bit isn't set in the GCC
        tree saved for the function, so prevents ICEs on temporary creation.
        The latter we use here to trigger the rest of the processing.

     2/ This function performs the type transformation on the result decl
        and adjusts all the references to this decl from the function body
        accordingly.

     Clearing TREE_ADDRESSABLE from the type differs from the C++ front-end
     strategy, which escapes the gimplifier temporary creation issues by
     creating its own temporaries using TARGET_EXPR nodes.  Our way relies
     on simple specific support code in aggregate_value_p to look at the
     target function result decl explicitly.  */

  struct pointer_set_t *p_set;
  tree decl_result = DECL_RESULT (fndecl);

  if (!DECL_BY_REFERENCE (decl_result))
    return;

  /* Make the DECL_RESULT explicitly by-reference and adjust all the
     occurrences in the function body using the common tree-walking facility.
     We want to see every occurrence of the result decl to adjust the
     referencing tree, so need to use our own pointer set to control which
     trees should be visited again or not.  */
  p_set = pointer_set_create ();

  TREE_TYPE (decl_result) = build_reference_type (TREE_TYPE (decl_result));
  TREE_ADDRESSABLE (decl_result) = 0;
  relayout_decl (decl_result);

  walk_tree (&DECL_SAVED_TREE (fndecl), gnat_genericize_r, p_set, NULL);

  pointer_set_destroy (p_set);
}
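/* To illustrate: for a function whose result is returned by invisible
   reference, the RESULT_DECL's type T becomes a REFERENCE_TYPE to T here;
   gnat_genericize_r then rewrites plain uses of the result decl in the
   saved body into a dereference, and folds taking its address back to the
   decl itself.  */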
/* Finish the definition of the current subprogram BODY and compile it all the
   way to assembler language output.  ELAB_P tells if this is called for an
   elaboration routine, to be entirely discarded if empty.  */

void
end_subprog_body (tree body, bool elab_p)
{
  tree fndecl = current_function_decl;

  /* Mark the BLOCK for this level as being for this function and pop the
     level.  Since the vars in it are the parameters, clear them.  */
  BLOCK_VARS (current_binding_level->block) = 0;
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  gnat_poplevel ();

  /* We handle pending sizes via the elaboration of types, so we don't
     need to save them.  */
  get_pending_sizes ();

  /* Mark the RESULT_DECL as being in this subprogram.  */
  DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;

  DECL_SAVED_TREE (fndecl) = body;

  current_function_decl = DECL_CONTEXT (fndecl);

  /* We cannot track the location of errors past this point.  */
  error_gnat_node = Empty;

  /* If we're only annotating types, don't actually compile this function.  */
  if (type_annotate_only)
    return;

  /* Perform the required pre-gimplification transformations on the tree.  */
  gnat_genericize (fndecl);

  /* We do different things for nested and non-nested functions.
     ??? This should be in cgraph.  */
  if (!DECL_CONTEXT (fndecl))
    {
      gnat_gimplify_function (fndecl);

      /* If this is an empty elaboration proc, just discard the node.
         Otherwise, compile further.  */
      if (elab_p && empty_body_p (gimple_body (fndecl)))
        cgraph_remove_node (cgraph_node (fndecl));
      else
        cgraph_finalize_function (fndecl, false);
    }
  else
    /* Register this function with cgraph just far enough to get it
       added to our parent's nested function list.  */
    (void) cgraph_node (fndecl);
}
/* Convert FNDECL's code to GIMPLE and handle any nested functions.  */

static void
gnat_gimplify_function (tree fndecl)
{
  struct cgraph_node *cgn;

  dump_function (TDI_original, fndecl);
  gimplify_function_tree (fndecl);
  dump_function (TDI_generic, fndecl);

  /* Convert all nested functions to GIMPLE now.  We do things in this order
     so that items like VLA sizes are expanded properly in the context of the
     correct function.  */
  cgn = cgraph_node (fndecl);
  for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
    gnat_gimplify_function (cgn->decl);
}

tree
gnat_builtin_function (tree decl)
{
  gnat_pushdecl (decl, Empty);
  return decl;
}
/* Return an integer type with the number of bits of precision given by
   PRECISION.  UNSIGNEDP is nonzero if the type is unsigned; otherwise
   it is a signed type.  */

tree
gnat_type_for_size (unsigned precision, int unsignedp)
{
  tree t;
  char type_name[20];

  if (precision <= 2 * MAX_BITS_PER_WORD
      && signed_and_unsigned_types[precision][unsignedp])
    return signed_and_unsigned_types[precision][unsignedp];

  if (unsignedp)
    t = make_unsigned_type (precision);
  else
    t = make_signed_type (precision);

  if (precision <= 2 * MAX_BITS_PER_WORD)
    signed_and_unsigned_types[precision][unsignedp] = t;

  if (!TYPE_NAME (t))
    {
      sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
      TYPE_NAME (t) = get_identifier (type_name);
    }

  return t;
}
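/* Thus gnat_type_for_size (8, 1) returns a cached unsigned 8-bit type
   which, if it has no name yet, is given the name "UNSIGNED_8"; the signed
   variants get names like "SIGNED_32".  */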
/* Likewise for floating-point types.  */

static tree
float_type_for_precision (int precision, enum machine_mode mode)
{
  tree t;
  char type_name[20];

  if (float_types[(int) mode])
    return float_types[(int) mode];

  float_types[(int) mode] = t = make_node (REAL_TYPE);
  TYPE_PRECISION (t) = precision;
  layout_type (t);

  gcc_assert (TYPE_MODE (t) == mode);

  if (!TYPE_NAME (t))
    {
      sprintf (type_name, "FLOAT_%d", precision);
      TYPE_NAME (t) = get_identifier (type_name);
    }

  return t;
}
/* Return a data type that has machine mode MODE.  UNSIGNEDP selects
   an unsigned type; otherwise a signed type is returned.  */

tree
gnat_type_for_mode (enum machine_mode mode, int unsignedp)
{
  if (mode == BLKmode)
    return NULL_TREE;
  else if (mode == VOIDmode)
    return void_type_node;
  else if (COMPLEX_MODE_P (mode))
    return NULL_TREE;
  else if (SCALAR_FLOAT_MODE_P (mode))
    return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
  else if (SCALAR_INT_MODE_P (mode))
    return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
  else
    return NULL_TREE;
}
/* Return the unsigned version of a TYPE_NODE, a scalar type.  */

tree
gnat_unsigned_type (tree type_node)
{
  tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);

  if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
    {
      type = copy_node (type);
      TREE_TYPE (type) = type_node;
    }
  else if (TREE_TYPE (type_node)
           && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
           && TYPE_MODULAR_P (TREE_TYPE (type_node)))
    {
      type = copy_node (type);
      TREE_TYPE (type) = TREE_TYPE (type_node);
    }

  return type;
}

/* Return the signed version of a TYPE_NODE, a scalar type.  */

tree
gnat_signed_type (tree type_node)
{
  tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);

  if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
    {
      type = copy_node (type);
      TREE_TYPE (type) = type_node;
    }
  else if (TREE_TYPE (type_node)
           && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
           && TYPE_MODULAR_P (TREE_TYPE (type_node)))
    {
      type = copy_node (type);
      TREE_TYPE (type) = TREE_TYPE (type_node);
    }

  return type;
}
/* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
   transparently converted to each other.  */

int
gnat_types_compatible_p (tree t1, tree t2)
{
  enum tree_code code;

  /* This is the default criterion.  */
  if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
    return 1;

  /* We only check structural equivalence here.  */
  if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
    return 0;

  /* Array types are also compatible if they are constrained and have
     the same component type and the same domain.  */
  if (code == ARRAY_TYPE
      && TREE_TYPE (t1) == TREE_TYPE (t2)
      && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
          || (TYPE_DOMAIN (t1)
              && TYPE_DOMAIN (t2)
              && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
                                     TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
              && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
                                     TYPE_MAX_VALUE (TYPE_DOMAIN (t2))))))
    return 1;

  /* Padding record types are also compatible if they pad the same
     type and have the same constant size.  */
  if (code == RECORD_TYPE
      && TYPE_IS_PADDING_P (t1) && TYPE_IS_PADDING_P (t2)
      && TREE_TYPE (TYPE_FIELDS (t1)) == TREE_TYPE (TYPE_FIELDS (t2))
      && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
    return 1;

  return 0;
}
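/* In particular, two constrained array types built separately for the same
   component type and the same bounds compare compatible here even though
   their TYPE_MAIN_VARIANTs differ.  */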
/* EXP is an expression for the size of an object.  If this size contains
   discriminant references, replace them with the maximum (if MAX_P) or
   minimum (if !MAX_P) possible value of the discriminant.  */

tree
max_size (tree exp, bool max_p)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
    case tcc_constant:
      return exp;

    case tcc_vl_exp:
      if (code == CALL_EXPR)
        {
          tree *argarray;
          int i, n = call_expr_nargs (exp);
          gcc_assert (n > 0);

          argarray = (tree *) alloca (n * sizeof (tree));
          for (i = 0; i < n; i++)
            argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
          return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
        }
      break;

    case tcc_reference:
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
         modify.  Otherwise, we treat it like a variable.  */
      if (!CONTAINS_PLACEHOLDER_P (exp))
        return exp;

      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      return
        max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);

    case tcc_comparison:
      return max_p ? size_one_node : size_zero_node;

    case tcc_unary:
    case tcc_binary:
    case tcc_expression:
      switch (TREE_CODE_LENGTH (code))
        {
        case 1:
          if (code == NON_LVALUE_EXPR)
            return max_size (TREE_OPERAND (exp, 0), max_p);
          else
            return
              fold_build1 (code, type,
                           max_size (TREE_OPERAND (exp, 0),
                                     code == NEGATE_EXPR ? !max_p : max_p));

        case 2:
          if (code == COMPOUND_EXPR)
            return max_size (TREE_OPERAND (exp, 1), max_p);

          /* Calculate "(A ? B : C) - D" as "A ? B - D : C - D" which
             may provide a tighter bound on max_size.  */
          if (code == MINUS_EXPR
              && TREE_CODE (TREE_OPERAND (exp, 0)) == COND_EXPR)
            {
              tree lhs = fold_build2 (MINUS_EXPR, type,
                                      TREE_OPERAND (TREE_OPERAND (exp, 0), 1),
                                      TREE_OPERAND (exp, 1));
              tree rhs = fold_build2 (MINUS_EXPR, type,
                                      TREE_OPERAND (TREE_OPERAND (exp, 0), 2),
                                      TREE_OPERAND (exp, 1));
              return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
                                  max_size (lhs, max_p),
                                  max_size (rhs, max_p));
            }

          {
            tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
            tree rhs = max_size (TREE_OPERAND (exp, 1),
                                 code == MINUS_EXPR ? !max_p : max_p);

            /* Special-case wanting the maximum value of a MIN_EXPR.
               In that case, if one side overflows, return the other.
               sizetype is signed, but we know sizes are non-negative.
               Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
               overflowing or the maximum possible value and the RHS
               a variable.  */
            if (max_p
                && code == MIN_EXPR
                && TREE_CODE (rhs) == INTEGER_CST
                && TREE_OVERFLOW (rhs))
              return lhs;
            else if (max_p
                     && code == MIN_EXPR
                     && TREE_CODE (lhs) == INTEGER_CST
                     && TREE_OVERFLOW (lhs))
              return rhs;
            else if ((code == MINUS_EXPR || code == PLUS_EXPR)
                     && ((TREE_CODE (lhs) == INTEGER_CST
                          && TREE_OVERFLOW (lhs))
                         || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
                     && !TREE_CONSTANT (rhs))
              return lhs;
            else
              return fold_build2 (code, type, lhs, rhs);
          }

        case 3:
          if (code == SAVE_EXPR)
            return exp;
          else if (code == COND_EXPR)
            return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
                                max_size (TREE_OPERAND (exp, 1), max_p),
                                max_size (TREE_OPERAND (exp, 2), max_p));
        }

      /* Other tree classes cannot happen.  */
    default:
      break;
    }

  gcc_unreachable ();
}
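/* For example, given a size expression referencing a discriminant D through
   a PLACEHOLDER_EXPR, say <D> * 8, max_size with MAX_P true replaces the
   reference with the TYPE_MAX_VALUE of D's type and folds, yielding an
   upper bound for the object size.  */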
/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
   EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
   Return a constructor for the template.  */

tree
build_template (tree template_type, tree array_type, tree expr)
{
  tree template_elts = NULL_TREE;
  tree bound_list = NULL_TREE;
  tree field;

  while (TREE_CODE (array_type) == RECORD_TYPE
         && (TYPE_IS_PADDING_P (array_type)
             || TYPE_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
          && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.  Go down the
     field list of the template instead of the type chain because this
     array might be an Ada array of arrays and we can't tell where the
     nested arrays stop being the underlying object.  */
  for (field = TYPE_FIELDS (template_type); field;
       (bound_list
        ? (bound_list = TREE_CHAIN (bound_list))
        : (array_type = TREE_TYPE (array_type))),
       field = TREE_CHAIN (TREE_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
         for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
         DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
         This will give us a maximum range.  */
      if (bound_list)
        bounds = TREE_VALUE (bound_list);
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
        bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
      else if (expr && TREE_CODE (expr) == PARM_DECL
               && DECL_BY_COMPONENT_PTR_P (expr))
        bounds = TREE_TYPE (field);
      else
        gcc_unreachable ();

      min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
         substitute it from OBJECT.  */
      min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
      max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);

      template_elts = tree_cons (TREE_CHAIN (field), max,
                                 tree_cons (field, min, template_elts));
    }

  return gnat_build_constructor (template_type, nreverse (template_elts));
}
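/* Note that TEMPLATE_TYPE's fields come in (low bound, high bound) pairs,
   one pair per array dimension, which is why the loop above advances two
   fields at a time and pairs FIELD with TREE_CHAIN (FIELD).  */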
/* Build a 32bit VMS descriptor from a Mechanism_Type, which must specify
   a descriptor type, and the GCC type of an object.  Each FIELD_DECL
   in the type contains in its DECL_INITIAL the expression to use when
   a constructor is made for the type.  GNAT_ENTITY is an entity used
   to print out an error message if the mechanism cannot be applied to
   an object of that type and also for the name.  */

tree
build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
{
  tree record_type = make_node (RECORD_TYPE);
  tree pointer32_type;
  tree field_list = 0;
  int class;
  int dtype = 0;
  tree inner_type;
  int ndim;
  int i;
  tree *idx_arr;
  tree tem;

  /* If TYPE is an unconstrained array, use the underlying array type.  */
  if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));

  /* If this is an array, compute the number of dimensions in the array,
     get the index types, and point to the inner type.  */
  if (TREE_CODE (type) != ARRAY_TYPE)
    ndim = 0;
  else
    for (ndim = 1, inner_type = type;
         TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
         && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
         ndim++, inner_type = TREE_TYPE (inner_type))
      ;

  idx_arr = (tree *) alloca (ndim * sizeof (tree));

  if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
      && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
    for (i = ndim - 1, inner_type = type;
         i >= 0;
         i--, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);
  else
    for (i = 0, inner_type = type;
         i < ndim;
         i++, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);

  /* Now get the DTYPE value.  */
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      if (TYPE_VAX_FLOATING_POINT_P (type))
        switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
          {
          case 6:
            dtype = 10;
            break;
          case 9:
            dtype = 11;
            break;
          case 15:
            dtype = 27;
            break;
          }
      else
        switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
          {
          case 8:
            dtype = TYPE_UNSIGNED (type) ? 2 : 6;
            break;
          case 16:
            dtype = TYPE_UNSIGNED (type) ? 3 : 7;
            break;
          case 32:
            dtype = TYPE_UNSIGNED (type) ? 4 : 8;
            break;
          case 64:
            dtype = TYPE_UNSIGNED (type) ? 5 : 9;
            break;
          case 128:
            dtype = TYPE_UNSIGNED (type) ? 25 : 26;
            break;
          }
      break;

    case REAL_TYPE:
      dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
      break;

    case COMPLEX_TYPE:
      if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
          && TYPE_VAX_FLOATING_POINT_P (type))
        switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
          {
          case 6:
            dtype = 12;
            break;
          case 9:
            dtype = 13;
            break;
          case 15:
            dtype = 29;
          }
      else
        dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
      break;

    case ARRAY_TYPE:
      dtype = 14;
      break;

    default:
      break;
    }

  /* Get the CLASS value.  */
  switch (mech)
    {
    case By_Descriptor_A:
    case By_Short_Descriptor_A:
      class = 4;
      break;
    case By_Descriptor_NCA:
    case By_Short_Descriptor_NCA:
      class = 10;
      break;
    case By_Descriptor_SB:
    case By_Short_Descriptor_SB:
      class = 15;
      break;
    case By_Descriptor:
    case By_Short_Descriptor:
    case By_Descriptor_S:
    case By_Short_Descriptor_S:
    default:
      class = 1;
      break;
    }

  /* Make the type for a descriptor for VMS.  The first four fields
     are the same for all types.  */

  field_list
    = chainon (field_list,
               make_descriptor_field
               ("LENGTH", gnat_type_for_size (16, 1), record_type,
                size_in_bytes ((mech == By_Descriptor_A ||
                                mech == By_Short_Descriptor_A)
                               ? inner_type : type)));

  field_list = chainon (field_list,
                        make_descriptor_field ("DTYPE",
                                               gnat_type_for_size (8, 1),
                                               record_type, size_int (dtype)));
  field_list = chainon (field_list,
                        make_descriptor_field ("CLASS",
                                               gnat_type_for_size (8, 1),
                                               record_type, size_int (class)));

  /* Of course this will crash at run-time if the address space is not
     within the low 32 bits, but there is nothing else we can do.  */
  pointer32_type = build_pointer_type_for_mode (type, SImode, false);

  field_list
    = chainon (field_list,
               make_descriptor_field
               ("POINTER", pointer32_type, record_type,
                build_unary_op (ADDR_EXPR,
                                pointer32_type,
                                build0 (PLACEHOLDER_EXPR, type))));

  switch (mech)
    {
    case By_Descriptor:
    case By_Short_Descriptor:
    case By_Descriptor_S:
    case By_Short_Descriptor_S:
      break;

    case By_Descriptor_SB:
    case By_Short_Descriptor_SB:
      field_list
        = chainon (field_list,
                   make_descriptor_field
                   ("SB_L1", gnat_type_for_size (32, 1), record_type,
                    TREE_CODE (type) == ARRAY_TYPE
                    ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
      field_list
        = chainon (field_list,
                   make_descriptor_field
                   ("SB_U1", gnat_type_for_size (32, 1), record_type,
                    TREE_CODE (type) == ARRAY_TYPE
                    ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
      break;

    case By_Descriptor_A:
    case By_Short_Descriptor_A:
    case By_Descriptor_NCA:
    case By_Short_Descriptor_NCA:
      field_list = chainon (field_list,
                            make_descriptor_field ("SCALE",
                                                   gnat_type_for_size (8, 1),
                                                   record_type,
                                                   size_zero_node));

      field_list = chainon (field_list,
                            make_descriptor_field ("DIGITS",
                                                   gnat_type_for_size (8, 1),
                                                   record_type,
                                                   size_zero_node));

      field_list
        = chainon (field_list,
                   make_descriptor_field
                   ("AFLAGS", gnat_type_for_size (8, 1), record_type,
                    size_int ((mech == By_Descriptor_NCA ||
                               mech == By_Short_Descriptor_NCA)
                              ? 0
                              /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS.  */
                              : (TREE_CODE (type) == ARRAY_TYPE
                                 && TYPE_CONVENTION_FORTRAN_P (type)
                                 ? 224 : 192))));

      field_list = chainon (field_list,
                            make_descriptor_field ("DIMCT",
                                                   gnat_type_for_size (8, 1),
                                                   record_type,
                                                   size_int (ndim)));

      field_list = chainon (field_list,
                            make_descriptor_field ("ARSIZE",
                                                   gnat_type_for_size (32, 1),
                                                   record_type,
                                                   size_in_bytes (type)));

      /* Now build a pointer to the 0,0,0... element.  */
      tem = build0 (PLACEHOLDER_EXPR, type);
      for (i = 0, inner_type = type; i < ndim;
           i++, inner_type = TREE_TYPE (inner_type))
        tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
                      convert (TYPE_DOMAIN (inner_type), size_zero_node),
                      NULL_TREE, NULL_TREE);

      field_list
        = chainon (field_list,
                   make_descriptor_field
                   ("A0",
                    build_pointer_type_for_mode (inner_type, SImode, false),
                    record_type,
                    build1 (ADDR_EXPR,
                            build_pointer_type_for_mode (inner_type, SImode,
                                                         false),
                            tem)));

      /* Next come the addressing coefficients.  */
      tem = size_one_node;
      for (i = 0; i < ndim; i++)
        {
          char fname[3];
          tree idx_length
            = size_binop (MULT_EXPR, tem,
                          size_binop (PLUS_EXPR,
                                      size_binop (MINUS_EXPR,
                                                  TYPE_MAX_VALUE (idx_arr[i]),
                                                  TYPE_MIN_VALUE (idx_arr[i])),
                                      size_int (1)));

          fname[0] = ((mech == By_Descriptor_NCA ||
                       mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
          fname[1] = '0' + i, fname[2] = 0;
          field_list
            = chainon (field_list,
                       make_descriptor_field (fname,
                                              gnat_type_for_size (32, 1),
                                              record_type, idx_length));

          if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
            tem = idx_length;
        }

      /* Finally here are the bounds.  */
      for (i = 0; i < ndim; i++)
        {
          char fname[3];

          fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
          field_list
            = chainon (field_list,
                       make_descriptor_field
                       (fname, gnat_type_for_size (32, 1), record_type,
                        TYPE_MIN_VALUE (idx_arr[i])));

          fname[0] = 'U';
          field_list
            = chainon (field_list,
                       make_descriptor_field
                       (fname, gnat_type_for_size (32, 1), record_type,
                        TYPE_MAX_VALUE (idx_arr[i])));
        }
      break;

    default:
      post_error ("unsupported descriptor type for &", gnat_entity);
    }

  TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC");
  finish_record_type (record_type, field_list, 0, true);
  return record_type;
}
/* Build a 64bit VMS descriptor from a Mechanism_Type, which must specify
   a descriptor type, and the GCC type of an object.  Each FIELD_DECL
   in the type contains in its DECL_INITIAL the expression to use when
   a constructor is made for the type.  GNAT_ENTITY is an entity used
   to print out an error message if the mechanism cannot be applied to
   an object of that type and also for the name.  */

tree
build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
{
  tree record64_type = make_node (RECORD_TYPE);
  tree pointer64_type;
  tree field_list64 = 0;
  int class;
  int dtype = 0;
  tree inner_type;
  int ndim;
  int i;
  tree *idx_arr;
  tree tem;

  /* If TYPE is an unconstrained array, use the underlying array type.  */
  if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));

  /* If this is an array, compute the number of dimensions in the array,
     get the index types, and point to the inner type.  */
  if (TREE_CODE (type) != ARRAY_TYPE)
    ndim = 0;
  else
    for (ndim = 1, inner_type = type;
         TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
         && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
         ndim++, inner_type = TREE_TYPE (inner_type))
      ;

  idx_arr = (tree *) alloca (ndim * sizeof (tree));

  if (mech != By_Descriptor_NCA
      && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
    for (i = ndim - 1, inner_type = type;
         i >= 0;
         i--, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);
  else
    for (i = 0, inner_type = type;
         i < ndim;
         i++, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);

  /* Now get the DTYPE value.  */
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      if (TYPE_VAX_FLOATING_POINT_P (type))
        switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
          {
          case 6:
            dtype = 10;
            break;
          case 9:
            dtype = 11;
            break;
          case 15:
            dtype = 27;
            break;
          }
      else
        switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
          {
          case 8:
            dtype = TYPE_UNSIGNED (type) ? 2 : 6;
            break;
          case 16:
            dtype = TYPE_UNSIGNED (type) ? 3 : 7;
            break;
          case 32:
            dtype = TYPE_UNSIGNED (type) ? 4 : 8;
            break;
          case 64:
            dtype = TYPE_UNSIGNED (type) ? 5 : 9;
            break;
          case 128:
            dtype = TYPE_UNSIGNED (type) ? 25 : 26;
            break;
          }
      break;

    case REAL_TYPE:
      dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
      break;

    case COMPLEX_TYPE:
      if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
          && TYPE_VAX_FLOATING_POINT_P (type))
        switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
          {
          case 6:
            dtype = 12;
            break;
          case 9:
            dtype = 13;
            break;
          case 15:
            dtype = 29;
          }
      else
        dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
      break;

    case ARRAY_TYPE:
      dtype = 14;
      break;

    default:
      break;
    }

  /* Get the CLASS value.  */
  switch (mech)
    {
    case By_Descriptor_A:
      class = 4;
      break;
    case By_Descriptor_NCA:
      class = 10;
      break;
    case By_Descriptor_SB:
      class = 15;
      break;
    case By_Descriptor:
    case By_Descriptor_S:
    default:
      class = 1;
      break;
    }

  /* Make the type for a 64bit descriptor for VMS.  The first six fields
     are the same for all types.  */

  field_list64 = chainon (field_list64,
                          make_descriptor_field ("MBO",
                                                 gnat_type_for_size (16, 1),
                                                 record64_type, size_int (1)));

  field_list64 = chainon (field_list64,
                          make_descriptor_field ("DTYPE",
                                                 gnat_type_for_size (8, 1),
                                                 record64_type, size_int (dtype)));
  field_list64 = chainon (field_list64,
                          make_descriptor_field ("CLASS",
                                                 gnat_type_for_size (8, 1),
                                                 record64_type, size_int (class)));

  field_list64 = chainon (field_list64,
                          make_descriptor_field ("MBMO",
                                                 gnat_type_for_size (32, 1),
                                                 record64_type, ssize_int (-1)));

  field_list64
    = chainon (field_list64,
               make_descriptor_field
               ("LENGTH", gnat_type_for_size (64, 1), record64_type,
                size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));

  pointer64_type = build_pointer_type_for_mode (type, DImode, false);

  field_list64
    = chainon (field_list64,
               make_descriptor_field
               ("POINTER", pointer64_type, record64_type,
                build_unary_op (ADDR_EXPR,
                                pointer64_type,
                                build0 (PLACEHOLDER_EXPR, type))));

  switch (mech)
    {
    case By_Descriptor:
    case By_Descriptor_S:
      break;

    case By_Descriptor_SB:
      field_list64
        = chainon (field_list64,
                   make_descriptor_field
                   ("SB_L1", gnat_type_for_size (64, 1), record64_type,
                    TREE_CODE (type) == ARRAY_TYPE
                    ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
      field_list64
        = chainon (field_list64,
                   make_descriptor_field
                   ("SB_U1", gnat_type_for_size (64, 1), record64_type,
                    TREE_CODE (type) == ARRAY_TYPE
                    ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
      break;

    case By_Descriptor_A:
    case By_Descriptor_NCA:
      field_list64 = chainon (field_list64,
                              make_descriptor_field ("SCALE",
                                                     gnat_type_for_size (8, 1),
                                                     record64_type,
                                                     size_zero_node));

      field_list64 = chainon (field_list64,
                              make_descriptor_field ("DIGITS",
                                                     gnat_type_for_size (8, 1),
                                                     record64_type,
                                                     size_zero_node));

      field_list64
        = chainon (field_list64,
                   make_descriptor_field
                   ("AFLAGS", gnat_type_for_size (8, 1), record64_type,
                    size_int (mech == By_Descriptor_NCA
                              ? 0
                              /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS.  */
                              : (TREE_CODE (type) == ARRAY_TYPE
                                 && TYPE_CONVENTION_FORTRAN_P (type)
                                 ? 224 : 192))));

      field_list64 = chainon (field_list64,
                              make_descriptor_field ("DIMCT",
                                                     gnat_type_for_size (8, 1),
                                                     record64_type,
                                                     size_int (ndim)));

      field_list64 = chainon (field_list64,
                              make_descriptor_field ("MBZ",
                                                     gnat_type_for_size (32, 1),
                                                     record64_type,
                                                     size_int (0)));

      field_list64 = chainon (field_list64,
                              make_descriptor_field ("ARSIZE",
                                                     gnat_type_for_size (64, 1),
                                                     record64_type,
                                                     size_in_bytes (type)));

      /* Now build a pointer to the 0,0,0... element.  */
      tem = build0 (PLACEHOLDER_EXPR, type);
      for (i = 0, inner_type = type; i < ndim;
           i++, inner_type = TREE_TYPE (inner_type))
        tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
                      convert (TYPE_DOMAIN (inner_type), size_zero_node),
                      NULL_TREE, NULL_TREE);

      field_list64
        = chainon (field_list64,
                   make_descriptor_field
                   ("A0",
                    build_pointer_type_for_mode (inner_type, DImode, false),
                    record64_type,
                    build1 (ADDR_EXPR,
                            build_pointer_type_for_mode (inner_type, DImode,
                                                         false),
                            tem)));

      /* Next come the addressing coefficients.  */
      tem = size_one_node;
      for (i = 0; i < ndim; i++)
        {
          char fname[3];
          tree idx_length
            = size_binop (MULT_EXPR, tem,
                          size_binop (PLUS_EXPR,
                                      size_binop (MINUS_EXPR,
                                                  TYPE_MAX_VALUE (idx_arr[i]),
                                                  TYPE_MIN_VALUE (idx_arr[i])),
                                      size_int (1)));

          fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
          fname[1] = '0' + i, fname[2] = 0;
          field_list64
            = chainon (field_list64,
                       make_descriptor_field (fname,
                                              gnat_type_for_size (64, 1),
                                              record64_type, idx_length));

          if (mech == By_Descriptor_NCA)
            tem = idx_length;
        }

      /* Finally here are the bounds.  */
      for (i = 0; i < ndim; i++)
        {
          char fname[3];

          fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
          field_list64
            = chainon (field_list64,
                       make_descriptor_field
                       (fname, gnat_type_for_size (64, 1), record64_type,
                        TYPE_MIN_VALUE (idx_arr[i])));

          fname[0] = 'U';
          field_list64
            = chainon (field_list64,
                       make_descriptor_field
                       (fname, gnat_type_for_size (64, 1), record64_type,
                        TYPE_MAX_VALUE (idx_arr[i])));
        }
      break;

    default:
      post_error ("unsupported descriptor type for &", gnat_entity);
    }

  TYPE_NAME (record64_type) = create_concat_name (gnat_entity, "DESC64");
  finish_record_type (record64_type, field_list64, 0, true);
  return record64_type;
}
/* Utility routine for above code to make a field.  */

static tree
make_descriptor_field (const char *name, tree type,
                       tree rec_type, tree initial)
{
  tree field
    = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);

  DECL_INITIAL (field) = initial;
  return field;
}
/* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
   regular pointer or fat pointer type.  GNAT_SUBPROG is the subprogram to
   which the VMS descriptor is passed.  */

static tree
convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
{
  tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
  tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
  /* The CLASS field is the 3rd field in the descriptor.  */
  tree class = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
  /* The POINTER field is the 6th field in the descriptor.  */
  tree pointer64 = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (class)));

  /* Retrieve the value of the POINTER field.  */
  tree gnu_expr64
    = build3 (COMPONENT_REF, TREE_TYPE (pointer64), desc, pointer64, NULL_TREE);

  if (POINTER_TYPE_P (gnu_type))
    return convert (gnu_type, gnu_expr64);

  else if (TYPE_FAT_POINTER_P (gnu_type))
    {
      tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
      tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
      tree template_type = TREE_TYPE (p_bounds_type);
      tree min_field = TYPE_FIELDS (template_type);
      tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
      tree template, template_addr, aflags, dimct, t, u;
      /* See the head comment of build_vms_descriptor.  */
      int iclass = TREE_INT_CST_LOW (DECL_INITIAL (class));
      tree lfield, ufield;

      /* Convert POINTER to the type of the P_ARRAY field.  */
      gnu_expr64 = convert (p_array_type, gnu_expr64);

      switch (iclass)
        {
        case 1:  /* Class S  */
        case 15: /* Class SB */
          /* Build {1, LENGTH} template; LENGTH64 is the 5th field.  */
          t = TREE_CHAIN (TREE_CHAIN (class));
          t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
          t = tree_cons (min_field,
                         convert (TREE_TYPE (min_field), integer_one_node),
                         tree_cons (max_field,
                                    convert (TREE_TYPE (max_field), t),
                                    NULL_TREE));
          template = gnat_build_constructor (template_type, t);
          template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);

          /* For class S, we are done.  */
          if (iclass == 1)
            break;

          /* Test that we really have a SB descriptor, like DEC Ada.  */
          t = build3 (COMPONENT_REF, TREE_TYPE (class), desc, class, NULL);
          u = convert (TREE_TYPE (class), DECL_INITIAL (class));
          u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
          /* If so, there is already a template in the descriptor and
             it is located right after the POINTER field.  The fields are
             64bits so they must be repacked.  */
          t = TREE_CHAIN (pointer64);
          lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
          lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);

          t = TREE_CHAIN (t);
          ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
          ufield = convert
           (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);

          /* Build the template in the form of a constructor.  */
          t = tree_cons (TYPE_FIELDS (template_type), lfield,
                         tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
                                    ufield, NULL_TREE));
          template = gnat_build_constructor (template_type, t);

          /* Otherwise use the {1, LENGTH} template we build above.  */
          template_addr = build3 (COND_EXPR, p_bounds_type, u,
                                  build_unary_op (ADDR_EXPR, p_bounds_type,
                                                  template),
                                  template_addr);
          break;

        case 4:  /* Class A */
          /* The AFLAGS field is the 3rd field after the pointer in the
             descriptor.  */
          t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer64)));
          aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
          /* The DIMCT field is the next field in the descriptor after
             AFLAGS.  */
          t = TREE_CHAIN (t);
          dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
          /* Raise CONSTRAINT_ERROR if either more than 1 dimension
             or FL_COEFF or FL_BOUNDS not set.  */
          u = build_int_cst (TREE_TYPE (aflags), 192);
          u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
                               build_binary_op (NE_EXPR, integer_type_node,
                                                dimct,
                                                convert (TREE_TYPE (dimct),
                                                         size_one_node)),
                               build_binary_op (NE_EXPR, integer_type_node,
                                                build2 (BIT_AND_EXPR,
                                                        TREE_TYPE (aflags),
                                                        aflags, u),
                                                u));
          /* There is already a template in the descriptor and it is located
             in block 3.  The fields are 64bits so they must be repacked.  */
          t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN
              (t)))));
          lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
          lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);

          t = TREE_CHAIN (t);
          ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
          ufield = convert
           (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);

          /* Build the template in the form of a constructor.  */
          t = tree_cons (TYPE_FIELDS (template_type), lfield,
                         tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
                                    ufield, NULL_TREE));
          template = gnat_build_constructor (template_type, t);
          template = build3 (COND_EXPR, p_bounds_type, u,
                             build_call_raise (CE_Length_Check_Failed, Empty,
                                               N_Raise_Constraint_Error),
                             template);
          template_addr = build_unary_op (ADDR_EXPR, p_bounds_type, template);
          break;

        case 10: /* Class NCA */
        default:
          post_error ("unsupported descriptor type for &", gnat_subprog);
          template_addr = integer_zero_node;
          break;
        }

      /* Build the fat pointer in the form of a constructor.  */
      t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr64,
                     tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
                                template_addr, NULL_TREE));
      return gnat_build_constructor (gnu_type, t);
    }

  else
    gcc_unreachable ();
}
/* Convert GNU_EXPR, a pointer to a 32bit VMS descriptor, to GNU_TYPE, a
   regular pointer or fat pointer type.  GNAT_SUBPROG is the subprogram to
   which the VMS descriptor is passed.  */

static tree
convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
{
  tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
  tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
  /* The CLASS field is the 3rd field in the descriptor.  */
  tree class = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
  /* The POINTER field is the 4th field in the descriptor.  */
  tree pointer = TREE_CHAIN (class);

  /* Retrieve the value of the POINTER field.  */
  tree gnu_expr32
    = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);

  if (POINTER_TYPE_P (gnu_type))
    return convert (gnu_type, gnu_expr32);

  else if (TYPE_FAT_POINTER_P (gnu_type))
    {
      tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
      tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
      tree template_type = TREE_TYPE (p_bounds_type);
      tree min_field = TYPE_FIELDS (template_type);
      tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
      tree template, template_addr, aflags, dimct, t, u;
      /* See the head comment of build_vms_descriptor.  */
      int iclass = TREE_INT_CST_LOW (DECL_INITIAL (class));

      /* Convert POINTER to the type of the P_ARRAY field.  */
      gnu_expr32 = convert (p_array_type, gnu_expr32);

      switch (iclass)
        {
        case 1:  /* Class S  */
        case 15: /* Class SB */
          /* Build {1, LENGTH} template; LENGTH is the 1st field.  */
          t = TYPE_FIELDS (desc_type);
          t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
          t = tree_cons (min_field,
                         convert (TREE_TYPE (min_field), integer_one_node),
                         tree_cons (max_field,
                                    convert (TREE_TYPE (max_field), t),
                                    NULL_TREE));
          template = gnat_build_constructor (template_type, t);
          template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);

          /* For class S, we are done.  */
          if (iclass == 1)
            break;

          /* Test that we really have a SB descriptor, like DEC Ada.  */
          t = build3 (COMPONENT_REF, TREE_TYPE (class), desc, class, NULL);
          u = convert (TREE_TYPE (class), DECL_INITIAL (class));
          u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
          /* If so, there is already a template in the descriptor and
             it is located right after the POINTER field.  */
          t = TREE_CHAIN (pointer);
          template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
          /* Otherwise use the {1, LENGTH} template we build above.  */
          template_addr = build3 (COND_EXPR, p_bounds_type, u,
                                  build_unary_op (ADDR_EXPR, p_bounds_type,
                                                  template),
                                  template_addr);
          break;

        case 4:  /* Class A */
          /* The AFLAGS field is the 7th field in the descriptor.  */
          t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer)));
          aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
          /* The DIMCT field is the 8th field in the descriptor.  */
          t = TREE_CHAIN (t);
          dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
          /* Raise CONSTRAINT_ERROR if either more than 1 dimension
             or FL_COEFF or FL_BOUNDS not set.  */
          u = build_int_cst (TREE_TYPE (aflags), 192);
          u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
                               build_binary_op (NE_EXPR, integer_type_node,
                                                dimct,
                                                convert (TREE_TYPE (dimct),
                                                         size_one_node)),
                               build_binary_op (NE_EXPR, integer_type_node,
                                                build2 (BIT_AND_EXPR,
                                                        TREE_TYPE (aflags),
                                                        aflags, u),
                                                u));
          /* There is already a template in the descriptor and it is
             located at the start of block 3 (12th field).  */
          t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (t))));
          template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
          template = build3 (COND_EXPR, p_bounds_type, u,
                             build_call_raise (CE_Length_Check_Failed, Empty,
                                               N_Raise_Constraint_Error),
                             template);
          template_addr = build_unary_op (ADDR_EXPR, p_bounds_type, template);
          break;

        case 10: /* Class NCA */
        default:
          post_error ("unsupported descriptor type for &", gnat_subprog);
          template_addr = integer_zero_node;
          break;
        }

      /* Build the fat pointer in the form of a constructor.  */
      t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr32,
                     tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
                                template_addr, NULL_TREE));

      return gnat_build_constructor (gnu_type, t);
    }

  else
    gcc_unreachable ();
}
/* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
   pointer or fat pointer type.  GNU_EXPR_ALT_TYPE is the alternate (32-bit)
   pointer type of GNU_EXPR.  GNAT_SUBPROG is the subprogram to which the
   VMS descriptor is passed.  */

static tree
convert_vms_descriptor (tree gnu_type, tree gnu_expr, tree gnu_expr_alt_type,
                        Entity_Id gnat_subprog)
{
  tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
  tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
  tree mbo = TYPE_FIELDS (desc_type);
  const char *mbostr = IDENTIFIER_POINTER (DECL_NAME (mbo));
  tree mbmo = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (mbo)));
  tree is64bit, gnu_expr32, gnu_expr64;

  /* If the field name is not MBO, it must be 32-bit and no alternate.
     Otherwise primary must be 64-bit and alternate 32-bit.  */
  if (strcmp (mbostr, "MBO") != 0)
    return convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);

  /* Build the test for 64-bit descriptor.  */
  mbo = build3 (COMPONENT_REF, TREE_TYPE (mbo), desc, mbo, NULL_TREE);
  mbmo = build3 (COMPONENT_REF, TREE_TYPE (mbmo), desc, mbmo, NULL_TREE);
  is64bit
    = build_binary_op (TRUTH_ANDIF_EXPR, integer_type_node,
                       build_binary_op (EQ_EXPR, integer_type_node,
                                        convert (integer_type_node, mbo),
                                        integer_one_node),
                       build_binary_op (EQ_EXPR, integer_type_node,
                                        convert (integer_type_node, mbmo),
                                        integer_minus_one_node));

  /* Build the 2 possible end results.  */
  gnu_expr64 = convert_vms_descriptor64 (gnu_type, gnu_expr, gnat_subprog);
  gnu_expr = fold_convert (gnu_expr_alt_type, gnu_expr);
  gnu_expr32 = convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);

  return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
}
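/* The code generated above therefore checks, at run time, that MBO reads 1
   and MBMO reads -1 (the values written into those fields by
   build_vms_descriptor) to decide whether the incoming descriptor uses the
   64-bit layout, and picks the matching conversion.  */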
/* Build a stub for the subprogram specified by the GCC tree GNU_SUBPROG
   and the GNAT node GNAT_SUBPROG.  */

void
build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
{
  tree gnu_subprog_type, gnu_subprog_addr, gnu_subprog_call;
  tree gnu_stub_param, gnu_param_list, gnu_arg_types, gnu_param;
  tree gnu_stub_decl = DECL_FUNCTION_STUB (gnu_subprog);
  tree gnu_body;

  gnu_subprog_type = TREE_TYPE (gnu_subprog);
  gnu_param_list = NULL_TREE;

  begin_subprog_body (gnu_stub_decl);
  gnat_pushlevel ();

  start_stmt_group ();

  /* Loop over the parameters of the stub and translate any of them
     passed by descriptor into a by reference one.  */
  for (gnu_stub_param = DECL_ARGUMENTS (gnu_stub_decl),
       gnu_arg_types = TYPE_ARG_TYPES (gnu_subprog_type);
       gnu_stub_param;
       gnu_stub_param = TREE_CHAIN (gnu_stub_param),
       gnu_arg_types = TREE_CHAIN (gnu_arg_types))
    {
      if (DECL_BY_DESCRIPTOR_P (gnu_stub_param))
        gnu_param
          = convert_vms_descriptor (TREE_VALUE (gnu_arg_types),
                                    gnu_stub_param,
                                    DECL_PARM_ALT_TYPE (gnu_stub_param),
                                    gnat_subprog);
      else
        gnu_param = gnu_stub_param;

      gnu_param_list = tree_cons (NULL_TREE, gnu_param, gnu_param_list);
    }

  gnu_body = end_stmt_group ();

  /* Invoke the internal subprogram.  */
  gnu_subprog_addr = build1 (ADDR_EXPR, build_pointer_type (gnu_subprog_type),
                             gnu_subprog);
  gnu_subprog_call = build_call_list (TREE_TYPE (gnu_subprog_type),
                                      gnu_subprog_addr,
                                      nreverse (gnu_param_list));

  /* Propagate the return value, if any.  */
  if (VOID_TYPE_P (TREE_TYPE (gnu_subprog_type)))
    append_to_statement_list (gnu_subprog_call, &gnu_body);
  else
    append_to_statement_list (build_return_expr (DECL_RESULT (gnu_stub_decl),
                                                 gnu_subprog_call),
                              &gnu_body);

  gnat_poplevel ();

  allocate_struct_function (gnu_stub_decl, false);
  end_subprog_body (gnu_body, false);
}
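/* The stub therefore has exactly the profile of GNU_SUBPROG: only the
   parameters marked DECL_BY_DESCRIPTOR_P are rewritten through
   convert_vms_descriptor, the others are forwarded unchanged.  */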
/* Build a type to be used to represent an aliased object whose nominal
   type is an unconstrained array.  This consists of a RECORD_TYPE containing
   a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
   ARRAY_TYPE.  If ARRAY_TYPE is that of the unconstrained array, this
   is used to represent an arbitrary unconstrained object.  Use NAME
   as the name of the record.  */

tree
build_unc_object_type (tree template_type, tree object_type, tree name)
{
  tree type = make_node (RECORD_TYPE);
  tree template_field = create_field_decl (get_identifier ("BOUNDS"),
                                           template_type, type, 0, 0, 0, 1);
  tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
                                        type, 0, 0, 0, 1);

  TYPE_NAME (type) = name;
  TYPE_CONTAINS_TEMPLATE_P (type) = 1;
  finish_record_type (type,
                      chainon (chainon (NULL_TREE, template_field),
                               array_field),
                      0, false);

  return type;
}
/* Same, taking a thin or fat pointer type instead of a template type.  */

tree
build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
                                tree name)
{
  tree template_type;

  gcc_assert (TYPE_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));

  template_type
    = (TYPE_FAT_POINTER_P (thin_fat_ptr_type)
       ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
       : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
  return build_unc_object_type (template_type, object_type, name);
}
/* Shift the component offsets within an unconstrained object TYPE to make it
   suitable for use as a designated type for thin pointers.  */

void
shift_unc_components_for_thin_pointers (tree type)
{
  /* Thin pointer values designate the ARRAY data of an unconstrained object,
     allocated past the BOUNDS template.  The designated type is adjusted to
     have ARRAY at position zero and the template at a negative offset, so
     that COMPONENT_REFs on (*thin_ptr) designate the proper location.  */

  tree bounds_field = TYPE_FIELDS (type);
  tree array_field = TREE_CHAIN (TYPE_FIELDS (type));

  DECL_FIELD_OFFSET (bounds_field)
    = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));

  DECL_FIELD_OFFSET (array_field) = size_zero_node;
  DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
}
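/* E.g. if the BOUNDS template occupies the first 8 bytes and ARRAY starts
   at byte 8, the shift gives ARRAY offset 0 and BOUNDS offset -8, so code
   dereferencing a thin pointer still reaches the bounds, at a negative
   offset from the array data.  (Sizes here are illustrative only.)  */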
/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
   In the normal case this is just two adjustments, but we have more to
   do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE.  */

void
update_pointer_to (tree old_type, tree new_type)
{
  tree ptr = TYPE_POINTER_TO (old_type);
  tree ref = TYPE_REFERENCE_TO (old_type);
  tree ptr1, ref1;
  tree type;

  /* If this is the main variant, process all the other variants first.  */
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
    for (type = TYPE_NEXT_VARIANT (old_type); type;
         type = TYPE_NEXT_VARIANT (type))
      update_pointer_to (type, new_type);

  /* If no pointers and no references, we are done.  */
  if (!ptr && !ref)
    return;

  /* Merge the old type qualifiers in the new type.

     Each old variant has qualifiers for specific reasons, and the new
     designated type as well.  Each set of qualifiers represents useful
     information grabbed at some point, and merging the two simply unifies
     these inputs into the final type description.

     Consider for instance a volatile type frozen after an access to constant
     type designating it; after the designated type's freeze, we get here with
     a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
     when the access type was processed.  We will make a volatile and readonly
     designated type, because that's what it really is.

     We might also get here for a non-dummy OLD_TYPE variant with different
     qualifiers than those of NEW_TYPE, for instance in some cases of pointers
     to private record type elaboration (see the comments around the call to
     this routine in gnat_to_gnu_entity <E_Access_Type>).  We have to merge
     the qualifiers in those cases too, to avoid accidentally discarding the
     initial set, and will often end up with OLD_TYPE == NEW_TYPE then.  */
  new_type
    = build_qualified_type (new_type,
                            TYPE_QUALS (old_type) | TYPE_QUALS (new_type));

  /* If the old type and the new type are identical, there is nothing to do.  */
  if (old_type == new_type)
    return;

  /* Otherwise, first handle the simple case.  */
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
    {
      TYPE_POINTER_TO (new_type) = ptr;
      TYPE_REFERENCE_TO (new_type) = ref;

      for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
        for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
             ptr1 = TYPE_NEXT_VARIANT (ptr1))
          TREE_TYPE (ptr1) = new_type;

      for (; ref; ref = TYPE_NEXT_REF_TO (ref))
        for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
             ref1 = TYPE_NEXT_VARIANT (ref1))
          TREE_TYPE (ref1) = new_type;
    }

  /* Now deal with the unconstrained array case.  In this case the "pointer"
     is actually a RECORD_TYPE where both fields are pointers to dummy nodes.
     Turn them into pointers to the correct types using update_pointer_to.  */
  else if (!TYPE_FAT_POINTER_P (ptr))
    gcc_unreachable ();

  else
    {
      tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
      tree array_field = TYPE_FIELDS (ptr);
      tree bounds_field = TREE_CHAIN (TYPE_FIELDS (ptr));
      tree new_ptr = TYPE_POINTER_TO (new_type);
      tree new_ref;
      tree var;

      /* Make pointers to the dummy template point to the real template.  */
      update_pointer_to
        (TREE_TYPE (TREE_TYPE (bounds_field)),
         TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_ptr)))));

      /* The references to the template bounds present in the array type
         are made through a PLACEHOLDER_EXPR of type NEW_PTR.  Since we
         are updating PTR to make it a full replacement for NEW_PTR as
         pointer to NEW_TYPE, we must rework the PLACEHOLDER_EXPR so as
         to make it of type PTR.  */
      new_ref = build3 (COMPONENT_REF, TREE_TYPE (bounds_field),
                        build0 (PLACEHOLDER_EXPR, ptr),
                        bounds_field, NULL_TREE);

      /* Create the new array for the new PLACEHOLDER_EXPR and make pointers
         to the dummy array point to it.  */
      update_pointer_to
        (TREE_TYPE (TREE_TYPE (array_field)),
         substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))),
                             TREE_CHAIN (TYPE_FIELDS (new_ptr)), new_ref));

      /* Make PTR the pointer to NEW_TYPE.  */
      TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
        = TREE_TYPE (new_type) = ptr;

      for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
        SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);

      /* Now handle updating the allocation record, what the thin pointer
         points to.  Update all pointers from the old record into the new
         one, update the type of the array field, and recompute the size.  */
      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);

      TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
        = TREE_TYPE (TREE_TYPE (array_field));

      /* The size recomputation needs to account for alignment constraints, so
         we let layout_type work it out.  This will reset the field offsets to
         what they would be in a regular record, so we shift them back to what
         we want them to be for a thin pointer designated type afterwards.  */
      DECL_SIZE (TYPE_FIELDS (new_obj_rec)) = 0;
      DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))) = 0;
      TYPE_SIZE (new_obj_rec) = 0;
      layout_type (new_obj_rec);

      shift_unc_components_for_thin_pointers (new_obj_rec);

      /* We are done, at last.  */
      rest_of_record_type_compilation (ptr);
    }
}
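
/* As a concrete instance of the qualifier merging performed above: with a
   dummy OLD_TYPE carrying TYPE_QUAL_CONST (created while processing an
   access-to-constant type) and a NEW_TYPE carrying TYPE_QUAL_VOLATILE,

     build_qualified_type (new_type,
                           TYPE_QUALS (old_type) | TYPE_QUALS (new_type))

   returns the variant that is both readonly and volatile, matching the
   scenario described in the comment inside update_pointer_to.  */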
/* Convert EXPR, a pointer to a constrained array, into a pointer to an
   unconstrained one.  This involves making or finding a template.  */

static tree
convert_to_fat_pointer (tree type, tree expr)
{
  tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
  tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
  tree etype = TREE_TYPE (expr);
  tree template;

  /* If EXPR is null, make a fat pointer that contains null pointers to the
     template and array.  */
  if (integer_zerop (expr))
    return
      gnat_build_constructor
        (type,
         tree_cons (TYPE_FIELDS (type),
                    convert (p_array_type, expr),
                    tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
                               convert (build_pointer_type (template_type),
                                        expr),
                               NULL_TREE)));

  /* If EXPR is a thin pointer, make the template and data from the record.  */
  else if (TYPE_THIN_POINTER_P (etype))
    {
      tree fields = TYPE_FIELDS (TREE_TYPE (etype));

      expr = save_expr (expr);
      if (TREE_CODE (expr) == ADDR_EXPR)
        expr = TREE_OPERAND (expr, 0);
      else
        expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);

      template = build_component_ref (expr, NULL_TREE, fields, false);
      expr = build_unary_op (ADDR_EXPR, NULL_TREE,
                             build_component_ref (expr, NULL_TREE,
                                                  TREE_CHAIN (fields), false));
    }

  /* Otherwise, build the constructor for the template.  */
  else
    template = build_template (template_type, TREE_TYPE (etype), expr);

  /* The final result is a constructor for the fat pointer.

     If EXPR is an argument of a foreign convention subprogram, the type it
     points to is directly the component type.  In this case, the expression
     type may not match the corresponding FIELD_DECL type at this point, so we
     call "convert" here to fix that up if necessary.  This type consistency is
     required, for instance because it ensures that possible later folding of
     COMPONENT_REFs against this constructor always yields something of the
     same type as the initial reference.

     Note that the call to "build_template" above is still fine because it
     will only refer to the provided TEMPLATE_TYPE in this case.  */
  return
    gnat_build_constructor
      (type,
       tree_cons (TYPE_FIELDS (type),
                  convert (p_array_type, expr),
                  tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
                             build_unary_op (ADDR_EXPR, NULL_TREE, template),
                             NULL_TREE)));
}
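
/* A typical situation calling for the conversion above, sketched in Ada
   (the names are only illustrative):

      type Int_Array is array (Integer range <>) of Integer;
      type Int_Array_Access is access all Int_Array;

      Obj : aliased Int_Array (1 .. 10);
      Ptr : Int_Array_Access := Obj'Access;

   Obj'Access designates a constrained array, so initializing Ptr requires
   a fat pointer: build_template records the bounds 1 .. 10 and the
   two-field constructor returned above packages the data pointer together
   with the address of that template.  */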
/* Convert to a thin pointer type, TYPE.  The only thing we know how to
   convert is something that is a fat pointer, so convert EXPR to it first
   if it is not already a fat pointer.  */

static tree
convert_to_thin_pointer (tree type, tree expr)
{
  if (!TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
    expr
      = convert_to_fat_pointer
        (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);

  /* We get the pointer to the data and use a NOP_EXPR to make it the
     proper GCC type.  */
  expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
                              false);
  expr = build1 (NOP_EXPR, type, expr);

  return expr;
}
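
/* Unlike the two-word fat pointer built above from a data pointer and a
   template pointer, a thin pointer is a single word designating the ARRAY
   part of an object laid out by shift_unc_components_for_thin_pointers,
   with the bounds template at a negative offset.  This is why the
   conversion only needs to fetch the data pointer and reinterpret it with
   a NOP_EXPR; no template object is materialized.  */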
/* Create an expression whose value is that of EXPR,
   converted to type TYPE.  The TREE_TYPE of the value
   is always TYPE.  This function implements all reasonable
   conversions; callers should filter out those that are
   not permitted by the language being compiled.  */

tree
convert (tree type, tree expr)
{
  enum tree_code code = TREE_CODE (type);
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);

  /* If EXPR is already the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If both input and output have padding and are of variable size, do this
     as an unchecked conversion.  Likewise if one is a mere variant of the
     other, so we avoid a pointless unpad/repad sequence.  */
  else if (code == RECORD_TYPE && ecode == RECORD_TYPE
           && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
           && (!TREE_CONSTANT (TYPE_SIZE (type))
               || !TREE_CONSTANT (TYPE_SIZE (etype))
               || gnat_types_compatible_p (type, etype)
               || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
                  == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
    ;

  /* If the output type has padding, convert to the inner type and
     make a constructor to build the record.  */
  else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
    {
      /* If we previously converted from another type and our type is
         of variable size, remove the conversion to avoid the need for
         variable-size temporaries.  Likewise for a conversion between
         original and packable version.  */
      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
          && (!TREE_CONSTANT (TYPE_SIZE (type))
              || (ecode == RECORD_TYPE
                  && TYPE_NAME (etype)
                     == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
        expr = TREE_OPERAND (expr, 0);

      /* If we are just removing the padding from EXPR, convert the original
         object if we have variable size in order to avoid the need for some
         variable-size temporaries.  Likewise if the padding is a mere variant
         of the other, so we avoid a pointless unpad/repad sequence.  */
      if (TREE_CODE (expr) == COMPONENT_REF
          && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
          && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
          && (!TREE_CONSTANT (TYPE_SIZE (type))
              || gnat_types_compatible_p (type,
                                          TREE_TYPE (TREE_OPERAND (expr, 0)))
              || (ecode == RECORD_TYPE
                  && TYPE_NAME (etype)
                     == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
        return convert (type, TREE_OPERAND (expr, 0));

      /* If the result type is a padded type with a self-referentially-sized
         field and the expression type is a record, do this as an
         unchecked conversion.  */
      else if (TREE_CODE (etype) == RECORD_TYPE
               && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
        return unchecked_convert (type, expr, false);

      else
        return
          gnat_build_constructor (type,
                                  tree_cons (TYPE_FIELDS (type),
                                             convert (TREE_TYPE
                                                      (TYPE_FIELDS (type)),
                                                      expr),
                                             NULL_TREE));
    }

  /* If the input type has padding, remove it and convert to the output type.
     The conditions are ordered to ensure that the output type is not a
     padding type here, as it is not clear whether the conversion would
     always be correct if this were to happen.  */
  else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
    {
      tree unpadded;

      /* If we have just converted to this padded type, just get the
         inner expression.  */
      if (TREE_CODE (expr) == CONSTRUCTOR
          && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
          && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
             == TYPE_FIELDS (etype))
        unpadded
          = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;

      /* Otherwise, build an explicit component reference.  */
      else
        unpadded
          = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);

      return convert (type, unpadded);
    }
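
  /* As an illustration of where padding types come from (the Ada fragment
     below is only a sketch):

        type Byte_Rec is record
           C : Character;
        end record;
        for Byte_Rec'Size use 32;

     gigi wraps the 8-bit record in a padding RECORD_TYPE whose single field
     has the original type, so that objects occupy the requested 32 bits.
     Converting to or from that padded view goes through the two branches
     above, either building a CONSTRUCTOR around the inner object or peeling
     it off with a COMPONENT_REF.  */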
  /* If the input is a biased type, adjust first.  */
  if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
    return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
                                       fold_convert (TREE_TYPE (etype),
                                                     expr),
                                       TYPE_MIN_VALUE (etype)));
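
  /* Example of a biased representation (purely illustrative):

        type Level is range 100 .. 107;
        for Level'Size use 3;

     Values of Level are stored as Value - 100 in 3 bits, so converting out
     of the biased type first adds TYPE_MIN_VALUE (100 here) back, as done
     just above, while converting into a biased type subtracts it (see the
     INTEGER_TYPE case of the code switch further below).  */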
  /* If the input is a justified modular type, we need to extract the actual
     object before converting it to any other type, with the exceptions of an
     unconstrained array and of a mere type variant.  It is useful to avoid
     the extraction and conversion in the type variant case because it could
     end up replacing a VAR_DECL expr by a constructor and we might be about
     to take the address of the result.  */
  if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
      && code != UNCONSTRAINED_ARRAY_TYPE
      && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
    return convert (type, build_component_ref (expr, NULL_TREE,
                                               TYPE_FIELDS (etype), false));

  /* If converting to a type that contains a template, convert to the data
     type and then build the template.  */
  if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
    {
      tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));

      /* If the source already has a template, get a reference to the
         associated array only, as we are going to rebuild a template
         for the target type anyway.  */
      expr = maybe_unconstrained_array (expr);

      return
        gnat_build_constructor
          (type,
           tree_cons (TYPE_FIELDS (type),
                      build_template (TREE_TYPE (TYPE_FIELDS (type)),
                                      obj_type, NULL_TREE),
                      tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
                                 convert (obj_type, expr), NULL_TREE)));
    }

  /* There are some special cases of expressions that we process
     specially.  */
  switch (TREE_CODE (expr))
    {
    case ERROR_MARK:
      return expr;

    case NULL_EXPR:
      /* Just set its type here.  For TRANSFORM_EXPR, we will do the actual
         conversion in gnat_expand_expr.  NULL_EXPR does not represent
         an actual value, so no conversion is needed.  */
      expr = copy_node (expr);
      TREE_TYPE (expr) = type;
      return expr;

    case STRING_CST:
      /* If we are converting a STRING_CST to another constrained array type,
         just make a new one in the proper type.  */
      if (code == ecode && AGGREGATE_TYPE_P (etype)
          && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
               && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
        {
          expr = copy_node (expr);
          TREE_TYPE (expr) = type;
          return expr;
        }
      break;

    case CONSTRUCTOR:
      /* If we are converting a CONSTRUCTOR to a mere variant type, just make
         a new one in the proper type.  */
      if (code == ecode && gnat_types_compatible_p (type, etype))
        {
          expr = copy_node (expr);
          TREE_TYPE (expr) = type;
          return expr;
        }

      /* Likewise for a conversion between original and packable version, but
         we have to work harder in order to preserve type consistency.  */
      if (code == ecode
          && code == RECORD_TYPE
          && TYPE_NAME (type) == TYPE_NAME (etype))
        {
          VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
          unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
          VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, len);
          tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
          unsigned HOST_WIDE_INT idx;
          tree index, value;

          FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
            {
              constructor_elt *elt = VEC_quick_push (constructor_elt, v, NULL);
              /* We expect only simple constructors.  Otherwise, punt.  */
              if (!(index == efield || index == DECL_ORIGINAL_FIELD (efield)))
                break;
              elt->index = field;
              elt->value = convert (TREE_TYPE (field), value);
              efield = TREE_CHAIN (efield);
              field = TREE_CHAIN (field);
            }

          if (idx == len)
            {
              expr = copy_node (expr);
              TREE_TYPE (expr) = type;
              CONSTRUCTOR_ELTS (expr) = v;
              return expr;
            }
        }
      break;

    case UNCONSTRAINED_ARRAY_REF:
      /* Convert this to the type of the inner array by getting the address of
         the array from the template.  */
      expr = build_unary_op (INDIRECT_REF, NULL_TREE,
                             build_component_ref (TREE_OPERAND (expr, 0),
                                                  get_identifier ("P_ARRAY"),
                                                  NULL_TREE, false));
      etype = TREE_TYPE (expr);
      ecode = TREE_CODE (etype);
      break;

    case VIEW_CONVERT_EXPR:
      {
        /* GCC 4.x is very sensitive to type consistency overall, and view
           conversions thus are very frequent.  Even though just "convert"ing
           the inner operand to the output type is fine in most cases, it
           might expose unexpected input/output type mismatches in special
           circumstances, so we avoid such recursive calls when we can.  */
        tree op0 = TREE_OPERAND (expr, 0);

        /* If we are converting back to the original type, we can just
           lift the input conversion.  This is a common occurrence with
           switches back-and-forth amongst type variants.  */
        if (type == TREE_TYPE (op0))
          return op0;

        /* Otherwise, if we're converting between two aggregate types, we
           might be allowed to substitute the VIEW_CONVERT_EXPR target type
           in place or to just convert the inner expression.  */
        if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
          {
            /* If we are converting between mere variants, we can just
               substitute the VIEW_CONVERT_EXPR in place.  */
            if (gnat_types_compatible_p (type, etype))
              return build1 (VIEW_CONVERT_EXPR, type, op0);

            /* Otherwise, we may just bypass the input view conversion unless
               one of the types is a fat pointer, which is handled by
               specialized code below which relies on exact type matching.  */
            else if (!TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
              return convert (type, op0);
          }
      }
      break;

    case INDIRECT_REF:
      /* If both types are record types, just convert the pointer and
         make a new INDIRECT_REF.

         ??? Disable this for now since it causes problems with the
         code in build_binary_op for MODIFY_EXPR which wants to
         strip off conversions.  But that code really is a mess and
         we need to do this a much better way some time.  */
      if (0
          && (TREE_CODE (type) == RECORD_TYPE
              || TREE_CODE (type) == UNION_TYPE)
          && (TREE_CODE (etype) == RECORD_TYPE
              || TREE_CODE (etype) == UNION_TYPE)
          && !TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
        return build_unary_op (INDIRECT_REF, NULL_TREE,
                               convert (build_pointer_type (type),
                                        TREE_OPERAND (expr, 0)));
      break;

    default:
      break;
    }

  /* Check for converting to a pointer to an unconstrained array.  */
  if (TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
    return convert_to_fat_pointer (type, expr);

  /* If we are converting between two aggregate types that are mere
     variants, just make a VIEW_CONVERT_EXPR.  */
  else if (code == ecode
           && AGGREGATE_TYPE_P (type)
           && gnat_types_compatible_p (type, etype))
    return build1 (VIEW_CONVERT_EXPR, type, expr);

  /* In all other cases of related types, make a NOP_EXPR.  */
  else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
           || (code == INTEGER_CST && ecode == INTEGER_CST
               && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
    return fold_convert (type, expr);

  switch (code)
    {
    case VOID_TYPE:
      return fold_build1 (CONVERT_EXPR, type, expr);

    case INTEGER_TYPE:
      if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
          && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
              || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
        return unchecked_convert (type, expr, false);
      else if (TYPE_BIASED_REPRESENTATION_P (type))
        return fold_convert (type,
                             fold_build2 (MINUS_EXPR, TREE_TYPE (type),
                                          convert (TREE_TYPE (type), expr),
                                          TYPE_MIN_VALUE (type)));

      /* ... fall through ... */

    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      /* If we are converting an additive expression to an integer type
         with lower precision, be wary of the optimization that can be
         applied by convert_to_integer.  There are 2 problematic cases:
           - if the first operand was originally of a biased type,
             because we could be recursively called to convert it
             to an intermediate type and thus rematerialize the
             additive operator endlessly,
           - if the expression contains a placeholder, because an
             intermediate conversion that changes the sign could
             be inserted and thus introduce an artificial overflow
             at compile time when the placeholder is substituted.  */
      if (code == INTEGER_TYPE
          && ecode == INTEGER_TYPE
          && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
          && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
        {
          tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);

          if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
               && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
              || CONTAINS_PLACEHOLDER_P (expr))
            return build1 (NOP_EXPR, type, expr);
        }

      return fold (convert_to_integer (type, expr));

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* If converting between two pointers to records denoting
         both a template and type, adjust if needed to account
         for any differing offsets, since one might be negative.  */
      if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
        {
          tree bit_diff
            = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
                           bit_position (TYPE_FIELDS (TREE_TYPE (type))));
          tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
                                       sbitsize_int (BITS_PER_UNIT));

          expr = build1 (NOP_EXPR, type, expr);
          TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
          if (integer_zerop (byte_diff))
            return expr;

          return build_binary_op (POINTER_PLUS_EXPR, type, expr,
                                  fold (convert (sizetype, byte_diff)));
        }

      /* If converting to a thin pointer, handle specially.  */
      if (TYPE_THIN_POINTER_P (type)
          && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
        return convert_to_thin_pointer (type, expr);

      /* If converting a fat pointer to a normal pointer, get the pointer
         to the array and then convert it.  */
      else if (TYPE_FAT_POINTER_P (etype))
        expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
                                    NULL_TREE, false);

      return fold (convert_to_pointer (type, expr));

    case REAL_TYPE:
      return fold (convert_to_real (type, expr));

    case RECORD_TYPE:
      if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
        return
          gnat_build_constructor
            (type, tree_cons (TYPE_FIELDS (type),
                              convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
                              NULL_TREE));

      /* ... fall through ... */

    case ARRAY_TYPE:
      /* In these cases, assume the front-end has validated the conversion.
         If the conversion is valid, it will be a bit-wise conversion, so
         it can be viewed as an unchecked conversion.  */
      return unchecked_convert (type, expr, false);

    case UNION_TYPE:
      /* This is either a conversion between a tagged type and some
         subtype, which we have to mark as a UNION_TYPE because of
         overlapping fields, or a conversion of an Unchecked_Union.  */
      return unchecked_convert (type, expr, false);

    case UNCONSTRAINED_ARRAY_TYPE:
      /* If EXPR is a constrained array, take its address, convert it to a
         fat pointer, and then dereference it.  Likewise if EXPR is a
         record containing both a template and a constrained array.
         Note that a record representing a justified modular type
         always represents a packed constrained array.  */
      if (ecode == ARRAY_TYPE
          || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
          || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
          || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
        return
          build_unary_op
            (INDIRECT_REF, NULL_TREE,
             convert_to_fat_pointer (TREE_TYPE (type),
                                     build_unary_op (ADDR_EXPR,
                                                     NULL_TREE, expr)));

      /* Do something very similar for converting one unconstrained
         array to another.  */
      else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
        return
          build_unary_op (INDIRECT_REF, NULL_TREE,
                          convert (TREE_TYPE (type),
                                   build_unary_op (ADDR_EXPR,
                                                   NULL_TREE, expr)));
      else
        gcc_unreachable ();

    case COMPLEX_TYPE:
      return fold (convert_to_complex (type, expr));

    default:
      gcc_unreachable ();
    }
}
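
/* To summarize the UNCONSTRAINED_ARRAY_TYPE case of the switch above with
   a sketch: for an object A of a constrained array subtype, converting A
   to the unconstrained base type is essentially

     *convert_to_fat_pointer (TREE_TYPE (type), &A)

   i.e. take the address of A, pair it with a freshly built bounds template
   inside a fat pointer, and dereference the result, which is exactly the
   build_unary_op/convert_to_fat_pointer combination built there.  */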
/* Remove all conversions that are done in EXP.  This includes converting
   from a padded type or to a justified modular type.  If TRUE_ADDRESS
   is true, always return the address of the containing object even if
   the address is not bit-aligned.  */

tree
remove_conversions (tree exp, bool true_address)
{
  switch (TREE_CODE (exp))
    {
    case CONSTRUCTOR:
      if (true_address
          && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
        return
          remove_conversions (VEC_index (constructor_elt,
                                         CONSTRUCTOR_ELTS (exp), 0)->value,
                              true);
      break;

    case COMPONENT_REF:
      if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
          && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
        return remove_conversions (TREE_OPERAND (exp, 0), true_address);
      break;

    case VIEW_CONVERT_EXPR:  case NON_LVALUE_EXPR:
    CASE_CONVERT:
      return remove_conversions (TREE_OPERAND (exp, 0), true_address);

    default:
      break;
    }

  return exp;
}

/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
   refers to the underlying array.  If its type has TYPE_CONTAINS_TEMPLATE_P,
   likewise return an expression pointing to the underlying array.  */

tree
maybe_unconstrained_array (tree exp)
{
  enum tree_code code = TREE_CODE (exp);
  tree new;

  switch (TREE_CODE (TREE_TYPE (exp)))
    {
    case UNCONSTRAINED_ARRAY_TYPE:
      if (code == UNCONSTRAINED_ARRAY_REF)
        {
          new
            = build_unary_op (INDIRECT_REF, NULL_TREE,
                              build_component_ref (TREE_OPERAND (exp, 0),
                                                   get_identifier ("P_ARRAY"),
                                                   NULL_TREE, false));
          TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
          return new;
        }

      else if (code == NULL_EXPR)
        return build1 (NULL_EXPR,
                       TREE_TYPE (TREE_TYPE (TYPE_FIELDS
                                             (TREE_TYPE (TREE_TYPE (exp))))),
                       TREE_OPERAND (exp, 0));

    case RECORD_TYPE:
      /* If this is a padded type, convert to the unpadded type and see if
         it contains a template.  */
      if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
        {
          new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
          if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
              && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
            return
              build_component_ref (new, NULL_TREE,
                                   TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
                                   0);
        }
      else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
        return
          build_component_ref (exp, NULL_TREE,
                               TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
      break;

    default:
      break;
    }

  return exp;
}

/* Return true if EXPR is an expression that can be folded as an operand
   of a VIEW_CONVERT_EXPR.  See the head comment of unchecked_convert for
   the details.  */

static bool
can_fold_for_view_convert_p (tree expr)
{
  tree t1, t2;

  /* The folder will fold NOP_EXPRs between integral types with the same
     precision (in the middle-end's sense).  We cannot allow it if the
     types don't have the same precision in the Ada sense as well.  */
  if (TREE_CODE (expr) != NOP_EXPR)
    return true;

  t1 = TREE_TYPE (expr);
  t2 = TREE_TYPE (TREE_OPERAND (expr, 0));

  /* Defer to the folder for non-integral conversions.  */
  if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
    return true;

  /* Only fold conversions that preserve both precisions.  */
  if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
      && operand_equal_p (rm_size (t1), rm_size (t2), 0))
    return true;

  return false;
}

/* Return an expression that does an unchecked conversion of EXPR to TYPE.
   If NOTRUNC_P is true, truncation operations should be suppressed.

   Special care is required with (source or target) integral types whose
   precision is not equal to their size, to make sure we fetch or assign
   the value bits whose location might depend on the endianness, e.g.

     Rmsize : constant := 8;
     subtype Int is Integer range 0 .. 2 ** Rmsize - 1;

     type Bit_Array is array (1 .. Rmsize) of Boolean;
     pragma Pack (Bit_Array);

     function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);

     Value : Int := 2#1000_0001#;
     Vbits : Bit_Array := To_Bit_Array (Value);

   we expect the 8 bits at Vbits'Address to always contain Value, while
   their original location depends on the endianness, at Value'Address
   on a little-endian architecture but not on a big-endian one.

   ??? There is a problematic discrepancy between what is called precision
   here (and more generally throughout gigi) for integral types and what is
   called precision in the middle-end.  In the former case it's the RM size
   as given by TYPE_RM_SIZE (or rm_size) whereas it's TYPE_PRECISION in the
   latter case, the hitch being that they are not equal when they matter,
   that is when the number of value bits is not equal to the type's size:
   TYPE_RM_SIZE does give the number of value bits but TYPE_PRECISION is set
   to the size.  The sole exception is BOOLEAN_TYPEs, for which both are 1.

   The consequence is that gigi must duplicate code bridging the gap between
   the type's size and its precision that exists for TYPE_PRECISION in the
   middle-end, because the latter knows nothing about TYPE_RM_SIZE, and be
   wary of transformations applied in the middle-end based on TYPE_PRECISION
   because this value doesn't reflect the actual precision for Ada.  */
tree
unchecked_convert (tree type, tree expr, bool notrunc_p)
{
  tree etype = TREE_TYPE (expr);

  /* If the expression is already of the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If both types are integral, just do a normal conversion.
     Likewise for a conversion to an unconstrained array.  */
  if ((((INTEGRAL_TYPE_P (type)
         && !(TREE_CODE (type) == INTEGER_TYPE
              && TYPE_VAX_FLOATING_POINT_P (type)))
        || (POINTER_TYPE_P (type) && !TYPE_THIN_POINTER_P (type))
        || (TREE_CODE (type) == RECORD_TYPE
            && TYPE_JUSTIFIED_MODULAR_P (type)))
       && ((INTEGRAL_TYPE_P (etype)
            && !(TREE_CODE (etype) == INTEGER_TYPE
                 && TYPE_VAX_FLOATING_POINT_P (etype)))
           || (POINTER_TYPE_P (etype) && !TYPE_THIN_POINTER_P (etype))
           || (TREE_CODE (etype) == RECORD_TYPE
               && TYPE_JUSTIFIED_MODULAR_P (etype))))
      || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    {
      if (TREE_CODE (etype) == INTEGER_TYPE
          && TYPE_BIASED_REPRESENTATION_P (etype))
        {
          tree ntype = copy_type (etype);
          TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
          TYPE_MAIN_VARIANT (ntype) = ntype;
          expr = build1 (NOP_EXPR, ntype, expr);
        }

      if (TREE_CODE (type) == INTEGER_TYPE
          && TYPE_BIASED_REPRESENTATION_P (type))
        {
          tree rtype = copy_type (type);
          TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
          TYPE_MAIN_VARIANT (rtype) = rtype;
          expr = convert (rtype, expr);
          expr = build1 (NOP_EXPR, type, expr);
        }

      /* We have another special case: if we are unchecked converting either
         a subtype or a type with limited range into a base type, we need to
         ensure that VRP doesn't propagate range information because this
         conversion may be done precisely to validate that the object is
         within the range it is supposed to have.  */
      else if (TREE_CODE (expr) != INTEGER_CST
               && TREE_CODE (type) == INTEGER_TYPE && !TREE_TYPE (type)
               && ((TREE_CODE (etype) == INTEGER_TYPE && TREE_TYPE (etype))
                   || TREE_CODE (etype) == ENUMERAL_TYPE
                   || TREE_CODE (etype) == BOOLEAN_TYPE))
        {
          /* The optimization barrier is a VIEW_CONVERT_EXPR node; moreover,
             in order not to be deemed a useless type conversion, it must
             be from subtype to base type.

             Therefore we first do the bulk of the conversion to a subtype of
             the final type.  And this conversion must itself not be deemed
             useless if the source type is not a subtype because, otherwise,
             the final VIEW_CONVERT_EXPR will be deemed so as well.  That's
             why we toggle the unsigned flag in this conversion, which is
             harmless since the final conversion is only a reinterpretation
             of the bit pattern.

             ??? This may raise addressability and/or aliasing issues because
             VIEW_CONVERT_EXPR gets gimplified as an lvalue, thus causing the
             address of its operand to be taken if it is deemed addressable
             and not already in GIMPLE form.  */
          tree rtype
            = gnat_type_for_mode (TYPE_MODE (type), !TYPE_UNSIGNED (etype));
          rtype = copy_type (rtype);
          TYPE_MAIN_VARIANT (rtype) = rtype;
          TREE_TYPE (rtype) = type;
          expr = convert (rtype, expr);
          expr = build1 (VIEW_CONVERT_EXPR, type, expr);
        }

      else
        expr = convert (type, expr);
    }

  /* If we are converting to an integral type whose precision is not equal
     to its size, first unchecked convert to a record that contains an
     object of the output type.  Then extract the field.  */
  else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
           && 0 != compare_tree_int (TYPE_RM_SIZE (type),
                                     GET_MODE_BITSIZE (TYPE_MODE (type))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field = create_field_decl (get_identifier ("OBJ"), type,
                                      rec_type, 1, 0, 0, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = unchecked_convert (rec_type, expr, notrunc_p);
      expr = build_component_ref (expr, NULL_TREE, field, 0);
    }

  /* Similarly if we are converting from an integral type whose precision
     is not equal to its size.  */
  else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype)
           && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
                                     GET_MODE_BITSIZE (TYPE_MODE (etype))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field
        = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
                             1, 0, 0, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
      expr = unchecked_convert (type, expr, notrunc_p);
    }

  /* We have a special case when we are converting between two
     unconstrained array types.  In that case, take the address,
     convert the fat pointer types, and dereference.  */
  else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
           && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    expr = build_unary_op (INDIRECT_REF, NULL_TREE,
                           build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
                                   build_unary_op (ADDR_EXPR, NULL_TREE,
                                                   expr)));
  else
    {
      expr = maybe_unconstrained_array (expr);
      etype = TREE_TYPE (expr);
      if (can_fold_for_view_convert_p (expr))
        expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
      else
        expr = build1 (VIEW_CONVERT_EXPR, type, expr);
    }

  /* If the result is an integral type whose precision is not equal to its
     size, sign- or zero-extend the result.  We need not do this if the input
     is an integral type of the same precision and signedness, or if the
     output is a biased type, or if both the input and output are unsigned.  */
  if (!notrunc_p
      && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
      && !(TREE_CODE (type) == INTEGER_TYPE
           && TYPE_BIASED_REPRESENTATION_P (type))
      && 0 != compare_tree_int (TYPE_RM_SIZE (type),
                                GET_MODE_BITSIZE (TYPE_MODE (type)))
      && !(INTEGRAL_TYPE_P (etype)
           && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
           && operand_equal_p (TYPE_RM_SIZE (type),
                               (TYPE_RM_SIZE (etype) != 0
                                ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
                               0))
      && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
    {
      tree base_type = gnat_type_for_mode (TYPE_MODE (type),
                                           TYPE_UNSIGNED (type));
      tree shift_expr
        = convert (base_type,
                   size_binop (MINUS_EXPR,
                               bitsize_int
                               (GET_MODE_BITSIZE (TYPE_MODE (type))),
                               TYPE_RM_SIZE (type)));
      expr
        = convert (type,
                   build_binary_op (RSHIFT_EXPR, base_type,
                                    build_binary_op (LSHIFT_EXPR, base_type,
                                                     convert (base_type, expr),
                                                     shift_expr),
                                    shift_expr));
    }

  /* An unchecked conversion should never raise Constraint_Error.  The code
     below assumes that GCC's conversion routines overflow the same way that
     the underlying hardware does.  This is probably true.  In the rare case
     when it is false, we can rely on the fact that such conversions are
     erroneous anyway.  */
  if (TREE_CODE (expr) == INTEGER_CST)
    TREE_OVERFLOW (expr) = 0;

  /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
     show that it is no longer constant.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
      && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
                           OEP_ONLY_CONST))
    TREE_CONSTANT (expr) = 0;

  return expr;
}
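
/* Small worked example for the extension step above: for a signed type
   whose TYPE_RM_SIZE is 5 held in an 8-bit mode, the shift count is
   8 - 5 = 3 and the result is computed as

     (expr << 3) >> 3

   in the signed base type, which replicates bit 4 into the three top bits;
   with an unsigned base type the same sequence zero-extends instead.  The
   figures are only an instance of the general
   GET_MODE_BITSIZE - TYPE_RM_SIZE computation.  */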
/* Return the appropriate GCC tree code for the specified GNAT_TYPE,
   the latter being a record type as predicated by Is_Record_Type.  */

enum tree_code
tree_code_for_record_type (Entity_Id gnat_type)
{
  Node_Id component_list
    = Component_List (Type_Definition
                      (Declaration_Node
                       (Implementation_Base_Type (gnat_type))));
  Node_Id component;

  /* Make this a UNION_TYPE unless it's either not an Unchecked_Union or
     we have a non-discriminant field outside a variant.  In either case,
     it's a RECORD_TYPE.  */

  if (!Is_Unchecked_Union (gnat_type))
    return RECORD_TYPE;

  for (component = First_Non_Pragma (Component_Items (component_list));
       Present (component);
       component = Next_Non_Pragma (component))
    if (Ekind (Defining_Entity (component)) == E_Component)
      return RECORD_TYPE;

  return UNION_TYPE;
}
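
/* Illustrative Ada source for the distinction made above:

      type Rec (D : Boolean := False) is record
         case D is
            when False => I : Integer;
            when True  => F : Float;
         end case;
      end record;
      pragma Unchecked_Union (Rec);

   With the pragma and no non-discriminant component outside the variant
   part, the type maps to a UNION_TYPE; declaring such a component before
   "case D is", or omitting the pragma, yields a RECORD_TYPE instead.  */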
/* Return true if GNU_TYPE is suitable as the type of a non-aliased
   component of an aggregate type.  */

bool
type_for_nonaliased_component_p (tree gnu_type)
{
  /* If the type is passed by reference, we may have pointers to the
     component, so it cannot be made non-aliased.  */
  if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
    return false;

  /* We used to say that any component of aggregate type is aliased
     because the front-end may take 'Reference of it.  The front-end
     has been enhanced in the meantime so as to use a renaming instead
     in most cases, but the back-end can probably take the address of
     such a component too so we go for the conservative stance.

     For instance, we might need the address of any array type, even
     if normally passed by copy, to construct a fat pointer if the
     component is used as an actual for an unconstrained formal.

     Likewise for record types: even if a specific record subtype is
     passed by copy, the parent type might be passed by reference (e.g.
     if it's of variable size) and we might take the address of a child
     component to pass to a parent formal.  We have no way to check
     for such conditions here.  */
  if (AGGREGATE_TYPE_P (gnu_type))
    return false;

  return true;
}

/* Perform final processing on global variables.  */

void
gnat_write_global_declarations (void)
{
  /* Proceed to optimize and emit assembly.
     FIXME: shouldn't be the front end's responsibility to call this.  */
  cgraph_optimize ();

  /* Emit debug info for all global declarations.  */
  emit_debug_global_declarations (VEC_address (tree, global_decls),
                                  VEC_length (tree, global_decls));
}
/* ************************************************************************
 * *                          GCC builtins support                        *
 * ************************************************************************ */

/* The general scheme is fairly simple:

   For each builtin function/type to be declared, gnat_install_builtins calls
   internal facilities which eventually get to gnat_push_decl, which in turn
   tracks the so-declared builtin function decls in the 'builtin_decls' global
   data structure.  When an Intrinsic subprogram declaration is processed, we
   search this global data structure to retrieve the associated BUILT_IN DECL
   node.  */

/* Search the chain of currently available builtin declarations for a node
   corresponding to function NAME (an IDENTIFIER_NODE).  Return the first node
   found, if any, or NULL_TREE otherwise.  */

tree
builtin_decl_for (tree name)
{
  unsigned i;
  tree decl;

  for (i = 0; VEC_iterate (tree, builtin_decls, i, decl); i++)
    if (DECL_NAME (decl) == name)
      return decl;

  return NULL_TREE;
}
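
/* Typical use, when an Intrinsic subprogram imported under a builtin name
   is resolved (sketch only):

     tree decl = builtin_decl_for (get_identifier ("__builtin_memcpy"));
     if (decl != NULL_TREE)
       ... use DECL as the subprogram's GCC node ...

   The lookup is linear in the number of installed builtins; the TODO list
   below already notes that a more efficient name/decl mapping scheme would
   be welcome.  */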
/* The code below eventually exposes gnat_install_builtins, which declares
   the builtin types and functions we might need, either internally or as
   user-accessible facilities.

   ??? This is a first implementation shot, still in rough shape.  It is
   heavily inspired from the "C" family implementation, with chunks copied
   verbatim from there.

   Two obvious TODO candidates are
   o Use a more efficient name/decl mapping scheme
   o Devise a middle-end infrastructure to avoid having to copy
     pieces between front-ends.  */

/* ----------------------------------------------------------------------- *
 *                        BUILTIN ELEMENTARY TYPES                         *
 * ----------------------------------------------------------------------- */

/* Standard data types to be used in builtin argument declarations.  */

enum c_tree_index
{
    CTI_SIGNED_SIZE_TYPE, /* For format checking only.  */
    CTI_STRING_TYPE,
    CTI_CONST_STRING_TYPE,

    CTI_MAX
};

static tree c_global_trees[CTI_MAX];

#define signed_size_type_node   c_global_trees[CTI_SIGNED_SIZE_TYPE]
#define string_type_node        c_global_trees[CTI_STRING_TYPE]
#define const_string_type_node  c_global_trees[CTI_CONST_STRING_TYPE]

/* ??? In addition to some attribute handlers, we currently don't support a
   (small) number of builtin types, which in turn inhibits support for a
   number of builtin functions.  */
#define wint_type_node    void_type_node
#define intmax_type_node  void_type_node
#define uintmax_type_node void_type_node

/* Build the void_list_node (void_type_node having been created).  */

static tree
build_void_list_node (void)
{
  tree t = build_tree_list (NULL_TREE, void_type_node);
  return t;
}

/* Used to help initialize the builtin-types.def table.  When a type of
   the correct size doesn't exist, use error_mark_node instead of NULL.
   The latter results in segfaults even when a decl using the type doesn't
   get invoked.  */

static tree
builtin_type_for_size (int size, bool unsignedp)
{
  tree type = lang_hooks.types.type_for_size (size, unsignedp);
  return type ? type : error_mark_node;
}

/* Build/push the elementary type decls that builtin functions/types
   require.  */

static void
install_builtin_elementary_types (void)
{
  signed_size_type_node = size_type_node;
  pid_type_node = integer_type_node;
  void_list_node = build_void_list_node ();

  string_type_node = build_pointer_type (char_type_node);
  const_string_type_node
    = build_pointer_type (build_qualified_type
                          (char_type_node, TYPE_QUAL_CONST));
}

/* ----------------------------------------------------------------------- *
 *                          BUILTIN FUNCTION TYPES                         *
 * ----------------------------------------------------------------------- */

/* Now, builtin function types per se.  */

enum c_builtin_type
{
#define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
#define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
#define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
#define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
#define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
#define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
#define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
#define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
#define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
#define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
#define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
#define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
#define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
#define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
#define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG6) \
  NAME,
#define DEF_POINTER_TYPE(NAME, TYPE) NAME,
#include "builtin-types.def"
#undef DEF_PRIMITIVE_TYPE
#undef DEF_FUNCTION_TYPE_0
#undef DEF_FUNCTION_TYPE_1
#undef DEF_FUNCTION_TYPE_2
#undef DEF_FUNCTION_TYPE_3
#undef DEF_FUNCTION_TYPE_4
#undef DEF_FUNCTION_TYPE_5
#undef DEF_FUNCTION_TYPE_6
#undef DEF_FUNCTION_TYPE_7
#undef DEF_FUNCTION_TYPE_VAR_0
#undef DEF_FUNCTION_TYPE_VAR_1
#undef DEF_FUNCTION_TYPE_VAR_2
#undef DEF_FUNCTION_TYPE_VAR_3
#undef DEF_FUNCTION_TYPE_VAR_4
#undef DEF_FUNCTION_TYPE_VAR_5
#undef DEF_POINTER_TYPE
  BT_LAST
};

typedef enum c_builtin_type builtin_type;

/* A temporary array used in communication with def_fn_type.  */
static GTY(()) tree builtin_types[(int) BT_LAST + 1];
/* A helper function for install_builtin_types.  Build function type
   for DEF with return type RET and N arguments.  If VAR is true, then the
   function should be variadic after those N arguments.

   Takes special care not to ICE if any of the types involved are
   error_mark_node, which indicates that said type is not in fact available
   (see builtin_type_for_size).  In which case the function type as a whole
   should be error_mark_node.  */

static void
def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
{
  tree args = NULL, t;
  va_list list;
  int i;

  va_start (list, n);
  for (i = 0; i < n; ++i)
    {
      builtin_type a = va_arg (list, builtin_type);
      t = builtin_types[a];
      if (t == error_mark_node)
        goto egress;
      args = tree_cons (NULL_TREE, t, args);
    }
  va_end (list);

  args = nreverse (args);
  if (!var)
    args = chainon (args, void_list_node);

  t = builtin_types[ret];
  if (t == error_mark_node)
    goto egress;
  t = build_function_type (t, args);

 egress:
  builtin_types[def] = t;
}
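
/* For instance, a builtin-types.def entry of the form

     DEF_FUNCTION_TYPE_2 (BT_FN_SOMETHING, BT_INT, BT_PTR, BT_PTR)

   expands, through the macros in install_builtin_function_types below, into

     def_fn_type (BT_FN_SOMETHING, BT_INT, 0, 2, BT_PTR, BT_PTR);

   which builds a non-variadic function type taking two pointers and
   returning int, terminated by void_list_node, and stores it into
   builtin_types[BT_FN_SOMETHING].  BT_FN_SOMETHING is a made-up enumerator
   used only to illustrate the mechanism.  */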
/* Build the builtin function types and install them in the builtin_types
   array for later use in builtin function decls.  */

static void
install_builtin_function_types (void)
{
  tree va_list_ref_type_node;
  tree va_list_arg_type_node;

  if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
    {
      va_list_arg_type_node = va_list_ref_type_node =
        build_pointer_type (TREE_TYPE (va_list_type_node));
    }
  else
    {
      va_list_arg_type_node = va_list_type_node;
      va_list_ref_type_node = build_reference_type (va_list_type_node);
    }

#define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
  builtin_types[ENUM] = VALUE;
#define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
  def_fn_type (ENUM, RETURN, 0, 0);
#define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
  def_fn_type (ENUM, RETURN, 0, 1, ARG1);
#define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
  def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
#define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
  def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
#define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
  def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
#define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
  def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
#define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
                            ARG6)                                       \
  def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
#define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
                            ARG6, ARG7)                                 \
  def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
#define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
  def_fn_type (ENUM, RETURN, 1, 0);
#define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
  def_fn_type (ENUM, RETURN, 1, 1, ARG1);
#define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
  def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
#define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
  def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
#define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
  def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
#define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
  def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
#define DEF_POINTER_TYPE(ENUM, TYPE) \
  builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);

#include "builtin-types.def"

#undef DEF_PRIMITIVE_TYPE
#undef DEF_FUNCTION_TYPE_1
#undef DEF_FUNCTION_TYPE_2
#undef DEF_FUNCTION_TYPE_3
#undef DEF_FUNCTION_TYPE_4
#undef DEF_FUNCTION_TYPE_5
#undef DEF_FUNCTION_TYPE_6
#undef DEF_FUNCTION_TYPE_VAR_0
#undef DEF_FUNCTION_TYPE_VAR_1
#undef DEF_FUNCTION_TYPE_VAR_2
#undef DEF_FUNCTION_TYPE_VAR_3
#undef DEF_FUNCTION_TYPE_VAR_4
#undef DEF_FUNCTION_TYPE_VAR_5
#undef DEF_POINTER_TYPE
  builtin_types[(int) BT_LAST] = NULL_TREE;
}

/* ----------------------------------------------------------------------- *
 *                            BUILTIN ATTRIBUTES                           *
 * ----------------------------------------------------------------------- */

enum built_in_attribute
{
#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
  ATTR_LAST
};

static GTY(()) tree built_in_attributes[(int) ATTR_LAST];

static void
install_builtin_attributes (void)
{
  /* Fill in the built_in_attributes array.  */
#define DEF_ATTR_NULL_TREE(ENUM)                                \
  built_in_attributes[(int) ENUM] = NULL_TREE;
#define DEF_ATTR_INT(ENUM, VALUE)                               \
  built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
#define DEF_ATTR_IDENT(ENUM, STRING)                            \
  built_in_attributes[(int) ENUM] = get_identifier (STRING);
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN)         \
  built_in_attributes[(int) ENUM]                               \
    = tree_cons (built_in_attributes[(int) PURPOSE],            \
                 built_in_attributes[(int) VALUE],              \
                 built_in_attributes[(int) CHAIN]);
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
}
/* Handle a "const" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_const_attribute (tree *node, tree ARG_UNUSED (name),
                        tree ARG_UNUSED (args), int ARG_UNUSED (flags),
                        bool *no_add_attrs)
{
  if (TREE_CODE (*node) == FUNCTION_DECL)
    TREE_READONLY (*node) = 1;
  else
    *no_add_attrs = true;

  return NULL_TREE;
}

/* Handle a "nothrow" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
                          tree ARG_UNUSED (args), int ARG_UNUSED (flags),
                          bool *no_add_attrs)
{
  if (TREE_CODE (*node) == FUNCTION_DECL)
    TREE_NOTHROW (*node) = 1;
  else
    *no_add_attrs = true;

  return NULL_TREE;
}

/* Handle a "pure" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                       int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (TREE_CODE (*node) == FUNCTION_DECL)
    DECL_PURE_P (*node) = 1;
  /* ??? TODO: Support types.  */
  else
    {
      warning (OPT_Wattributes, "%qE attribute ignored", name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}

/* Handle a "no vops" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
                         tree ARG_UNUSED (args), int ARG_UNUSED (flags),
                         bool *ARG_UNUSED (no_add_attrs))
{
  gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
  DECL_IS_NOVOPS (*node) = 1;
  return NULL_TREE;
}

/* Helper for nonnull attribute handling; fetch the operand number
   from the attribute argument list.  */

static bool
get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
{
  /* Verify the arg number is a constant.  */
  if (TREE_CODE (arg_num_expr) != INTEGER_CST
      || TREE_INT_CST_HIGH (arg_num_expr) != 0)
    return false;

  *valp = TREE_INT_CST_LOW (arg_num_expr);
  return true;
}

/* Handle the "nonnull" attribute.  */

static tree
handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
                          tree args, int ARG_UNUSED (flags),
                          bool *no_add_attrs)
{
  tree type = *node;
  unsigned HOST_WIDE_INT attr_arg_num;

  /* If no arguments are specified, all pointer arguments should be
     non-null.  Verify a full prototype is given so that the arguments
     will have the correct types when we actually check them later.  */
  if (!args)
    {
      if (!TYPE_ARG_TYPES (type))
        {
          error ("nonnull attribute without arguments on a non-prototype");
          *no_add_attrs = true;
        }
      return NULL_TREE;
    }

  /* Argument list specified.  Verify that each argument number references
     a pointer argument.  */
  for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
    {
      tree argument;
      unsigned HOST_WIDE_INT arg_num = 0, ck_num;

      if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
        {
          error ("nonnull argument has invalid operand number (argument %lu)",
                 (unsigned long) attr_arg_num);
          *no_add_attrs = true;
          return NULL_TREE;
        }

      argument = TYPE_ARG_TYPES (type);
      if (argument)
        {
          for (ck_num = 1; ; ck_num++)
            {
              if (!argument || ck_num == arg_num)
                break;
              argument = TREE_CHAIN (argument);
            }

          if (!argument
              || TREE_CODE (TREE_VALUE (argument)) == VOID_TYPE)
            {
              error ("nonnull argument with out-of-range operand number (argument %lu, operand %lu)",
                     (unsigned long) attr_arg_num, (unsigned long) arg_num);
              *no_add_attrs = true;
              return NULL_TREE;
            }

          if (TREE_CODE (TREE_VALUE (argument)) != POINTER_TYPE)
            {
              error ("nonnull argument references non-pointer operand (argument %lu, operand %lu)",
                     (unsigned long) attr_arg_num, (unsigned long) arg_num);
              *no_add_attrs = true;
              return NULL_TREE;
            }
        }
    }

  return NULL_TREE;
}
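
/* The attribute validated above is the one attached to various builtin
   declarations; in C terms an equivalent user-level declaration would look
   like

     extern void *my_copy (void *dst, const void *src, __SIZE_TYPE__ n)
       __attribute__ ((nonnull (1, 2)));

   where each attribute argument must name a pointer parameter, which is
   precisely what the operand-number checks above enforce (the declaration
   itself is only an illustration).  */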
/* Handle a "sentinel" attribute.  */

static tree
handle_sentinel_attribute (tree *node, tree name, tree args,
                           int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree params = TYPE_ARG_TYPES (*node);

  if (!params)
    {
      warning (OPT_Wattributes,
               "%qE attribute requires prototypes with named arguments", name);
      *no_add_attrs = true;
    }
  else
    {
      while (TREE_CHAIN (params))
        params = TREE_CHAIN (params);

      if (VOID_TYPE_P (TREE_VALUE (params)))
        {
          warning (OPT_Wattributes,
                   "%qE attribute only applies to variadic functions", name);
          *no_add_attrs = true;
        }
    }

  if (args)
    {
      tree position = TREE_VALUE (args);

      if (TREE_CODE (position) != INTEGER_CST)
        {
          warning (0, "requested position is not an integer constant");
          *no_add_attrs = true;
        }
      else
        {
          if (tree_int_cst_lt (position, integer_zero_node))
            {
              warning (0, "requested position is less than zero");
              *no_add_attrs = true;
            }
        }
    }

  return NULL_TREE;
}

/* Handle a "noreturn" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                           int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree type = TREE_TYPE (*node);

  /* See FIXME comment in c_common_attribute_table.  */
  if (TREE_CODE (*node) == FUNCTION_DECL)
    TREE_THIS_VOLATILE (*node) = 1;
  else if (TREE_CODE (type) == POINTER_TYPE
           && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
    TREE_TYPE (*node)
      = build_pointer_type
        (build_type_variant (TREE_TYPE (type),
                             TYPE_READONLY (TREE_TYPE (type)), 1));
  else
    {
      warning (OPT_Wattributes, "%qE attribute ignored", name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}

/* Handle a "malloc" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
                         int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (TREE_CODE (*node) == FUNCTION_DECL
      && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
    DECL_IS_MALLOC (*node) = 1;
  else
    {
      warning (OPT_Wattributes, "%qE attribute ignored", name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}

/* Fake handler for attributes we don't properly support.  */

static tree
fake_attribute_handler (tree * ARG_UNUSED (node),
                        tree ARG_UNUSED (name),
                        tree ARG_UNUSED (args),
                        int ARG_UNUSED (flags),
                        bool * ARG_UNUSED (no_add_attrs))
{
  return NULL_TREE;
}

/* Handle a "type_generic" attribute.  */

static tree
handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
                               tree ARG_UNUSED (args), int ARG_UNUSED (flags),
                               bool * ARG_UNUSED (no_add_attrs))
{
  tree params;

  /* Ensure we have a function type.  */
  gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);

  params = TYPE_ARG_TYPES (*node);
  while (params && ! VOID_TYPE_P (TREE_VALUE (params)))
    params = TREE_CHAIN (params);

  /* Ensure we have a variadic function.  */
  gcc_assert (!params);

  return NULL_TREE;
}

/* ----------------------------------------------------------------------- *
 *                              BUILTIN FUNCTIONS                          *
 * ----------------------------------------------------------------------- */

/* Worker for DEF_BUILTIN.  Possibly define a builtin function with one or two
   names.  Does not declare a non-__builtin_ function if flag_no_builtin, or
   if nonansi_p and flag_no_nonansi_builtin.  */

static void
def_builtin_1 (enum built_in_function fncode,
               const char *name,
               enum built_in_class fnclass,
               tree fntype, tree libtype,
               bool both_p, bool fallback_p,
               bool nonansi_p ATTRIBUTE_UNUSED,
               tree fnattrs, bool implicit_p)
{
  tree decl;
  const char *libname;

  /* Preserve an already installed decl.  It most likely was setup in advance
     (e.g. as part of the internal builtins) for specific reasons.  */
  if (built_in_decls[(int) fncode] != NULL_TREE)
    return;

  gcc_assert ((!both_p && !fallback_p)
              || !strncmp (name, "__builtin_",
                           strlen ("__builtin_")));

  libname = name + strlen ("__builtin_");
  decl = add_builtin_function (name, fntype, fncode, fnclass,
                               (fallback_p ? libname : NULL),
                               fnattrs);
  if (both_p)
    /* ??? This is normally further controlled by command-line options
       like -fno-builtin, but we don't have them for Ada.  */
    add_builtin_function (libname, libtype, fncode, fnclass,
                          NULL, fnattrs);

  built_in_decls[(int) fncode] = decl;
  if (implicit_p)
    implicit_built_in_decls[(int) fncode] = decl;
}

static int flag_isoc94 = 0;
static int flag_isoc99 = 0;
/* Install what the common builtins.def offers.  */

static void
install_builtin_functions (void)
{
#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
                    NONANSI_P, ATTRS, IMPLICIT, COND)                     \
  if (NAME && COND)                                                       \
    def_builtin_1 (ENUM, NAME, CLASS,                                     \
                   builtin_types[(int) TYPE],                             \
                   builtin_types[(int) LIBTYPE],                          \
                   BOTH_P, FALLBACK_P, NONANSI_P,                         \
                   built_in_attributes[(int) ATTRS], IMPLICIT);
#include "builtins.def"
#undef DEF_BUILTIN
}

/* ----------------------------------------------------------------------- *
 *                              BUILTIN FUNCTIONS                          *
 * ----------------------------------------------------------------------- */
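
/* On the Ada side, the decls installed below are typically reached through
   an Intrinsic import, for instance (illustrative only):

      function Builtin_Sqrt (X : Long_Float) return Long_Float;
      pragma Import (Intrinsic, Builtin_Sqrt, "__builtin_sqrt");

   The link name is looked up in builtin_decls (see builtin_decl_for above)
   when the subprogram declaration is processed, following the general
   scheme described at the top of this section.  */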
/* Install the builtin functions we might need.  */

void
gnat_install_builtins (void)
{
  install_builtin_elementary_types ();
  install_builtin_function_types ();
  install_builtin_attributes ();

  /* Install builtins used by generic middle-end pieces first.  Some of these
     know about internal specificities and control attributes accordingly, for
     instance __builtin_alloca vs no-throw and -fstack-check.  We will ignore
     the generic definition from builtins.def.  */
  build_common_builtin_nodes ();

  /* Now, install the target specific builtins, such as the AltiVec family on
     ppc, and the common set as exposed by builtins.def.  */
  targetm.init_builtins ();
  install_builtin_functions ();
}

#include "gt-ada-utils.h"
#include "gtype-ada.h"