1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2004, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 2, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License distributed with GNAT; see file COPYING. If not, write *
19 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
20 * MA 02111-1307, USA. *
22 * GNAT was originally developed by the GNAT team at New York University. *
23 * Extensive contributions were provided by Ada Core Technologies Inc. *
25 ****************************************************************************/
29 #include "coretypes.h"
56 #ifndef MAX_FIXED_MODE_SIZE
57 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
60 #ifndef MAX_BITS_PER_WORD
61 #define MAX_BITS_PER_WORD BITS_PER_WORD
/* NOTE(review): this region is a damaged extraction -- original source line
   numbers are embedded in the text and several lines (e.g. the force_global
   variable, struct e_stack members) are missing.  Code left byte-identical;
   comments only.  TODO: restore from a pristine copy of the file.  */
/* GC-rooted (GTY) global tables shared by the GNAT-to-GCC translation.  */
64 /* If nonzero, pretend we are allocating at global level. */
67 /* Tree nodes for the various types and decls we create. */
68 tree gnat_std_decls
[(int) ADT_LAST
];
70 /* Functions to call for each of the possible raise reasons. */
71 tree gnat_raise_decls
[(int) LAST_REASON_CODE
+ 1];
73 /* Associates a GNAT tree node to a GCC tree node. It is used in
74 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
75 of `save_gnu_tree' for more info. */
/* Indexed by (gnat_entity - First_Node_Id) in the accessors below.  */
76 static GTY((length ("max_gnat_nodes"))) tree
*associate_gnat_to_gnu
;
78 /* This listhead is used to record any global objects that need elaboration.
79 TREE_PURPOSE is the variable to be elaborated and TREE_VALUE is the
80 initial value to assign. */
82 static GTY(()) tree pending_elaborations
;
84 /* This stack allows us to momentarily switch to generating elaboration
85 lists for an inner context. */
/* NOTE(review): the members of struct e_stack were lost in extraction.  */
87 struct e_stack
GTY(()) {
91 static GTY(()) struct e_stack
*elist_stack
;
93 /* This variable keeps a table for types for each precision so that we only
94 allocate each of them once. Signed and unsigned types are kept separate.
96 Note that these types are only used when fold-const requests something
97 special. Perhaps we should NOT share these types; we'll see how it
99 static GTY(()) tree signed_and_unsigned_types
[2 * MAX_BITS_PER_WORD
+ 1][2];
101 /* Likewise for float types, but record these by mode. */
102 static GTY(()) tree float_types
[NUM_MACHINE_MODES
];
/* NOTE(review): damaged extraction -- the member declarations of
   struct binding_level (names, blocks, this_block) were dropped; only the
   level_chain member and the surrounding comments survive.  Code left
   byte-identical; comments only.  */
104 /* For each binding contour we allocate a binding_level structure which records
105 the entities defined or declared in that contour. Contours include:
108 one for each subprogram definition
109 one for each compound statement (declare block)
111 Binding contours are used to create GCC tree BLOCK nodes. */
113 struct binding_level
GTY(())
115 /* A chain of ..._DECL nodes for all variables, constants, functions,
116 parameters and type declarations. These ..._DECL nodes are chained
117 through the TREE_CHAIN field. Note that these ..._DECL nodes are stored
118 in the reverse of the order supplied to be compatible with the
121 /* For each level (except the global one), a chain of BLOCK nodes for all
122 the levels that were entered and exited one level down from this one. */
124 /* The BLOCK node for this level, if one has been preallocated.
125 If 0, the BLOCK is allocated (if needed) when the level is popped. */
127 /* The binding level containing this one (the enclosing binding level). */
128 struct binding_level
*level_chain
;
131 /* The binding level currently in effect. */
132 static GTY(()) struct binding_level
*current_binding_level
;
134 /* A chain of binding_level structures awaiting reuse. */
/* deletable: the GC may discard this free list at any collection point.  */
135 static GTY((deletable (""))) struct binding_level
*free_binding_level
;
137 /* The outermost binding level. This binding level is created when the
138 compiler is started and it will exist through the entire compilation. */
139 static struct binding_level
*global_binding_level
;
141 /* Binding level structures are initialized by copying this one. */
142 static struct binding_level clear_binding_level
= {NULL
, NULL
, NULL
, NULL
};
/* NOTE(review): damaged extraction -- struct language_function lost its
   members, and the parameter names were stripped from these prototypes by
   the original source (K&R-era style).  Code left byte-identical.  */
144 struct language_function
GTY(())
/* Forward declarations for the file-local helpers defined further below.  */
149 static tree
merge_sizes (tree
, tree
, tree
, int, int);
150 static tree
compute_related_constant (tree
, tree
);
151 static tree
split_plus (tree
, tree
*);
152 static int value_zerop (tree
);
153 static tree
float_type_for_precision (int, enum machine_mode
);
154 static tree
convert_to_fat_pointer (tree
, tree
);
155 static tree
convert_to_thin_pointer (tree
, tree
);
156 static tree
make_descriptor_field (const char *,tree
, tree
, tree
);
157 static int value_factor_p (tree
, int);
158 static int potential_alignment_gap (tree
, tree
, tree
);
/* NOTE(review): damaged extraction -- the return type and braces of this
   function were dropped.  It allocates the GC-cleared association table
   (one slot per GNAT node) and the empty pending-elaborations list.  */
160 /* Initialize the association of GNAT nodes to GCC trees. */
163 init_gnat_to_gnu (void)
165 associate_gnat_to_gnu
166 = (tree
*) ggc_alloc_cleared (max_gnat_nodes
* sizeof (tree
));
168 pending_elaborations
= build_tree_list (NULL_TREE
, NULL_TREE
);
/* NOTE(review): damaged extraction -- the "gigi 401" abort call mentioned in
   the comment, plus braces and part of the sanity-check condition, were
   dropped.  The surviving code stores GNU_DECL into the association table
   at index (gnat_entity - First_Node_Id).  Left byte-identical.  */
171 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
172 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
173 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
175 If GNU_DECL is zero, a previous association is to be reset. */
178 save_gnu_tree (Entity_Id gnat_entity
, tree gnu_decl
, int no_check
)
180 /* Check that GNAT_ENTITY is not already defined and that it is being set
181 to something which is a decl. Raise gigi 401 if not. Usually, this
182 means GNAT_ENTITY is defined twice, but occasionally is due to some
185 && (associate_gnat_to_gnu
[gnat_entity
- First_Node_Id
]
186 || (! no_check
&& ! DECL_P (gnu_decl
))))
189 associate_gnat_to_gnu
[gnat_entity
- First_Node_Id
] = gnu_decl
;
/* NOTE(review): damaged extraction -- the abort path for a missing
   association (between the `if' and the `return') and the braces were
   dropped.  Left byte-identical.  */
192 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
193 Return the ..._DECL node that was associated with it. If there is no tree
194 node associated with GNAT_ENTITY, abort.
196 In some cases, such as delayed elaboration or expressions that need to
197 be elaborated only once, GNAT_ENTITY is really not an entity. */
200 get_gnu_tree (Entity_Id gnat_entity
)
202 if (! associate_gnat_to_gnu
[gnat_entity
- First_Node_Id
])
205 return associate_gnat_to_gnu
[gnat_entity
- First_Node_Id
];
/* NOTE(review): damaged extraction -- return type and braces dropped.
   Simple predicate over the association table.  Left byte-identical.  */
208 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
211 present_gnu_tree (Entity_Id gnat_entity
)
213 return (associate_gnat_to_gnu
[gnat_entity
- First_Node_Id
] != NULL_TREE
);
/* NOTE(review): damaged extraction -- return type, braces and the closing
   of the return expression were dropped.  True when force_global is set or
   the current level is the outermost one.  Left byte-identical.  */
217 /* Return non-zero if we are currently in the global binding level. */
220 global_bindings_p (void)
222 return (force_global
!= 0 || current_binding_level
== global_binding_level
/* NOTE(review): damaged extraction -- the two function headers here were
   dropped; from the bodies these are presumably the `getdecls' and
   `kept_level_p' back-end hooks (TODO: confirm against pristine source).
   Both only read current_binding_level->names.  Left byte-identical.  */
226 /* Return the list of declarations in the current level. Note that this list
227 is in reverse order (it has to be so for back-end compatibility). */
232 return current_binding_level
->names
;
235 /* Nonzero if the current level needs to have a BLOCK made. */
240 return (current_binding_level
->names
!= 0);
/* NOTE(review): damaged extraction -- braces and the `else' branch header of
   the allocation (line 259 is its continuation) were dropped.  The function
   pops a recycled binding_level off free_binding_level when available,
   otherwise GC-allocates one, copies clear_binding_level into it, and
   pushes it onto the current_binding_level stack.  Left byte-identical.  */
243 /* Enter a new binding level. The input parameter is ignored, but has to be
244 specified for back-end compatibility. */
247 pushlevel (int ignore ATTRIBUTE_UNUSED
)
249 struct binding_level
*newlevel
= NULL
;
251 /* Reuse a struct for this binding level, if there is one. */
252 if (free_binding_level
)
254 newlevel
= free_binding_level
;
255 free_binding_level
= free_binding_level
->level_chain
;
259 = (struct binding_level
*) ggc_alloc (sizeof (struct binding_level
));
261 *newlevel
= clear_binding_level
;
263 /* Add this level to the front of the chain (stack) of levels that are
265 newlevel
->level_chain
= current_binding_level
;
266 current_binding_level
= newlevel
;
/* NOTE(review): damaged extraction -- local declarations (decl_chain,
   decl_node, subblock_node), several braces and conditionals were dropped,
   so control-flow nesting below cannot be fully reconstructed from this
   text alone.  Left byte-identical; comments only.  */
269 /* Exit a binding level.
270 Pop the level off, and restore the state of the identifier-decl mappings
271 that were in effect when this level was entered.
273 If KEEP is nonzero, this level had explicit declarations, so
274 and create a "block" (a BLOCK node) for the level
275 to record its declarations and subblocks for symbol table output.
277 If FUNCTIONBODY is nonzero, this level is the body of a function,
278 so create a block as if KEEP were set and also clear out all
281 If REVERSE is nonzero, reverse the order of decls before putting
282 them into the BLOCK. */
285 poplevel (int keep
, int reverse
, int functionbody
)
287 /* Points to a GCC BLOCK tree node. This is the BLOCK node construted for the
288 binding level that we are about to exit and which is returned by this
290 tree block
= NULL_TREE
;
293 tree subblock_chain
= current_binding_level
->blocks
;
295 int block_previously_created
;
297 /* Reverse the list of XXXX_DECL nodes if desired. Note that the ..._DECL
298 nodes chained through the `names' field of current_binding_level are in
299 reverse order except for PARM_DECL node, which are explicitly stored in
301 current_binding_level
->names
302 = decl_chain
= (reverse
) ? nreverse (current_binding_level
->names
)
303 : current_binding_level
->names
;
305 /* Output any nested inline functions within this block which must be
306 compiled because their address is needed. */
307 for (decl_node
= decl_chain
; decl_node
; decl_node
= TREE_CHAIN (decl_node
))
308 if (TREE_CODE (decl_node
) == FUNCTION_DECL
309 && ! TREE_ASM_WRITTEN (decl_node
) && TREE_ADDRESSABLE (decl_node
)
310 && DECL_INITIAL (decl_node
) != 0)
312 push_function_context ();
313 /* ??? This is temporary. */
315 output_inline_function (decl_node
);
317 pop_function_context ();
321 block_previously_created
= (current_binding_level
->this_block
!= 0);
322 if (block_previously_created
)
323 block
= current_binding_level
->this_block
;
324 else if (keep
|| functionbody
)
325 block
= make_node (BLOCK
);
328 BLOCK_VARS (block
) = keep
? decl_chain
: 0;
329 BLOCK_SUBBLOCKS (block
) = subblock_chain
;
332 /* Record the BLOCK node just built as the subblock its enclosing scope. */
333 for (subblock_node
= subblock_chain
; subblock_node
;
334 subblock_node
= TREE_CHAIN (subblock_node
))
335 BLOCK_SUPERCONTEXT (subblock_node
) = block
;
337 /* Clear out the meanings of the local variables of this level. */
339 for (subblock_node
= decl_chain
; subblock_node
;
340 subblock_node
= TREE_CHAIN (subblock_node
))
341 if (DECL_NAME (subblock_node
) != 0)
342 /* If the identifier was used or addressed via a local extern decl,
343 don't forget that fact. */
344 if (DECL_EXTERNAL (subblock_node
))
346 if (TREE_USED (subblock_node
))
347 TREE_USED (DECL_NAME (subblock_node
)) = 1;
348 if (TREE_ADDRESSABLE (subblock_node
))
349 TREE_ADDRESSABLE (DECL_ASSEMBLER_NAME (subblock_node
)) = 1;
353 /* Pop the current level, and free the structure for reuse. */
354 struct binding_level
*level
= current_binding_level
;
355 current_binding_level
= current_binding_level
->level_chain
;
356 level
->level_chain
= free_binding_level
;
357 free_binding_level
= level
;
362 /* This is the top level block of a function. The ..._DECL chain stored
363 in BLOCK_VARS are the function's parameters (PARM_DECL nodes). Don't
364 leave them in the BLOCK because they are found in the FUNCTION_DECL
366 DECL_INITIAL (current_function_decl
) = block
;
367 BLOCK_VARS (block
) = 0;
371 if (!block_previously_created
)
372 current_binding_level
->blocks
373 = chainon (current_binding_level
->blocks
, block
);
376 /* If we did not make a block for the level just exited, any blocks made for
377 inner levels (since they cannot be recorded as subblocks in that level)
378 must be carried forward so they will later become subblocks of something
380 else if (subblock_chain
)
381 current_binding_level
->blocks
382 = chainon (current_binding_level
->blocks
, subblock_chain
);
384 TREE_USED (block
) = 1;
/* NOTE(review): damaged extraction -- return type and braces dropped.
   Marks BLOCK used and appends it to the current level's subblock chain.  */
389 /* Insert BLOCK at the end of the list of subblocks of the
390 current binding level. This is used when a BIND_EXPR is expanded,
391 to handle the BLOCK node inside the BIND_EXPR. */
394 insert_block (tree block
)
396 TREE_USED (block
) = 1;
397 current_binding_level
->blocks
398 = chainon (current_binding_level
->blocks
, block
);
/* NOTE(review): damaged extraction -- return type, braces and the second
   argument of the first chainon (original line 409, presumably
   BLOCK_VARS (block)) were dropped.  Records BLOCK as this level's
   preallocated block and merges its vars/subblocks into the level.  */
401 /* Set the BLOCK node for the innermost scope
402 (the one we are currently in). */
405 set_block (tree block
)
407 current_binding_level
->this_block
= block
;
408 current_binding_level
->names
= chainon (current_binding_level
->names
,
410 current_binding_level
->blocks
= chainon (current_binding_level
->blocks
,
411 BLOCK_SUBBLOCKS (block
));
/* NOTE(review): damaged extraction -- the function header (presumably
   `tree pushdecl (tree decl)'; TODO confirm), braces, the `else' of the
   top-level test, the b->names assignment and the return statement were
   dropped.  Left byte-identical; comments only.  */
414 /* Records a ..._DECL node DECL as belonging to the current lexical scope.
415 Returns the ..._DECL node. */
420 struct binding_level
*b
;
422 /* If at top level, there is no context. But PARM_DECLs always go in the
423 level of its function. */
424 if (global_bindings_p () && TREE_CODE (decl
) != PARM_DECL
)
426 b
= global_binding_level
;
427 DECL_CONTEXT (decl
) = 0;
431 b
= current_binding_level
;
432 DECL_CONTEXT (decl
) = current_function_decl
;
435 /* Put the declaration on the list. The list of declarations is in reverse
436 order. The list will be reversed later if necessary. This needs to be
437 this way for compatibility with the back-end.
439 Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into the list. They
440 will cause trouble with the debugger and aren't needed anyway. */
441 if (TREE_CODE (decl
) != TYPE_DECL
442 || TREE_CODE (TREE_TYPE (decl
)) != UNCONSTRAINED_ARRAY_TYPE
)
444 TREE_CHAIN (decl
) = b
->names
;
448 /* For the declaration of a type, set its name if it either is not already
449 set, was set to an IDENTIFIER_NODE, indicating an internal name,
450 or if the previous type name was not derived from a source name.
451 We'd rather have the type named with a real name and all the pointer
452 types to the same object have the same POINTER_TYPE node. Code in this
453 function in c-decl.c makes a copy of the type node here, but that may
454 cause us trouble with incomplete types, so let's not try it (at least
457 if (TREE_CODE (decl
) == TYPE_DECL
458 && DECL_NAME (decl
) != 0
459 && (TYPE_NAME (TREE_TYPE (decl
)) == 0
460 || TREE_CODE (TYPE_NAME (TREE_TYPE (decl
))) == IDENTIFIER_NODE
461 || (TREE_CODE (TYPE_NAME (TREE_TYPE (decl
))) == TYPE_DECL
462 && DECL_ARTIFICIAL (TYPE_NAME (TREE_TYPE (decl
)))
463 && ! DECL_ARTIFICIAL (decl
))))
464 TYPE_NAME (TREE_TYPE (decl
)) = decl
;
/* NOTE(review): damaged extraction -- return type, braces, the pushlevel
   call and the type arguments of the "integer"/"unsigned char" decls were
   dropped.  Sets up the global binding level and the common tree nodes,
   forcing a signed sizetype the width of Pmode.  Left byte-identical.  */
469 /* Do little here. Set up the standard declarations later after the
470 front end has been run. */
473 gnat_init_decl_processing (void)
477 /* Make the binding_level structure for global names. */
478 current_function_decl
= 0;
479 current_binding_level
= 0;
480 free_binding_level
= 0;
482 global_binding_level
= current_binding_level
;
484 build_common_tree_nodes (0);
486 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
487 corresponding to the size of Pmode. In most cases when ptr_mode and
488 Pmode differ, C will use the width of ptr_mode as sizetype. But we get
489 far better code using the width of Pmode. Make this here since we need
490 this before we can expand the GNAT types. */
491 set_sizetype (gnat_type_for_size (GET_MODE_BITSIZE (Pmode
), 0));
492 build_common_tree_nodes_2 (0);
494 pushdecl (build_decl (TYPE_DECL
, get_identifier (SIZE_TYPE
), sizetype
));
496 /* We need to make the integer type before doing anything else.
497 We stitch this in to the appropriate GNAT type later. */
498 pushdecl (build_decl (TYPE_DECL
, get_identifier ("integer"),
500 pushdecl (build_decl (TYPE_DECL
, get_identifier ("unsigned char"),
503 ptr_void_type_node
= build_pointer_type (void_type_node
);
/* NOTE(review): damaged extraction -- braces, local declarations (endlink,
   decl, i, name, the *_decl globals being assigned), several argument
   lists and the left-hand sides of many `= create_subprog_decl' assignments
   were dropped, so which decl each call initializes can only be inferred
   from the runtime names in the string literals.  Left byte-identical;
   comments only.  */
507 /* Create the predefined scalar types such as `integer_type_node' needed
508 in the gcc back-end and initialize the global binding level. */
511 init_gigi_decls (tree long_long_float_type
, tree exception_type
)
516 /* Set the types that GCC and Gigi use from the front end. We would like
517 to do this for char_type_node, but it needs to correspond to the C
519 if (TREE_CODE (TREE_TYPE (long_long_float_type
)) == INTEGER_TYPE
)
521 /* In this case, the builtin floating point types are VAX float,
522 so make up a type for use. */
523 longest_float_type_node
= make_node (REAL_TYPE
);
524 TYPE_PRECISION (longest_float_type_node
) = LONG_DOUBLE_TYPE_SIZE
;
525 layout_type (longest_float_type_node
);
526 pushdecl (build_decl (TYPE_DECL
, get_identifier ("longest float type"),
527 longest_float_type_node
));
530 longest_float_type_node
= TREE_TYPE (long_long_float_type
);
532 except_type_node
= TREE_TYPE (exception_type
);
534 unsigned_type_node
= gnat_type_for_size (INT_TYPE_SIZE
, 1);
535 pushdecl (build_decl (TYPE_DECL
, get_identifier ("unsigned int"),
536 unsigned_type_node
));
539 = pushdecl (build_decl (TYPE_DECL
, get_identifier ("void"),
542 void_ftype
= build_function_type (void_type_node
, NULL_TREE
);
543 ptr_void_ftype
= build_pointer_type (void_ftype
);
545 /* Now declare runtime functions. */
546 endlink
= tree_cons (NULL_TREE
, void_type_node
, NULL_TREE
);
548 /* malloc is a function declaration tree for a function to allocate
550 malloc_decl
= create_subprog_decl (get_identifier ("__gnat_malloc"),
552 build_function_type (ptr_void_type_node
,
553 tree_cons (NULL_TREE
,
556 NULL_TREE
, 0, 1, 1, 0);
558 /* free is a function declaration tree for a function to free memory. */
560 = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE
,
561 build_function_type (void_type_node
,
562 tree_cons (NULL_TREE
,
565 NULL_TREE
, 0, 1, 1, 0);
567 /* Make the types and functions used for exception processing. */
569 = build_array_type (gnat_type_for_mode (Pmode
, 0),
570 build_index_type (build_int_2 (5, 0)));
571 pushdecl (build_decl (TYPE_DECL
, get_identifier ("JMPBUF_T"), jmpbuf_type
));
572 jmpbuf_ptr_type
= build_pointer_type (jmpbuf_type
);
574 /* Functions to get and set the jumpbuf pointer for the current thread. */
576 = create_subprog_decl
577 (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
578 NULL_TREE
, build_function_type (jmpbuf_ptr_type
, NULL_TREE
),
579 NULL_TREE
, 0, 1, 1, 0);
582 = create_subprog_decl
583 (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
585 build_function_type (void_type_node
,
586 tree_cons (NULL_TREE
, jmpbuf_ptr_type
, endlink
)),
587 NULL_TREE
, 0, 1, 1, 0);
589 /* Function to get the current exception. */
591 = create_subprog_decl
592 (get_identifier ("system__soft_links__get_gnat_exception"),
594 build_function_type (build_pointer_type (except_type_node
), NULL_TREE
),
595 NULL_TREE
, 0, 1, 1, 0);
597 /* Functions that raise exceptions. */
599 = create_subprog_decl
600 (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE
,
601 build_function_type (void_type_node
,
602 tree_cons (NULL_TREE
,
603 build_pointer_type (except_type_node
),
605 NULL_TREE
, 0, 1, 1, 0);
607 /* Hooks to call when entering/leaving an exception handler. */
609 = create_subprog_decl (get_identifier ("__gnat_begin_handler"), NULL_TREE
,
610 build_function_type (void_type_node
,
611 tree_cons (NULL_TREE
,
614 NULL_TREE
, 0, 1, 1, 0);
617 = create_subprog_decl (get_identifier ("__gnat_end_handler"), NULL_TREE
,
618 build_function_type (void_type_node
,
619 tree_cons (NULL_TREE
,
622 NULL_TREE
, 0, 1, 1, 0);
624 /* If in no exception handlers mode, all raise statements are redirected to
625 __gnat_last_chance_handler. No need to redefine raise_nodefer_decl, since
626 this procedure will never be called in this mode. */
627 if (No_Exception_Handlers_Set ())
630 = create_subprog_decl
631 (get_identifier ("__gnat_last_chance_handler"), NULL_TREE
,
632 build_function_type (void_type_node
,
633 tree_cons (NULL_TREE
,
634 build_pointer_type (char_type_node
),
635 tree_cons (NULL_TREE
,
638 NULL_TREE
, 0, 1, 1, 0);
640 for (i
= 0; i
< ARRAY_SIZE (gnat_raise_decls
); i
++)
641 gnat_raise_decls
[i
] = decl
;
644 /* Otherwise, make one decl for each exception reason. */
645 for (i
= 0; i
< ARRAY_SIZE (gnat_raise_decls
); i
++)
649 sprintf (name
, "__gnat_rcheck_%.2d", i
);
651 = create_subprog_decl
652 (get_identifier (name
), NULL_TREE
,
653 build_function_type (void_type_node
,
654 tree_cons (NULL_TREE
,
657 tree_cons (NULL_TREE
,
660 NULL_TREE
, 0, 1, 1, 0);
663 /* Indicate that these never return. */
664 TREE_THIS_VOLATILE (raise_nodefer_decl
) = 1;
665 TREE_SIDE_EFFECTS (raise_nodefer_decl
) = 1;
666 TREE_TYPE (raise_nodefer_decl
)
667 = build_qualified_type (TREE_TYPE (raise_nodefer_decl
),
670 for (i
= 0; i
< ARRAY_SIZE (gnat_raise_decls
); i
++)
672 TREE_THIS_VOLATILE (gnat_raise_decls
[i
]) = 1;
673 TREE_SIDE_EFFECTS (gnat_raise_decls
[i
]) = 1;
674 TREE_TYPE (gnat_raise_decls
[i
])
675 = build_qualified_type (TREE_TYPE (gnat_raise_decls
[i
]),
679 /* setjmp returns an integer and has one operand, which is a pointer to
682 = create_subprog_decl
683 (get_identifier ("__builtin_setjmp"), NULL_TREE
,
684 build_function_type (integer_type_node
,
685 tree_cons (NULL_TREE
, jmpbuf_ptr_type
, endlink
)),
686 NULL_TREE
, 0, 1, 1, 0);
688 DECL_BUILT_IN_CLASS (setjmp_decl
) = BUILT_IN_NORMAL
;
689 DECL_FUNCTION_CODE (setjmp_decl
) = BUILT_IN_SETJMP
;
691 /* update_setjmp_buf updates a setjmp buffer from the current stack pointer
693 update_setjmp_buf_decl
694 = create_subprog_decl
695 (get_identifier ("__builtin_update_setjmp_buf"), NULL_TREE
,
696 build_function_type (void_type_node
,
697 tree_cons (NULL_TREE
, jmpbuf_ptr_type
, endlink
)),
698 NULL_TREE
, 0, 1, 1, 0);
700 DECL_BUILT_IN_CLASS (update_setjmp_buf_decl
) = BUILT_IN_NORMAL
;
701 DECL_FUNCTION_CODE (update_setjmp_buf_decl
) = BUILT_IN_UPDATE_SETJMP_BUF
;
703 main_identifier_node
= get_identifier ("main");
/* NOTE(review): damaged extraction -- the parameter list after record_type
   (fieldlist, has_rep, defer_debug per the header comment), many local
   declarations (field, var, new_record_type, new_id, new_field, old_field,
   suffix, the `switch' statement the `case QUAL_UNION_TYPE:' belongs to),
   plus braces and `else' branches were dropped, so the control flow below
   cannot be fully reconstructed from this text.  Left byte-identical;
   comments only.  */
706 /* Given a record type (RECORD_TYPE) and a chain of FIELD_DECL
707 nodes (FIELDLIST), finish constructing the record or union type.
708 If HAS_REP is nonzero, this record has a rep clause; don't call
709 layout_type but merely set the size and alignment ourselves.
710 If DEFER_DEBUG is nonzero, do not call the debugging routines
711 on this type; it will be done later. */
714 finish_record_type (tree record_type
,
719 enum tree_code code
= TREE_CODE (record_type
);
720 tree ada_size
= bitsize_zero_node
;
721 tree size
= bitsize_zero_node
;
722 tree size_unit
= size_zero_node
;
726 TYPE_FIELDS (record_type
) = fieldlist
;
728 if (TYPE_NAME (record_type
) != 0
729 && TREE_CODE (TYPE_NAME (record_type
)) == TYPE_DECL
)
730 TYPE_STUB_DECL (record_type
) = TYPE_NAME (record_type
);
732 TYPE_STUB_DECL (record_type
)
733 = pushdecl (build_decl (TYPE_DECL
, TYPE_NAME (record_type
),
736 /* We don't need both the typedef name and the record name output in
737 the debugging information, since they are the same. */
738 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type
)) = 1;
740 /* Globally initialize the record first. If this is a rep'ed record,
741 that just means some initializations; otherwise, layout the record. */
745 TYPE_ALIGN (record_type
) = MAX (BITS_PER_UNIT
, TYPE_ALIGN (record_type
));
746 TYPE_MODE (record_type
) = BLKmode
;
747 if (TYPE_SIZE (record_type
) == 0)
749 TYPE_SIZE (record_type
) = bitsize_zero_node
;
750 TYPE_SIZE_UNIT (record_type
) = size_zero_node
;
752 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
753 out just like a UNION_TYPE, since the size will be fixed. */
754 else if (code
== QUAL_UNION_TYPE
)
759 /* Ensure there isn't a size already set. There can be in an error
760 case where there is a rep clause but all fields have errors and
761 no longer have a position. */
762 TYPE_SIZE (record_type
) = 0;
763 layout_type (record_type
);
766 /* At this point, the position and size of each field is known. It was
767 either set before entry by a rep clause, or by laying out the type above.
769 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
770 to compute the Ada size; the GCC size and alignment (for rep'ed records
771 that are not padding types); and the mode (for rep'ed records). We also
772 clear the DECL_BIT_FIELD indication for the cases we know have not been
773 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
775 if (code
== QUAL_UNION_TYPE
)
776 fieldlist
= nreverse (fieldlist
);
778 for (field
= fieldlist
; field
; field
= TREE_CHAIN (field
))
780 tree pos
= bit_position (field
);
782 tree type
= TREE_TYPE (field
);
783 tree this_size
= DECL_SIZE (field
);
784 tree this_size_unit
= DECL_SIZE_UNIT (field
);
785 tree this_ada_size
= DECL_SIZE (field
);
787 /* We need to make an XVE/XVU record if any field has variable size,
788 whether or not the record does. For example, if we have an union,
789 it may be that all fields, rounded up to the alignment, have the
790 same size, in which case we'll use that size. But the debug
791 output routines (except Dwarf2) won't be able to output the fields,
792 so we need to make the special record. */
793 if (TREE_CODE (this_size
) != INTEGER_CST
)
796 if ((TREE_CODE (type
) == RECORD_TYPE
|| TREE_CODE (type
) == UNION_TYPE
797 || TREE_CODE (type
) == QUAL_UNION_TYPE
)
798 && ! TYPE_IS_FAT_POINTER_P (type
)
799 && ! TYPE_CONTAINS_TEMPLATE_P (type
)
800 && TYPE_ADA_SIZE (type
) != 0)
801 this_ada_size
= TYPE_ADA_SIZE (type
);
803 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
804 if (DECL_BIT_FIELD (field
) && !STRICT_ALIGNMENT
805 && value_factor_p (pos
, BITS_PER_UNIT
)
806 && operand_equal_p (this_size
, TYPE_SIZE (type
), 0))
807 DECL_BIT_FIELD (field
) = 0;
809 /* If we still have DECL_BIT_FIELD set at this point, we know the field
810 is technically not addressable. Except that it can actually be
811 addressed if the field is BLKmode and happens to be properly
813 DECL_NONADDRESSABLE_P (field
)
814 |= DECL_BIT_FIELD (field
) && DECL_MODE (field
) != BLKmode
;
816 if (has_rep
&& ! DECL_BIT_FIELD (field
))
817 TYPE_ALIGN (record_type
)
818 = MAX (TYPE_ALIGN (record_type
), DECL_ALIGN (field
));
823 ada_size
= size_binop (MAX_EXPR
, ada_size
, this_ada_size
);
824 size
= size_binop (MAX_EXPR
, size
, this_size
);
825 size_unit
= size_binop (MAX_EXPR
, size_unit
, this_size_unit
);
828 case QUAL_UNION_TYPE
:
830 = fold (build (COND_EXPR
, bitsizetype
, DECL_QUALIFIER (field
),
831 this_ada_size
, ada_size
));
832 size
= fold (build (COND_EXPR
, bitsizetype
, DECL_QUALIFIER (field
),
834 size_unit
= fold (build (COND_EXPR
, sizetype
, DECL_QUALIFIER (field
),
835 this_size_unit
, size_unit
));
839 /* Since we know here that all fields are sorted in order of
840 increasing bit position, the size of the record is one
841 higher than the ending bit of the last field processed
842 unless we have a rep clause, since in that case we might
843 have a field outside a QUAL_UNION_TYPE that has a higher ending
844 position. So use a MAX in that case. Also, if this field is a
845 QUAL_UNION_TYPE, we need to take into account the previous size in
846 the case of empty variants. */
848 = merge_sizes (ada_size
, pos
, this_ada_size
,
849 TREE_CODE (type
) == QUAL_UNION_TYPE
, has_rep
);
850 size
= merge_sizes (size
, pos
, this_size
,
851 TREE_CODE (type
) == QUAL_UNION_TYPE
, has_rep
);
853 = merge_sizes (size_unit
, byte_position (field
), this_size_unit
,
854 TREE_CODE (type
) == QUAL_UNION_TYPE
, has_rep
);
862 if (code
== QUAL_UNION_TYPE
)
863 nreverse (fieldlist
);
865 /* If this is a padding record, we never want to make the size smaller than
866 what was specified in it, if any. */
867 if (TREE_CODE (record_type
) == RECORD_TYPE
868 && TYPE_IS_PADDING_P (record_type
) && TYPE_SIZE (record_type
) != 0)
870 size
= TYPE_SIZE (record_type
);
871 size_unit
= TYPE_SIZE_UNIT (record_type
);
874 /* Now set any of the values we've just computed that apply. */
875 if (! TYPE_IS_FAT_POINTER_P (record_type
)
876 && ! TYPE_CONTAINS_TEMPLATE_P (record_type
))
877 SET_TYPE_ADA_SIZE (record_type
, ada_size
);
881 if (! (TREE_CODE (record_type
) == RECORD_TYPE
882 && TYPE_IS_PADDING_P (record_type
)
883 && CONTAINS_PLACEHOLDER_P (size
)))
885 TYPE_SIZE (record_type
) = round_up (size
, TYPE_ALIGN (record_type
));
886 TYPE_SIZE_UNIT (record_type
)
887 = round_up (size_unit
,
888 TYPE_ALIGN (record_type
) / BITS_PER_UNIT
);
891 compute_record_mode (record_type
);
896 /* If this record is of variable size, rename it so that the
897 debugger knows it is and make a new, parallel, record
898 that tells the debugger how the record is laid out. See
899 exp_dbug.ads. But don't do this for records that are padding
900 since they confuse GDB. */
902 && ! (TREE_CODE (record_type
) == RECORD_TYPE
903 && TYPE_IS_PADDING_P (record_type
)))
906 = make_node (TREE_CODE (record_type
) == QUAL_UNION_TYPE
907 ? UNION_TYPE
: TREE_CODE (record_type
));
908 tree orig_id
= DECL_NAME (TYPE_STUB_DECL (record_type
));
910 = concat_id_with_name (orig_id
,
911 TREE_CODE (record_type
) == QUAL_UNION_TYPE
913 tree last_pos
= bitsize_zero_node
;
915 tree prev_old_field
= 0;
917 TYPE_NAME (new_record_type
) = new_id
;
918 TYPE_ALIGN (new_record_type
) = BIGGEST_ALIGNMENT
;
919 TYPE_STUB_DECL (new_record_type
)
920 = pushdecl (build_decl (TYPE_DECL
, new_id
, new_record_type
));
921 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type
)) = 1;
922 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type
))
923 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type
));
924 TYPE_SIZE (new_record_type
) = size_int (TYPE_ALIGN (record_type
));
926 /* Now scan all the fields, replacing each field with a new
927 field corresponding to the new encoding. */
928 for (old_field
= TYPE_FIELDS (record_type
); old_field
!= 0;
929 old_field
= TREE_CHAIN (old_field
))
931 tree field_type
= TREE_TYPE (old_field
);
932 tree field_name
= DECL_NAME (old_field
);
934 tree curpos
= bit_position (old_field
);
936 unsigned int align
= 0;
939 /* See how the position was modified from the last position.
941 There are two basic cases we support: a value was added
942 to the last position or the last position was rounded to
943 a boundary and they something was added. Check for the
944 first case first. If not, see if there is any evidence
945 of rounding. If so, round the last position and try
948 If this is a union, the position can be taken as zero. */
950 if (TREE_CODE (new_record_type
) == UNION_TYPE
)
951 pos
= bitsize_zero_node
, align
= 0;
953 pos
= compute_related_constant (curpos
, last_pos
);
955 if (pos
== 0 && TREE_CODE (curpos
) == MULT_EXPR
956 && TREE_CODE (TREE_OPERAND (curpos
, 1)) == INTEGER_CST
)
958 align
= TREE_INT_CST_LOW (TREE_OPERAND (curpos
, 1));
959 pos
= compute_related_constant (curpos
,
960 round_up (last_pos
, align
));
962 else if (pos
== 0 && TREE_CODE (curpos
) == PLUS_EXPR
963 && TREE_CODE (TREE_OPERAND (curpos
, 1)) == INTEGER_CST
964 && TREE_CODE (TREE_OPERAND (curpos
, 0)) == MULT_EXPR
965 && host_integerp (TREE_OPERAND
966 (TREE_OPERAND (curpos
, 0), 1),
971 (TREE_OPERAND (TREE_OPERAND (curpos
, 0), 1), 1);
972 pos
= compute_related_constant (curpos
,
973 round_up (last_pos
, align
));
975 else if (potential_alignment_gap (prev_old_field
, old_field
,
978 align
= TYPE_ALIGN (field_type
);
979 pos
= compute_related_constant (curpos
,
980 round_up (last_pos
, align
));
983 /* If we can't compute a position, set it to zero.
985 ??? We really should abort here, but it's too much work
986 to get this correct for all cases. */
989 pos
= bitsize_zero_node
;
991 /* See if this type is variable-size and make a new type
992 and indicate the indirection if so. */
993 if (TREE_CODE (DECL_SIZE (old_field
)) != INTEGER_CST
)
995 field_type
= build_pointer_type (field_type
);
999 /* Make a new field name, if necessary. */
1000 if (var
|| align
!= 0)
1005 sprintf (suffix
, "XV%c%u", var
? 'L' : 'A',
1006 align
/ BITS_PER_UNIT
);
1008 strcpy (suffix
, "XVL");
1010 field_name
= concat_id_with_name (field_name
, suffix
);
1013 new_field
= create_field_decl (field_name
, field_type
,
1015 DECL_SIZE (old_field
), pos
, 0);
1016 TREE_CHAIN (new_field
) = TYPE_FIELDS (new_record_type
);
1017 TYPE_FIELDS (new_record_type
) = new_field
;
1019 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1020 zero. The only time it's not the last field of the record
1021 is when there are other components at fixed positions after
1022 it (meaning there was a rep clause for every field) and we
1023 want to be able to encode them. */
1024 last_pos
= size_binop (PLUS_EXPR
, bit_position (old_field
),
1025 (TREE_CODE (TREE_TYPE (old_field
))
1028 : DECL_SIZE (old_field
));
1029 prev_old_field
= old_field
;
1032 TYPE_FIELDS (new_record_type
)
1033 = nreverse (TYPE_FIELDS (new_record_type
));
1035 rest_of_type_compilation (new_record_type
, global_bindings_p ());
1038 rest_of_type_compilation (record_type
, global_bindings_p ());
/* Utility function of above to merge LAST_SIZE, the previous size of a record
   with FIRST_BIT and SIZE that describe a field.  SPECIAL is nonzero
   if this represents a QUAL_UNION_TYPE in which case we must look for
   COND_EXPRs and replace a value of zero with the old size.  If HAS_REP
   is nonzero, we must take the MAX of the end position of this field
   with LAST_SIZE.  In all other cases, we use FIRST_BIT plus SIZE.

   We return an expression for the size.

   NOTE(review): a few dropped lines (braces, parameter list tail) were
   restored from context during extraction repair -- confirm against the
   original file before building.  */

static tree
merge_sizes (tree last_size, tree first_bit, tree size, int special,
	     int has_rep)
{
  tree type = TREE_TYPE (last_size);
  tree new;

  if (! special || TREE_CODE (size) != COND_EXPR)
    {
      /* Ordinary field: its end position is FIRST_BIT + SIZE.  With a rep
	 clause the record size is the MAX of that end and LAST_SIZE.  */
      new = size_binop (PLUS_EXPR, first_bit, size);
      if (has_rep)
	new = size_binop (MAX_EXPR, last_size, new);
    }
  else
    /* QUAL_UNION variant: distribute the merge over both arms of the
       COND_EXPR, replacing a zero size by the previous size.  */
    new = fold (build (COND_EXPR, type, TREE_OPERAND (size, 0),
		       integer_zerop (TREE_OPERAND (size, 1))
		       ? last_size : merge_sizes (last_size, first_bit,
						  TREE_OPERAND (size, 1),
						  1, has_rep),
		       integer_zerop (TREE_OPERAND (size, 2))
		       ? last_size : merge_sizes (last_size, first_bit,
						  TREE_OPERAND (size, 2),
						  1, has_rep)));

  /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
     when fed through substitute_in_expr) into thinking that a constant
     size is not constant.  */
  while (TREE_CODE (new) == NON_LVALUE_EXPR)
    new = TREE_OPERAND (new, 0);

  return new;
}
/* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
   related by the addition of a constant.  Return that constant if so,
   a null tree otherwise.  */

static tree
compute_related_constant (tree op0, tree op1)
{
  tree op0_var, op1_var;
  /* Split each operand into a constant part and a variable part.  */
  tree op0_con = split_plus (op0, &op0_var);
  tree op1_con = split_plus (op1, &op1_var);
  tree result = size_binop (MINUS_EXPR, op0_con, op1_con);

  /* The difference of the constant parts is the answer when the variable
     parts agree, either structurally or after re-adding the difference.  */
  if (operand_equal_p (op0_var, op1_var, 0))
    return result;
  else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
    return result;
  else
    return 0;
}
/* Utility function of above to split a tree OP which may be a sum, into a
   constant part, which is returned, and a variable part, which is stored
   in *PVAR.  *PVAR may be bitsize_zero_node.  All operations must be of
   bitsizetype.  */

static tree
split_plus (tree in, tree *pvar)
{
  /* Strip NOPS in order to ease the tree traversal and maximize the
     potential for constant or plus/minus discovery.  We need to be careful
     to always return and set *pvar to bitsizetype trees, but it's worth
     the effort.  */
  STRIP_NOPS (in);

  /* Default: the whole expression is the variable part.  */
  *pvar = convert (bitsizetype, in);

  if (TREE_CODE (in) == INTEGER_CST)
    {
      /* Pure constant: no variable part at all.  */
      *pvar = bitsize_zero_node;
      return convert (bitsizetype, in);
    }
  else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
    {
      /* Recurse into both operands and recombine the two halves with the
	 same +/- operator.  */
      tree lhs_var, rhs_var;
      tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
      tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);

      /* If neither side had a constant part, keep IN itself as *pvar
	 (set above) and report a zero constant part.  */
      if (lhs_var == TREE_OPERAND (in, 0)
	  && rhs_var == TREE_OPERAND (in, 1))
	return bitsize_zero_node;

      *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
      return size_binop (TREE_CODE (in), lhs_con, rhs_con);
    }
  else
    return bitsize_zero_node;
}
/* Return a FUNCTION_TYPE node.  RETURN_TYPE is the type returned by the
   subprogram.  If it is void_type_node, then we are dealing with a procedure,
   otherwise we are dealing with a function.  PARAM_DECL_LIST is a list of
   PARM_DECL nodes that are the subprogram arguments.  CICO_LIST is the
   copy-in/copy-out list to be stored into TYPE_CICO_LIST.
   RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
   object.  RETURNS_BY_REF is nonzero if the function returns by reference.
   RETURNS_WITH_DSP is nonzero if the function is to return with a
   depressed stack pointer.

   NOTE(review): the parameter list and local declarations were partially
   lost in extraction and restored from context -- verify before use.  */

tree
create_subprog_type (tree return_type,
		     tree param_decl_list,
		     tree cico_list,
		     int returns_unconstrained,
		     int returns_by_ref,
		     int returns_with_dsp)
{
  /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
     the subprogram formal parameters.  This list is generated by traversing the
     input list of PARM_DECL nodes.  */
  tree param_type_list = NULL;
  tree param_decl;
  tree type;

  for (param_decl = param_decl_list; param_decl;
       param_decl = TREE_CHAIN (param_decl))
    param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
				 param_type_list);

  /* The list of the function parameter types has to be terminated by the void
     type to signal to the back-end that we are not dealing with a variable
     parameter subprogram, but that the subprogram has a fixed number of
     parameters.  */
  param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);

  /* The list of argument types has been created in reverse
     order; undo that.  */
  param_type_list = nreverse (param_type_list);

  type = build_function_type (return_type, param_type_list);

  /* TYPE may have been shared since GCC hashes types.  If it has a CICO_LIST
     or the new type should, make a copy of TYPE.  Likewise for
     RETURNS_UNCONSTRAINED and RETURNS_BY_REF.  */
  if (TYPE_CI_CO_LIST (type) != 0 || cico_list != 0
      || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
      || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref)
    type = copy_type (type);

  SET_TYPE_CI_CO_LIST (type, cico_list);
  TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
  TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
  TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;

  return type;
}
/* Return a copy of TYPE but safe to modify in any way.  */

tree
copy_type (tree type)
{
  tree new = copy_node (type);

  /* copy_node clears this field instead of copying it, because it is
     aliased with TREE_CHAIN.  */
  TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);

  /* Detach the copy from the original's variant and pointer chains so that
     modifying it cannot affect the original type.  */
  TYPE_POINTER_TO (new) = 0;
  TYPE_REFERENCE_TO (new) = 0;
  TYPE_MAIN_VARIANT (new) = new;
  TYPE_NEXT_VARIANT (new) = 0;

  return new;
}
/* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
   TYPE_INDEX_TYPE is INDEX.  */

tree
create_index_type (tree min, tree max, tree index)
{
  /* First build a type for the desired range.  */
  tree type = build_index_2_type (min, max);

  /* If this type has the TYPE_INDEX_TYPE we want, return it.  Otherwise, if it
     doesn't have TYPE_INDEX_TYPE set, set it to INDEX.  If TYPE_INDEX_TYPE
     is set, but not to INDEX, make a copy of this type with the requested
     index type.  Note that we have no way of sharing these types, but that's
     only a small hole.  */
  if (TYPE_INDEX_TYPE (type) == index)
    return type;
  else if (TYPE_INDEX_TYPE (type) != 0)
    type = copy_type (type);

  SET_TYPE_INDEX_TYPE (type, index);
  return type;
}
/* Return a TYPE_DECL node.  TYPE_NAME gives the name of the type (a character
   string) and TYPE is a ..._TYPE node giving its data type.
   ARTIFICIAL_P is nonzero if this is a declaration that was generated
   by the compiler.  DEBUG_INFO_P is nonzero if we need to write debugging
   information about this type.

   NOTE(review): part of the parameter list and the trailing return were
   lost in extraction and restored from context -- verify before use.  */

tree
create_type_decl (tree type_name,
		  tree type,
		  struct attrib *attr_list,
		  int artificial_p,
		  int debug_info_p)
{
  tree type_decl = build_decl (TYPE_DECL, type_name, type);
  enum tree_code code = TREE_CODE (type);

  DECL_ARTIFICIAL (type_decl) = artificial_p;
  pushdecl (type_decl);
  process_attributes (type_decl, attr_list);

  /* Pass type declaration information to the debugger unless this is an
     UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
     and ENUMERAL_TYPE or RECORD_TYPE which is handled separately,
     a dummy type, which will be completed later, or a type for which
     debugging information was not requested.  */
  if (code == UNCONSTRAINED_ARRAY_TYPE || TYPE_IS_DUMMY_P (type)
      || ! debug_info_p)
    DECL_IGNORED_P (type_decl) = 1;
  else if (code != ENUMERAL_TYPE && code != RECORD_TYPE
	   && ! ((code == POINTER_TYPE || code == REFERENCE_TYPE)
		 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
    rest_of_decl_compilation (type_decl, NULL, global_bindings_p (), 0);

  return type_decl;
}
/* Returns a GCC VAR_DECL node.  VAR_NAME gives the name of the variable.
   ASM_NAME is its assembler name (if provided).  TYPE is its data type
   (a GCC ..._TYPE node).  VAR_INIT is the GCC tree for an optional initial
   expression; NULL_TREE if none.

   CONST_FLAG is nonzero if this variable is constant.

   PUBLIC_FLAG is nonzero if this definition is to be made visible outside of
   the current compilation unit.  This flag should be set when processing the
   variable definitions in a package specification.  EXTERN_FLAG is nonzero
   when processing an external variable declaration (as opposed to a
   definition: no storage is to be allocated for the variable here).

   STATIC_FLAG is only relevant when not at top level.  In that case
   it indicates whether to always allocate storage to the variable.

   NOTE(review): the parameter list head and several single-statement lines
   (e.g. the var_init clearing, the final return) were lost in extraction
   and restored from context -- verify before use.  */

tree
create_var_decl (tree var_name,
		 tree asm_name,
		 tree type,
		 tree var_init,
		 int const_flag,
		 int public_flag,
		 int extern_flag,
		 int static_flag,
		 struct attrib *attr_list)
{
  /* Whether the initializer is a compile-time constant usable as such.  */
  int init_const
    = (var_init == 0
       ? 0
       : (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
	  && (global_bindings_p () || static_flag
	      ? 0 != initializer_constant_valid_p (var_init,
						   TREE_TYPE (var_init))
	      : TREE_CONSTANT (var_init))));
  tree var_decl
    = build_decl ((const_flag && init_const
		   /* Only make a CONST_DECL for sufficiently-small objects.
		      We consider complex double "sufficiently-small"  */
		   && TYPE_SIZE (type) != 0
		   && host_integerp (TYPE_SIZE_UNIT (type), 1)
		   && 0 >= compare_tree_int (TYPE_SIZE_UNIT (type),
					     GET_MODE_SIZE (DCmode)))
		  ? CONST_DECL : VAR_DECL, var_name, type);
  tree assign_init = 0;

  /* If this is external, throw away any initializations unless this is a
     CONST_DECL (meaning we have a constant); they will be done elsewhere.  If
     we are defining a global here, leave a constant initialization and save
     any variable elaborations for the elaboration routine.  Otherwise, if
     the initializing expression is not the same as TYPE, generate the
     initialization with an assignment statement, since it knows how
     to do the required adjustments.  If we are just annotating types,
     throw away the initialization if it isn't a constant.  */
  if ((extern_flag && TREE_CODE (var_decl) != CONST_DECL)
      || (type_annotate_only && var_init != 0 && ! TREE_CONSTANT (var_init)))
    var_init = 0;

  if (global_bindings_p () && var_init != 0 && ! init_const)
    {
      add_pending_elaborations (var_decl, var_init);
      var_init = 0;
    }
  else if (var_init != 0
	   && ((TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
		!= TYPE_MAIN_VARIANT (type))
	       || (static_flag && ! init_const)))
    assign_init = var_init, var_init = 0;

  DECL_INITIAL (var_decl) = var_init;
  TREE_READONLY (var_decl) = const_flag;
  DECL_EXTERNAL (var_decl) = extern_flag;
  TREE_PUBLIC (var_decl) = public_flag || extern_flag;
  TREE_CONSTANT (var_decl) = TREE_CODE (var_decl) == CONST_DECL;
  TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
    = TYPE_VOLATILE (type);

  /* At the global binding level we need to allocate static storage for the
     variable if and only if its not external.  If we are not at the top level
     we allocate automatic storage unless requested not to.  */
  TREE_STATIC (var_decl) = global_bindings_p () ? !extern_flag : static_flag;

  if (asm_name != 0)
    SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);

  process_attributes (var_decl, attr_list);

  /* Add this decl to the current binding level and generate any
     needed code and RTL.  */
  var_decl = pushdecl (var_decl);
  expand_decl (var_decl);

  if (DECL_CONTEXT (var_decl) != 0)
    expand_decl_init (var_decl);

  /* If this is volatile, force it into memory.  */
  if (TREE_SIDE_EFFECTS (var_decl))
    gnat_mark_addressable (var_decl);

  if (TREE_CODE (var_decl) != CONST_DECL)
    rest_of_decl_compilation (var_decl, 0, global_bindings_p (), 0);

  if (assign_init != 0)
    {
      /* If VAR_DECL has a padded type, convert it to the unpadded
	 type so the assignment is done properly.  */
      tree lhs = var_decl;

      if (TREE_CODE (TREE_TYPE (lhs)) == RECORD_TYPE
	  && TYPE_IS_PADDING_P (TREE_TYPE (lhs)))
	lhs = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (lhs))), lhs);

      expand_expr_stmt (build_binary_op (MODIFY_EXPR, NULL_TREE, lhs,
					 assign_init));
    }

  return var_decl;
}
/* Returns a FIELD_DECL node.  FIELD_NAME the field name, FIELD_TYPE is its
   type, and RECORD_TYPE is the type of the parent.  PACKED is nonzero if
   this field is in a record type with a "pragma pack".  If SIZE is nonzero
   it is the specified size for this field.  If POS is nonzero, it is the bit
   position.  If ADDRESSABLE is nonzero, it means we are allowed to take
   the address of this field for aliasing purposes.

   NOTE(review): the parameter list and several control-flow lines (braces,
   else-branches, the final return) were lost in extraction and restored
   from context -- verify before use.  */

tree
create_field_decl (tree field_name,
		   tree field_type,
		   tree record_type,
		   int packed,
		   tree size,
		   tree pos,
		   int addressable)
{
  tree field_decl = build_decl (FIELD_DECL, field_name, field_type);

  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TYPE_READONLY (field_type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.  */
  if (packed && TYPE_MODE (field_type) == BLKmode)
    DECL_ALIGN (field_decl) = BITS_PER_UNIT;

  /* If a size is specified, use it.  Otherwise, if the record type is packed
     compute a size to use, which may differ from the object's natural size.
     We always set a size in this case to trigger the checks for bitfield
     creation below, which is typically required when no position has been
     specified.  */
  if (size != 0)
    size = convert (bitsizetype, size);
  else if (packed == 1)
    {
      size = rm_size (field_type);

      /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
	 byte.  */
      if (TREE_CODE (size) == INTEGER_CST
	  && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
	size = round_up (size, BITS_PER_UNIT);
    }

  /* Make a bitfield if a size is specified for two reasons: first if the size
     differs from the natural size.  Second, if the alignment is insufficient.
     There are a number of ways the latter can be true.

     We never make a bitfield if the type of the field has a nonconstant size,
     or if it is claimed to be addressable, because no such entity requiring
     bitfield operations should reach here.

     We do *preventively* make a bitfield when there might be the need for it
     but we don't have all the necessary information to decide, as is the case
     of a field with no specified position in a packed record.

     We also don't look at STRICT_ALIGNMENT here, and rely on later processing
     in layout_decl or finish_record_type to clear the bit_field indication if
     it is in fact not needed.  */
  if (size != 0 && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
      && ! addressable
      && (! operand_equal_p (TYPE_SIZE (field_type), size, 0)
	  || (pos != 0
	      && ! value_zerop (size_binop (TRUNC_MOD_EXPR, pos,
					    bitsize_int (TYPE_ALIGN
							 (field_type)))))
	  || packed
	  || (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      if (! packed && pos == 0)
	DECL_ALIGN (field_decl)
	  = (TYPE_ALIGN (record_type) != 0
	     ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
	     : TYPE_ALIGN (field_type));
    }

  DECL_PACKED (field_decl) = pos != 0 ? DECL_BIT_FIELD (field_decl) : packed;
  DECL_ALIGN (field_decl)
    = MAX (DECL_ALIGN (field_decl),
	   DECL_BIT_FIELD (field_decl) ? 1
	   : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT
	   : TYPE_ALIGN (field_type));

  if (pos != 0)
    {
      /* We need to pass in the alignment the DECL is known to have.
	 This is the lowest-order bit set in POS, but no more than
	 the alignment of the record, if one is specified.  Note
	 that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      if (host_integerp (pos, 1))
	known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
      else
	known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
	  && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
	known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
			     host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
			     : BITS_PER_UNIT);
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
		    &DECL_FIELD_BIT_OFFSET (field_decl),
		    DECL_OFFSET_ALIGN (field_decl), pos);

      DECL_HAS_REP_P (field_decl) = 1;
    }

  /* If the field type is passed by reference, we will have pointers to the
     field, so it is addressable.  */
  if (must_pass_by_ref (field_type) || default_pass_by_ref (field_type))
    addressable = 1;

  /* ??? For now, we say that any field of aggregate type is addressable
     because the front end may take 'Reference of it.  */
  if (AGGREGATE_TYPE_P (field_type))
    addressable = 1;

  /* Mark the decl as nonaddressable if it is indicated so semantically,
     meaning we won't ever attempt to take the address of the field.

     It may also be "technically" nonaddressable, meaning that even if we
     attempt to take the field's address we will actually get the address of a
     copy.  This is the case for true bitfields, but the DECL_BIT_FIELD value
     we have at this point is not accurate enough, so we don't account for
     this here and let finish_record_type decide.  */
  DECL_NONADDRESSABLE_P (field_decl) = ! addressable;

  return field_decl;
}
1539 /* Subroutine of previous function: return nonzero if EXP, ignoring any side
1540 effects, has the value of zero. */
1543 value_zerop (tree exp
)
1545 if (TREE_CODE (exp
) == COMPOUND_EXPR
)
1546 return value_zerop (TREE_OPERAND (exp
, 1));
1548 return integer_zerop (exp
);
/* Returns a PARM_DECL node.  PARAM_NAME is the name of the parameter,
   PARAM_TYPE is its type.  READONLY is nonzero if the parameter is
   readonly (either an IN parameter or an address of a pass-by-ref
   parameter).

   NOTE(review): a few dropped lines (braces, else-branch, return) were
   restored from context during extraction repair -- verify before use.  */

tree
create_param_decl (tree param_name, tree param_type, int readonly)
{
  tree param_decl = build_decl (PARM_DECL, param_name, param_type);

  /* Honor targetm.calls.promote_prototypes(), as not doing so can
     lead to various ABI violations.  */
  if (targetm.calls.promote_prototypes (param_type)
      && (TREE_CODE (param_type) == INTEGER_TYPE
	  || TREE_CODE (param_type) == ENUMERAL_TYPE)
      && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
    {
      /* We have to be careful about biased types here.  Make a subtype
	 of integer_type_node with the proper biasing.  */
      if (TREE_CODE (param_type) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (param_type))
	{
	  param_type
	    = copy_type (build_range_type (integer_type_node,
					   TYPE_MIN_VALUE (param_type),
					   TYPE_MAX_VALUE (param_type)));

	  TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
	}
      else
	param_type = integer_type_node;
    }

  DECL_ARG_TYPE (param_decl) = param_type;
  DECL_ARG_TYPE_AS_WRITTEN (param_decl) = param_type;
  TREE_READONLY (param_decl) = readonly;
  return param_decl;
}
/* Given a DECL and ATTR_LIST, process the listed attributes.

   NOTE(review): the `break' statements, the SUPPORTS_WEAK test on the weak
   case, and the closing braces were lost in extraction and restored from
   context -- verify against the original before building.  */

static void
process_attributes (tree decl, struct attrib *attr_list)
{
  for (; attr_list; attr_list = attr_list->next)
    switch (attr_list->type)
      {
      case ATTR_MACHINE_ATTRIBUTE:
	/* Hand the attribute straight to the generic machinery, operating
	   on the type in place.  */
	decl_attributes (&decl, tree_cons (attr_list->name, attr_list->arg,
					   NULL_TREE),
			 ATTR_FLAG_TYPE_IN_PLACE);
	break;

      case ATTR_LINK_ALIAS:
	TREE_STATIC (decl) = 1;
	assemble_alias (decl, attr_list->name);
	break;

      case ATTR_WEAK_EXTERNAL:
	/* NOTE(review): restored guard -- presumably `if (SUPPORTS_WEAK)';
	   confirm against the original.  */
	if (SUPPORTS_WEAK)
	  declare_weak (decl);
	else
	  post_error ("?weak declarations not supported on this target",
		      attr_list->error_point);
	break;

      case ATTR_LINK_SECTION:
	if (targetm.have_named_sections)
	  {
	    DECL_SECTION_NAME (decl)
	      = build_string (IDENTIFIER_LENGTH (attr_list->name),
			      IDENTIFIER_POINTER (attr_list->name));
	  }
	else
	  post_error ("?section attributes are not supported for this target",
		      attr_list->error_point);
	break;
      }
}
/* Add some pending elaborations on the list.  */

void
add_pending_elaborations (tree var_decl, tree var_init)
{
  /* NOTE(review): restored guard -- presumably `if (var_init != 0)';
     confirm against the original.  */
  if (var_init != 0)
    Check_Elaboration_Code_Allowed (error_gnat_node);

  /* Append to the list so elaborations run in declaration order.  */
  pending_elaborations
    = chainon (pending_elaborations, build_tree_list (var_decl, var_init));
}
/* Obtain any pending elaborations and clear the old list.  */

tree
get_pending_elaborations (void)
{
  /* Each thing added to the list went on the end; we want it on the
     beginning.  The head node itself is a dummy, so return its chain.  */
  tree result = TREE_CHAIN (pending_elaborations);

  TREE_CHAIN (pending_elaborations) = 0;
  return result;
}
1656 /* Return true if VALUE is a multiple of FACTOR. FACTOR must be a power
1660 value_factor_p (tree value
, int factor
)
1662 if (host_integerp (value
, 1))
1663 return tree_low_cst (value
, 1) % factor
== 0;
1665 if (TREE_CODE (value
) == MULT_EXPR
)
1666 return (value_factor_p (TREE_OPERAND (value
, 0), factor
)
1667 || value_factor_p (TREE_OPERAND (value
, 1), factor
));
/* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
   unless we can prove these 2 fields are laid out in such a way that no gap
   exists between the end of PREV_FIELD and the beginning of CURR_FIELD.  OFFSET
   is the distance in bits between the end of PREV_FIELD and the starting
   position of CURR_FIELD.  It is ignored if null.  */

static int
potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
{
  /* If this is the first field of the record, there cannot be any gap */
  if (!prev_field)
    return 0;

  /* If the previous field is a union type, then return False: The only
     time when such a field is not the last field of the record is when
     there are other components at fixed positions after it (meaning there
     was a rep clause for every field), in which case we don't want the
     alignment constraint to override them.  */
  if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
    return 0;

  /* If the distance between the end of prev_field and the beginning of
     curr_field is constant, then there is a gap if the value of this
     constant is not null.  */
  if (offset && host_integerp (offset, 1))
    return (!integer_zerop (offset));

  /* If the size and position of the previous field are constant,
     then check the sum of this size and position.  There will be a gap
     iff it is not multiple of the current field alignment.  */
  if (host_integerp (DECL_SIZE (prev_field), 1)
      && host_integerp (bit_position (prev_field), 1))
    return ((tree_low_cst (bit_position (prev_field), 1)
	     + tree_low_cst (DECL_SIZE (prev_field), 1))
	    % DECL_ALIGN (curr_field) != 0);

  /* If both the position and size of the previous field are multiples
     of the current field alignment, there can not be any gap.  */
  if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
      && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
    return 0;

  /* Fallback, return that there may be a potential gap */
  return 1;
}
/* Return nonzero if there are pending elaborations.  */

int
pending_elaborations_p (void)
{
  /* The head node is a dummy; real entries hang off its chain.  */
  return TREE_CHAIN (pending_elaborations) != 0;
}
/* Save a copy of the current pending elaboration list and make a new
   one.  */

void
push_pending_elaborations (void)
{
  /* GC-allocated stack node so the saved list stays reachable.  */
  struct e_stack *p = (struct e_stack *) ggc_alloc (sizeof (struct e_stack));

  p->next = elist_stack;
  p->elab_list = pending_elaborations;
  /* NOTE(review): restored line -- presumably `elist_stack = p;';
     confirm against the original.  */
  elist_stack = p;
  pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
}
/* Pop the stack of pending elaborations.  */

void
pop_pending_elaborations (void)
{
  struct e_stack *p = elist_stack;

  /* Restore the saved list and unlink the stack node; the node itself is
     GC-allocated, so no explicit free is needed.  */
  pending_elaborations = p->elab_list;
  elist_stack = p->next;
}
/* Return the current position in pending_elaborations so we can insert
   elaborations after that point.  */

tree
get_elaboration_location (void)
{
  return tree_last (pending_elaborations);
}
/* Insert the current elaborations after ELAB, which is in some elaboration
   list.  */

void
insert_elaboration_list (tree elab)
{
  tree next = TREE_CHAIN (elab);

  if (TREE_CHAIN (pending_elaborations))
    {
      /* Splice the pending entries in after ELAB, reattach ELAB's old
	 successors at the end, and leave the pending list empty.  */
      TREE_CHAIN (elab) = TREE_CHAIN (pending_elaborations);
      TREE_CHAIN (tree_last (pending_elaborations)) = next;
      TREE_CHAIN (pending_elaborations) = 0;
    }
}
/* Returns a LABEL_DECL node for LABEL_NAME.  */

tree
create_label_decl (tree label_name)
{
  tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);

  DECL_CONTEXT (label_decl) = current_function_decl;
  DECL_MODE (label_decl) = VOIDmode;
  DECL_SOURCE_LOCATION (label_decl) = input_location;

  return label_decl;
}
/* Returns a FUNCTION_DECL node.  SUBPROG_NAME is the name of the subprogram,
   ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
   node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
   PARM_DECL nodes chained through the TREE_CHAIN field).

   INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
   appropriate fields in the FUNCTION_DECL.

   NOTE(review): part of the parameter list and single-statement lines were
   lost in extraction and restored from context -- verify before use.  */

tree
create_subprog_decl (tree subprog_name,
		     tree asm_name,
		     tree subprog_type,
		     tree param_decl_list,
		     int inline_flag,
		     int public_flag,
		     int extern_flag,
		     struct attrib *attr_list)
{
  tree return_type = TREE_TYPE (subprog_type);
  tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);

  /* If this is a function nested inside an inlined external function, it
     means we aren't going to compile the outer function unless it is
     actually inlined, so do the same for us.  */
  if (current_function_decl != 0 && DECL_INLINE (current_function_decl)
      && DECL_EXTERNAL (current_function_decl))
    extern_flag = 1;

  DECL_EXTERNAL (subprog_decl) = extern_flag;
  TREE_PUBLIC (subprog_decl) = public_flag;
  DECL_INLINE (subprog_decl) = inline_flag;
  TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
  TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
  TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
  DECL_ARGUMENTS (subprog_decl) = param_decl_list;
  DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);

  if (asm_name != 0)
    SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);

  process_attributes (subprog_decl, attr_list);

  /* Add this decl to the current binding level.  */
  subprog_decl = pushdecl (subprog_decl);

  /* Output the assembler code and/or RTL for the declaration.  */
  rest_of_decl_compilation (subprog_decl, 0, global_bindings_p (), 0);

  return subprog_decl;
}
/* Count how deep we are into nested functions.  This is because
   we shouldn't call the backend function context routines unless we
   are in a nested function.  */

static int function_nesting_depth;

/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
   body.  This routine needs to be invoked before processing the declarations
   appearing in the subprogram.

   NOTE(review): several statements between marking TREE_STATIC and the
   parameter loop (e.g. the binding-level push) were lost in extraction --
   restore from the original before building.  */

void
begin_subprog_body (tree subprog_decl)
{
  tree param_decl;

  /* Only push a backend function context when entering a nested body.  */
  if (function_nesting_depth++ != 0)
    push_function_context ();

  announce_function (subprog_decl);

  /* Make this field nonzero so further routines know that this is not
     tentative.  error_mark_node is replaced below (in poplevel) with the
     adequate BLOCK.  */
  DECL_INITIAL (subprog_decl) = error_mark_node;

  /* This function exists in static storage.  This does not mean `static' in
     the C sense!  */
  TREE_STATIC (subprog_decl) = 1;

  /* Enter a new binding level and show that all the parameters belong to
     this function.  */
  current_function_decl = subprog_decl;

  for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
       param_decl = TREE_CHAIN (param_decl))
    DECL_CONTEXT (param_decl) = subprog_decl;

  init_function_start (subprog_decl);
  expand_function_start (subprog_decl, 0);

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_ASSEMBLER_NAME (subprog_decl) != 0
      && MAIN_NAME_P (DECL_ASSEMBLER_NAME (subprog_decl))
      && DECL_CONTEXT (subprog_decl) == NULL_TREE)
    expand_main_function ();
}
/* Finish the definition of the current subprogram and compile it all the way
   to assembler language output.

   NOTE(review): the prologue of this function (local declarations and the
   binding-level pop preceding the BLOCK_SUPERCONTEXT assignment) was lost
   in extraction -- restore from the original before building.  */

void
end_subprog_body (void)
{
  tree decl;
  tree cico_list;

  BLOCK_SUPERCONTEXT (DECL_INITIAL (current_function_decl))
    = current_function_decl;

  /* Mark the RESULT_DECL as being in this subprogram. */
  DECL_CONTEXT (DECL_RESULT (current_function_decl)) = current_function_decl;

  expand_function_end ();

  /* If this is a nested function, push a new GC context.  That will keep
     local variables on the stack from being collected while we're doing
     the compilation of this function.  */
  if (function_nesting_depth > 1)
    ggc_push_context ();

  /* If we're only annotating types, don't actually compile this
     function.  */
  if (!type_annotate_only)
    {
      rest_of_compilation (current_function_decl);
      if (! DECL_DEFER_OUTPUT (current_function_decl))
	{
	  free_after_compilation (cfun);
	  DECL_STRUCT_FUNCTION (current_function_decl) = 0;
	}
    }

  if (function_nesting_depth > 1)
    ggc_pop_context ();

  /* Throw away any VAR_DECLs we made for OUT parameters; they must
     not be seen when we call this function and will be in
     unallocated memory anyway.  */
  for (cico_list = TYPE_CI_CO_LIST (TREE_TYPE (current_function_decl));
       cico_list != 0; cico_list = TREE_CHAIN (cico_list))
    TREE_VALUE (cico_list) = 0;

  if (DECL_STRUCT_FUNCTION (current_function_decl) == 0)
    {
      /* Throw away DECL_RTL in any PARM_DECLs unless this function
	 was saved for inline, in which case the DECL_RTLs are in
	 preserved memory.  */
      for (decl = DECL_ARGUMENTS (current_function_decl);
	   decl != 0; decl = TREE_CHAIN (decl))
	{
	  SET_DECL_RTL (decl, 0);
	  DECL_INCOMING_RTL (decl) = 0;
	}

      /* Similarly, discard DECL_RTL of the return value. */
      SET_DECL_RTL (DECL_RESULT (current_function_decl), 0);

      /* But DECL_INITIAL must remain nonzero so we know this
	 was an actual function definition unless toplev.c decided not
	 to inline it.  */
      if (DECL_INITIAL (current_function_decl) != 0)
	DECL_INITIAL (current_function_decl) = error_mark_node;

      DECL_ARGUMENTS (current_function_decl) = 0;
    }

  /* If we are not at the bottom of the function nesting stack, pop up to
     the containing function.  Otherwise show we aren't in any function.  */
  if (--function_nesting_depth != 0)
    pop_function_context ();
  else
    current_function_decl = 0;
}
/* Return a definition for a builtin function named NAME and whose data type
   is TYPE.  TYPE should be a function type with argument types.
   FUNCTION_CODE tells later passes how to compile calls to this function.
   See tree.h for its possible values.

   If LIBRARY_NAME is nonzero, use that for DECL_ASSEMBLER_NAME,
   the name to be called if we can't opencode the function.  If
   ATTRS is nonzero, use that for the function attribute list.

   NOTE(review): the parameter list and the guards on the library-name and
   attribute handling were partially lost in extraction and restored from
   context -- verify before use.  */

tree
builtin_function (const char *name,
		  tree type,
		  int function_code,
		  enum built_in_class class,
		  const char *library_name,
		  tree attrs)
{
  tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);

  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  if (library_name)
    SET_DECL_ASSEMBLER_NAME (decl, get_identifier (library_name));
  pushdecl (decl);
  DECL_BUILT_IN_CLASS (decl) = class;
  DECL_FUNCTION_CODE (decl) = function_code;
  if (attrs)
    decl_attributes (&decl, attrs, ATTR_FLAG_BUILT_IN);
  return decl;
}
/* Return an integer type with the number of bits of precision given by
   PRECISION.  UNSIGNEDP is nonzero if the type is unsigned; otherwise
   it is a signed type.

   NOTE(review): local declarations and the signed/unsigned branch keywords
   were lost in extraction and restored from context -- verify before use.  */

tree
gnat_type_for_size (unsigned precision, int unsignedp)
{
  tree t;
  char type_name[20];

  /* Reuse a previously-built type when the precision fits in the cache.  */
  if (precision <= 2 * MAX_BITS_PER_WORD
      && signed_and_unsigned_types[precision][unsignedp] != 0)
    return signed_and_unsigned_types[precision][unsignedp];

  if (unsignedp)
    t = make_unsigned_type (precision);
  else
    t = make_signed_type (precision);

  if (precision <= 2 * MAX_BITS_PER_WORD)
    signed_and_unsigned_types[precision][unsignedp] = t;

  if (TYPE_NAME (t) == 0)
    {
      sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
      TYPE_NAME (t) = get_identifier (type_name);
    }

  return t;
}
2032 /* Likewise for floating-point types. */
2035 float_type_for_precision (int precision
, enum machine_mode mode
)
2040 if (float_types
[(int) mode
] != 0)
2041 return float_types
[(int) mode
];
2043 float_types
[(int) mode
] = t
= make_node (REAL_TYPE
);
2044 TYPE_PRECISION (t
) = precision
;
2047 if (TYPE_MODE (t
) != mode
)
2050 if (TYPE_NAME (t
) == 0)
2052 sprintf (type_name
, "FLOAT_%d", precision
);
2053 TYPE_NAME (t
) = get_identifier (type_name
);
2059 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2060 an unsigned type; otherwise a signed type is returned. */
2063 gnat_type_for_mode (enum machine_mode mode
, int unsignedp
)
2065 if (mode
== BLKmode
)
2067 else if (mode
== VOIDmode
)
2068 return void_type_node
;
2069 else if (GET_MODE_CLASS (mode
) == MODE_FLOAT
)
2070 return float_type_for_precision (GET_MODE_PRECISION (mode
), mode
);
2072 return gnat_type_for_size (GET_MODE_BITSIZE (mode
), unsignedp
);
2075 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2078 gnat_unsigned_type (tree type_node
)
2080 tree type
= gnat_type_for_size (TYPE_PRECISION (type_node
), 1);
2082 if (TREE_CODE (type_node
) == INTEGER_TYPE
&& TYPE_MODULAR_P (type_node
))
2084 type
= copy_node (type
);
2085 TREE_TYPE (type
) = type_node
;
2087 else if (TREE_TYPE (type_node
) != 0
2088 && TREE_CODE (TREE_TYPE (type_node
)) == INTEGER_TYPE
2089 && TYPE_MODULAR_P (TREE_TYPE (type_node
)))
2091 type
= copy_node (type
);
2092 TREE_TYPE (type
) = TREE_TYPE (type_node
);
2098 /* Return the signed version of a TYPE_NODE, a scalar type. */
2101 gnat_signed_type (tree type_node
)
2103 tree type
= gnat_type_for_size (TYPE_PRECISION (type_node
), 0);
2105 if (TREE_CODE (type_node
) == INTEGER_TYPE
&& TYPE_MODULAR_P (type_node
))
2107 type
= copy_node (type
);
2108 TREE_TYPE (type
) = type_node
;
2110 else if (TREE_TYPE (type_node
) != 0
2111 && TREE_CODE (TREE_TYPE (type_node
)) == INTEGER_TYPE
2112 && TYPE_MODULAR_P (TREE_TYPE (type_node
)))
2114 type
= copy_node (type
);
2115 TREE_TYPE (type
) = TREE_TYPE (type_node
);
2121 /* Return a type the same as TYPE except unsigned or signed according to
2125 gnat_signed_or_unsigned_type (int unsignedp
, tree type
)
2127 if (! INTEGRAL_TYPE_P (type
) || TYPE_UNSIGNED (type
) == unsignedp
)
2130 return gnat_type_for_size (TYPE_PRECISION (type
), unsignedp
);
2133 /* EXP is an expression for the size of an object. If this size contains
2134 discriminant references, replace them with the maximum (if MAX_P) or
2135 minimum (if ! MAX_P) possible value of the discriminant. */
2138 max_size (tree exp
, int max_p
)
2140 enum tree_code code
= TREE_CODE (exp
);
2141 tree type
= TREE_TYPE (exp
);
2143 switch (TREE_CODE_CLASS (code
))
2150 if (code
== TREE_LIST
)
2151 return tree_cons (TREE_PURPOSE (exp
),
2152 max_size (TREE_VALUE (exp
), max_p
),
2153 TREE_CHAIN (exp
) != 0
2154 ? max_size (TREE_CHAIN (exp
), max_p
) : 0);
2158 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
2159 modify. Otherwise, we treat it like a variable. */
2160 if (! CONTAINS_PLACEHOLDER_P (exp
))
2163 type
= TREE_TYPE (TREE_OPERAND (exp
, 1));
2165 max_size (max_p
? TYPE_MAX_VALUE (type
) : TYPE_MIN_VALUE (type
), 1);
2168 return max_p
? size_one_node
: size_zero_node
;
2173 switch (TREE_CODE_LENGTH (code
))
2176 if (code
== NON_LVALUE_EXPR
)
2177 return max_size (TREE_OPERAND (exp
, 0), max_p
);
2180 fold (build1 (code
, type
,
2181 max_size (TREE_OPERAND (exp
, 0),
2182 code
== NEGATE_EXPR
? ! max_p
: max_p
)));
2185 if (code
== RTL_EXPR
)
2187 else if (code
== COMPOUND_EXPR
)
2188 return max_size (TREE_OPERAND (exp
, 1), max_p
);
2191 tree lhs
= max_size (TREE_OPERAND (exp
, 0), max_p
);
2192 tree rhs
= max_size (TREE_OPERAND (exp
, 1),
2193 code
== MINUS_EXPR
? ! max_p
: max_p
);
2195 /* Special-case wanting the maximum value of a MIN_EXPR.
2196 In that case, if one side overflows, return the other.
2197 sizetype is signed, but we know sizes are non-negative.
2198 Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
2199 overflowing or the maximum possible value and the RHS
2201 if (max_p
&& code
== MIN_EXPR
&& TREE_OVERFLOW (rhs
))
2203 else if (max_p
&& code
== MIN_EXPR
&& TREE_OVERFLOW (lhs
))
2205 else if ((code
== MINUS_EXPR
|| code
== PLUS_EXPR
)
2206 && ((TREE_CONSTANT (lhs
) && TREE_OVERFLOW (lhs
))
2207 || operand_equal_p (lhs
, TYPE_MAX_VALUE (type
), 0))
2208 && ! TREE_CONSTANT (rhs
))
2211 return fold (build (code
, type
, lhs
, rhs
));
2215 if (code
== SAVE_EXPR
)
2217 else if (code
== COND_EXPR
)
2218 return fold (build (max_p
? MAX_EXPR
: MIN_EXPR
, type
,
2219 max_size (TREE_OPERAND (exp
, 1), max_p
),
2220 max_size (TREE_OPERAND (exp
, 2), max_p
)));
2221 else if (code
== CALL_EXPR
&& TREE_OPERAND (exp
, 1) != 0)
2222 return build (CALL_EXPR
, type
, TREE_OPERAND (exp
, 0),
2223 max_size (TREE_OPERAND (exp
, 1), max_p
));
2230 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
2231 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
2232 Return a constructor for the template. */
2235 build_template (tree template_type
, tree array_type
, tree expr
)
2237 tree template_elts
= NULL_TREE
;
2238 tree bound_list
= NULL_TREE
;
2241 if (TREE_CODE (array_type
) == RECORD_TYPE
2242 && (TYPE_IS_PADDING_P (array_type
)
2243 || TYPE_LEFT_JUSTIFIED_MODULAR_P (array_type
)))
2244 array_type
= TREE_TYPE (TYPE_FIELDS (array_type
));
2246 if (TREE_CODE (array_type
) == ARRAY_TYPE
2247 || (TREE_CODE (array_type
) == INTEGER_TYPE
2248 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type
)))
2249 bound_list
= TYPE_ACTUAL_BOUNDS (array_type
);
2251 /* First make the list for a CONSTRUCTOR for the template. Go down the
2252 field list of the template instead of the type chain because this
2253 array might be an Ada array of arrays and we can't tell where the
2254 nested arrays stop being the underlying object. */
2256 for (field
= TYPE_FIELDS (template_type
); field
;
2258 ? (bound_list
= TREE_CHAIN (bound_list
))
2259 : (array_type
= TREE_TYPE (array_type
))),
2260 field
= TREE_CHAIN (TREE_CHAIN (field
)))
2262 tree bounds
, min
, max
;
2264 /* If we have a bound list, get the bounds from there. Likewise
2265 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
2266 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
2267 This will give us a maximum range. */
2268 if (bound_list
!= 0)
2269 bounds
= TREE_VALUE (bound_list
);
2270 else if (TREE_CODE (array_type
) == ARRAY_TYPE
)
2271 bounds
= TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type
));
2272 else if (expr
!= 0 && TREE_CODE (expr
) == PARM_DECL
2273 && DECL_BY_COMPONENT_PTR_P (expr
))
2274 bounds
= TREE_TYPE (field
);
2278 min
= convert (TREE_TYPE (TREE_CHAIN (field
)), TYPE_MIN_VALUE (bounds
));
2279 max
= convert (TREE_TYPE (field
), TYPE_MAX_VALUE (bounds
));
2281 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
2282 substitute it from OBJECT. */
2283 min
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (min
, expr
);
2284 max
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (max
, expr
);
2286 template_elts
= tree_cons (TREE_CHAIN (field
), max
,
2287 tree_cons (field
, min
, template_elts
));
2290 return gnat_build_constructor (template_type
, nreverse (template_elts
));
2293 /* Build a VMS descriptor from a Mechanism_Type, which must specify
2294 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2295 in the type contains in its DECL_INITIAL the expression to use when
2296 a constructor is made for the type. GNAT_ENTITY is a gnat node used
2297 to print out an error message if the mechanism cannot be applied to
2298 an object of that type and also for the name. */
2301 build_vms_descriptor (tree type
, Mechanism_Type mech
, Entity_Id gnat_entity
)
2303 tree record_type
= make_node (RECORD_TYPE
);
2304 tree field_list
= 0;
2313 /* If TYPE is an unconstrained array, use the underlying array type. */
2314 if (TREE_CODE (type
) == UNCONSTRAINED_ARRAY_TYPE
)
2315 type
= TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type
))));
2317 /* If this is an array, compute the number of dimensions in the array,
2318 get the index types, and point to the inner type. */
2319 if (TREE_CODE (type
) != ARRAY_TYPE
)
2322 for (ndim
= 1, inner_type
= type
;
2323 TREE_CODE (TREE_TYPE (inner_type
)) == ARRAY_TYPE
2324 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type
));
2325 ndim
++, inner_type
= TREE_TYPE (inner_type
))
2328 idx_arr
= (tree
*) alloca (ndim
* sizeof (tree
));
2330 if (mech
!= By_Descriptor_NCA
2331 && TREE_CODE (type
) == ARRAY_TYPE
&& TYPE_CONVENTION_FORTRAN_P (type
))
2332 for (i
= ndim
- 1, inner_type
= type
;
2334 i
--, inner_type
= TREE_TYPE (inner_type
))
2335 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
2337 for (i
= 0, inner_type
= type
;
2339 i
++, inner_type
= TREE_TYPE (inner_type
))
2340 idx_arr
[i
] = TYPE_DOMAIN (inner_type
);
2342 /* Now get the DTYPE value. */
2343 switch (TREE_CODE (type
))
2347 if (TYPE_VAX_FLOATING_POINT_P (type
))
2348 switch (tree_low_cst (TYPE_DIGITS_VALUE (type
), 1))
2361 switch (GET_MODE_BITSIZE (TYPE_MODE (type
)))
2364 dtype
= TYPE_UNSIGNED (type
) ? 2 : 6;
2367 dtype
= TYPE_UNSIGNED (type
) ? 3 : 7;
2370 dtype
= TYPE_UNSIGNED (type
) ? 4 : 8;
2373 dtype
= TYPE_UNSIGNED (type
) ? 5 : 9;
2376 dtype
= TYPE_UNSIGNED (type
) ? 25 : 26;
2382 dtype
= GET_MODE_BITSIZE (TYPE_MODE (type
)) == 32 ? 52 : 53;
2386 if (TREE_CODE (TREE_TYPE (type
)) == INTEGER_TYPE
2387 && TYPE_VAX_FLOATING_POINT_P (type
))
2388 switch (tree_low_cst (TYPE_DIGITS_VALUE (type
), 1))
2400 dtype
= GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type
))) == 32 ? 54: 55;
2411 /* Get the CLASS value. */
2414 case By_Descriptor_A
:
2417 case By_Descriptor_NCA
:
2420 case By_Descriptor_SB
:
2427 /* Make the type for a descriptor for VMS. The first four fields
2428 are the same for all types. */
2431 = chainon (field_list
,
2432 make_descriptor_field
2433 ("LENGTH", gnat_type_for_size (16, 1), record_type
,
2434 size_in_bytes (mech
== By_Descriptor_A
? inner_type
: type
)));
2436 field_list
= chainon (field_list
,
2437 make_descriptor_field ("DTYPE",
2438 gnat_type_for_size (8, 1),
2439 record_type
, size_int (dtype
)));
2440 field_list
= chainon (field_list
,
2441 make_descriptor_field ("CLASS",
2442 gnat_type_for_size (8, 1),
2443 record_type
, size_int (class)));
2446 = chainon (field_list
,
2447 make_descriptor_field ("POINTER",
2448 build_pointer_type (type
),
2451 build_pointer_type (type
),
2452 build (PLACEHOLDER_EXPR
,
2458 case By_Descriptor_S
:
2461 case By_Descriptor_SB
:
2463 = chainon (field_list
,
2464 make_descriptor_field
2465 ("SB_L1", gnat_type_for_size (32, 1), record_type
,
2466 TREE_CODE (type
) == ARRAY_TYPE
2467 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type
)) : size_zero_node
));
2469 = chainon (field_list
,
2470 make_descriptor_field
2471 ("SB_L2", gnat_type_for_size (32, 1), record_type
,
2472 TREE_CODE (type
) == ARRAY_TYPE
2473 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type
)) : size_zero_node
));
2476 case By_Descriptor_A
:
2477 case By_Descriptor_NCA
:
2478 field_list
= chainon (field_list
,
2479 make_descriptor_field ("SCALE",
2480 gnat_type_for_size (8, 1),
2484 field_list
= chainon (field_list
,
2485 make_descriptor_field ("DIGITS",
2486 gnat_type_for_size (8, 1),
2491 = chainon (field_list
,
2492 make_descriptor_field
2493 ("AFLAGS", gnat_type_for_size (8, 1), record_type
,
2494 size_int (mech
== By_Descriptor_NCA
2496 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2497 : (TREE_CODE (type
) == ARRAY_TYPE
2498 && TYPE_CONVENTION_FORTRAN_P (type
)
2501 field_list
= chainon (field_list
,
2502 make_descriptor_field ("DIMCT",
2503 gnat_type_for_size (8, 1),
2507 field_list
= chainon (field_list
,
2508 make_descriptor_field ("ARSIZE",
2509 gnat_type_for_size (32, 1),
2511 size_in_bytes (type
)));
2513 /* Now build a pointer to the 0,0,0... element. */
2514 tem
= build (PLACEHOLDER_EXPR
, type
);
2515 for (i
= 0, inner_type
= type
; i
< ndim
;
2516 i
++, inner_type
= TREE_TYPE (inner_type
))
2517 tem
= build (ARRAY_REF
, TREE_TYPE (inner_type
), tem
,
2518 convert (TYPE_DOMAIN (inner_type
), size_zero_node
));
2521 = chainon (field_list
,
2522 make_descriptor_field
2523 ("A0", build_pointer_type (inner_type
), record_type
,
2524 build1 (ADDR_EXPR
, build_pointer_type (inner_type
), tem
)));
2526 /* Next come the addressing coefficients. */
2528 for (i
= 0; i
< ndim
; i
++)
2532 = size_binop (MULT_EXPR
, tem
,
2533 size_binop (PLUS_EXPR
,
2534 size_binop (MINUS_EXPR
,
2535 TYPE_MAX_VALUE (idx_arr
[i
]),
2536 TYPE_MIN_VALUE (idx_arr
[i
])),
2539 fname
[0] = (mech
== By_Descriptor_NCA
? 'S' : 'M');
2540 fname
[1] = '0' + i
, fname
[2] = 0;
2542 = chainon (field_list
,
2543 make_descriptor_field (fname
,
2544 gnat_type_for_size (32, 1),
2545 record_type
, idx_length
));
2547 if (mech
== By_Descriptor_NCA
)
2551 /* Finally here are the bounds. */
2552 for (i
= 0; i
< ndim
; i
++)
2556 fname
[0] = 'L', fname
[1] = '0' + i
, fname
[2] = 0;
2558 = chainon (field_list
,
2559 make_descriptor_field
2560 (fname
, gnat_type_for_size (32, 1), record_type
,
2561 TYPE_MIN_VALUE (idx_arr
[i
])));
2565 = chainon (field_list
,
2566 make_descriptor_field
2567 (fname
, gnat_type_for_size (32, 1), record_type
,
2568 TYPE_MAX_VALUE (idx_arr
[i
])));
2573 post_error ("unsupported descriptor type for &", gnat_entity
);
2576 finish_record_type (record_type
, field_list
, 0, 1);
2577 pushdecl (build_decl (TYPE_DECL
, create_concat_name (gnat_entity
, "DESC"),
2583 /* Utility routine for above code to make a field. */
2586 make_descriptor_field (const char *name
, tree type
,
2587 tree rec_type
, tree initial
)
2590 = create_field_decl (get_identifier (name
), type
, rec_type
, 0, 0, 0, 0);
2592 DECL_INITIAL (field
) = initial
;
2596 /* Build a type to be used to represent an aliased object whose nominal
2597 type is an unconstrained array. This consists of a RECORD_TYPE containing
2598 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
2599 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
2600 is used to represent an arbitrary unconstrained object. Use NAME
2601 as the name of the record. */
2604 build_unc_object_type (tree template_type
, tree object_type
, tree name
)
2606 tree type
= make_node (RECORD_TYPE
);
2607 tree template_field
= create_field_decl (get_identifier ("BOUNDS"),
2608 template_type
, type
, 0, 0, 0, 1);
2609 tree array_field
= create_field_decl (get_identifier ("ARRAY"), object_type
,
2612 TYPE_NAME (type
) = name
;
2613 TYPE_CONTAINS_TEMPLATE_P (type
) = 1;
2614 finish_record_type (type
,
2615 chainon (chainon (NULL_TREE
, template_field
),
2622 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
2623 the normal case this is just two adjustments, but we have more to do
2624 if NEW is an UNCONSTRAINED_ARRAY_TYPE. */
2627 update_pointer_to (tree old_type
, tree new_type
)
2629 tree ptr
= TYPE_POINTER_TO (old_type
);
2630 tree ref
= TYPE_REFERENCE_TO (old_type
);
2633 /* If this is the main variant, process all the other variants first. */
2634 if (TYPE_MAIN_VARIANT (old_type
) == old_type
)
2635 for (type
= TYPE_NEXT_VARIANT (old_type
); type
!= 0;
2636 type
= TYPE_NEXT_VARIANT (type
))
2637 update_pointer_to (type
, new_type
);
2639 /* If no pointer or reference, we are done. */
2640 if (ptr
== 0 && ref
== 0)
2643 /* Merge the old type qualifiers in the new type.
2645 Each old variant has qualifiers for specific reasons, and the new
2646 designated type as well. Each set of qualifiers represents useful
2647 information grabbed at some point, and merging the two simply unifies
2648 these inputs into the final type description.
2650 Consider for instance a volatile type frozen after an access to constant
2651 type designating it. After the designated type freeze, we get here with a
2652 volatile new_type and a dummy old_type with a readonly variant, created
2653 when the access type was processed. We shall make a volatile and readonly
2654 designated type, because that's what it really is.
2656 We might also get here for a non-dummy old_type variant with different
2657 qualifiers than the new_type ones, for instance in some cases of pointers
2658 to private record type elaboration (see the comments around the call to
2659 this routine from gnat_to_gnu_entity/E_Access_Type). We have to merge the
2660 qualifiers in thoses cases too, to avoid accidentally discarding the
2661 initial set, and will often end up with old_type == new_type then. */
2662 new_type
= build_qualified_type (new_type
,
2663 TYPE_QUALS (old_type
)
2664 | TYPE_QUALS (new_type
));
2666 /* If the new type and the old one are identical, there is nothing to
2668 if (old_type
== new_type
)
2671 /* Otherwise, first handle the simple case. */
2672 if (TREE_CODE (new_type
) != UNCONSTRAINED_ARRAY_TYPE
)
2674 TYPE_POINTER_TO (new_type
) = ptr
;
2675 TYPE_REFERENCE_TO (new_type
) = ref
;
2677 for (; ptr
; ptr
= TYPE_NEXT_PTR_TO (ptr
))
2679 TREE_TYPE (ptr
) = new_type
;
2681 if (TYPE_NAME (ptr
) != 0
2682 && TREE_CODE (TYPE_NAME (ptr
)) == TYPE_DECL
2683 && TREE_CODE (new_type
) != ENUMERAL_TYPE
)
2684 rest_of_decl_compilation (TYPE_NAME (ptr
), NULL
,
2685 global_bindings_p (), 0);
2688 for (; ref
; ref
= TYPE_NEXT_PTR_TO (ref
))
2690 TREE_TYPE (ref
) = new_type
;
2692 if (TYPE_NAME (ref
) != 0
2693 && TREE_CODE (TYPE_NAME (ref
)) == TYPE_DECL
2694 && TREE_CODE (new_type
) != ENUMERAL_TYPE
)
2695 rest_of_decl_compilation (TYPE_NAME (ref
), NULL
,
2696 global_bindings_p (), 0);
2700 /* Now deal with the unconstrained array case. In this case the "pointer"
2701 is actually a RECORD_TYPE where the types of both fields are
2702 pointers to void. In that case, copy the field list from the
2703 old type to the new one and update the fields' context. */
2704 else if (TREE_CODE (ptr
) != RECORD_TYPE
|| ! TYPE_IS_FAT_POINTER_P (ptr
))
2709 tree new_obj_rec
= TYPE_OBJECT_RECORD_TYPE (new_type
);
2714 TYPE_FIELDS (ptr
) = TYPE_FIELDS (TYPE_POINTER_TO (new_type
));
2715 DECL_CONTEXT (TYPE_FIELDS (ptr
)) = ptr
;
2716 DECL_CONTEXT (TREE_CHAIN (TYPE_FIELDS (ptr
))) = ptr
;
2718 /* Rework the PLACEHOLDER_EXPR inside the reference to the
2721 ??? This is now the only use of gnat_substitute_in_type, which
2722 is now a very "heavy" routine to do this, so it should be replaced
2724 ptr_temp_type
= TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (ptr
)));
2725 new_ref
= build (COMPONENT_REF
, ptr_temp_type
,
2726 build (PLACEHOLDER_EXPR
, ptr
),
2727 TREE_CHAIN (TYPE_FIELDS (ptr
)));
2730 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr
))),
2731 gnat_substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr
))),
2732 TREE_CHAIN (TYPE_FIELDS (ptr
)), new_ref
));
2734 for (var
= TYPE_MAIN_VARIANT (ptr
); var
; var
= TYPE_NEXT_VARIANT (var
))
2735 SET_TYPE_UNCONSTRAINED_ARRAY (var
, new_type
);
2737 TYPE_POINTER_TO (new_type
) = TYPE_REFERENCE_TO (new_type
)
2738 = TREE_TYPE (new_type
) = ptr
;
2740 /* Now handle updating the allocation record, what the thin pointer
2741 points to. Update all pointers from the old record into the new
2742 one, update the types of the fields, and recompute the size. */
2744 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type
), new_obj_rec
);
2746 TREE_TYPE (TYPE_FIELDS (new_obj_rec
)) = TREE_TYPE (ptr_temp_type
);
2747 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec
)))
2748 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr
)));
2749 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec
)))
2750 = TYPE_SIZE (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr
))));
2751 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec
)))
2752 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr
))));
2754 TYPE_SIZE (new_obj_rec
)
2755 = size_binop (PLUS_EXPR
,
2756 DECL_SIZE (TYPE_FIELDS (new_obj_rec
)),
2757 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec
))));
2758 TYPE_SIZE_UNIT (new_obj_rec
)
2759 = size_binop (PLUS_EXPR
,
2760 DECL_SIZE_UNIT (TYPE_FIELDS (new_obj_rec
)),
2761 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec
))));
2762 rest_of_type_compilation (ptr
, global_bindings_p ());
2766 /* Convert a pointer to a constrained array into a pointer to a fat
2767 pointer. This involves making or finding a template. */
2770 convert_to_fat_pointer (tree type
, tree expr
)
2772 tree template_type
= TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type
))));
2773 tree
template, template_addr
;
2774 tree etype
= TREE_TYPE (expr
);
2776 /* If EXPR is a constant of zero, we make a fat pointer that has a null
2777 pointer to the template and array. */
2778 if (integer_zerop (expr
))
2780 gnat_build_constructor
2782 tree_cons (TYPE_FIELDS (type
),
2783 convert (TREE_TYPE (TYPE_FIELDS (type
)), expr
),
2784 tree_cons (TREE_CHAIN (TYPE_FIELDS (type
)),
2785 convert (build_pointer_type (template_type
),
2789 /* If EXPR is a thin pointer, make the template and data from the record. */
2791 else if (TYPE_THIN_POINTER_P (etype
))
2793 tree fields
= TYPE_FIELDS (TREE_TYPE (etype
));
2795 expr
= save_expr (expr
);
2796 if (TREE_CODE (expr
) == ADDR_EXPR
)
2797 expr
= TREE_OPERAND (expr
, 0);
2799 expr
= build1 (INDIRECT_REF
, TREE_TYPE (etype
), expr
);
2801 template = build_component_ref (expr
, NULL_TREE
, fields
, 0);
2802 expr
= build_unary_op (ADDR_EXPR
, NULL_TREE
,
2803 build_component_ref (expr
, NULL_TREE
,
2804 TREE_CHAIN (fields
), 0));
2807 /* Otherwise, build the constructor for the template. */
2808 template = build_template (template_type
, TREE_TYPE (etype
), expr
);
2810 template_addr
= build_unary_op (ADDR_EXPR
, NULL_TREE
, template);
2812 /* The result is a CONSTRUCTOR for the fat pointer.
2814 If expr is an argument of a foreign convention subprogram, the type it
2815 points to is directly the component type. In this case, the expression
2816 type may not match the corresponding FIELD_DECL type at this point, so we
2817 call "convert" here to fix that up if necessary. This type consistency is
2818 required, for instance because it ensures that possible later folding of
2819 component_refs against this constructor always yields something of the
2820 same type as the initial reference.
2822 Note that the call to "build_template" above is still fine, because it
2823 will only refer to the provided template_type in this case. */
2825 gnat_build_constructor
2826 (type
, tree_cons (TYPE_FIELDS (type
),
2827 convert (TREE_TYPE (TYPE_FIELDS (type
)), expr
),
2828 tree_cons (TREE_CHAIN (TYPE_FIELDS (type
)),
2829 template_addr
, NULL_TREE
)));
2832 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
2833 is something that is a fat pointer, so convert to it first if it EXPR
2834 is not already a fat pointer. */
2837 convert_to_thin_pointer (tree type
, tree expr
)
2839 if (! TYPE_FAT_POINTER_P (TREE_TYPE (expr
)))
2841 = convert_to_fat_pointer
2842 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type
))), expr
);
2844 /* We get the pointer to the data and use a NOP_EXPR to make it the
2847 = build_component_ref (expr
, NULL_TREE
, TYPE_FIELDS (TREE_TYPE (expr
)), 0);
2848 expr
= build1 (NOP_EXPR
, type
, expr
);
2853 /* Create an expression whose value is that of EXPR,
2854 converted to type TYPE. The TREE_TYPE of the value
2855 is always TYPE. This function implements all reasonable
2856 conversions; callers should filter out those that are
2857 not permitted by the language being compiled. */
2860 convert (tree type
, tree expr
)
2862 enum tree_code code
= TREE_CODE (type
);
2863 tree etype
= TREE_TYPE (expr
);
2864 enum tree_code ecode
= TREE_CODE (etype
);
2867 /* If EXPR is already the right type, we are done. */
2870 /* If we're converting between two aggregate types that have the same main
2871 variant, just make a NOP_EXPR. */
2872 else if (AGGREGATE_TYPE_P (type
)
2873 && TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (etype
))
2874 return build1 (NOP_EXPR
, type
, expr
);
2876 /* If the input type has padding, remove it by doing a component reference
2877 to the field. If the output type has padding, make a constructor
2878 to build the record. If both input and output have padding and are
2879 of variable size, do this as an unchecked conversion. */
2880 else if (ecode
== RECORD_TYPE
&& code
== RECORD_TYPE
2881 && TYPE_IS_PADDING_P (type
) && TYPE_IS_PADDING_P (etype
)
2882 && (! TREE_CONSTANT (TYPE_SIZE (type
))
2883 || ! TREE_CONSTANT (TYPE_SIZE (etype
))))
2885 else if (ecode
== RECORD_TYPE
&& TYPE_IS_PADDING_P (etype
))
2887 /* If we have just converted to this padded type, just get
2888 the inner expression. */
2889 if (TREE_CODE (expr
) == CONSTRUCTOR
2890 && CONSTRUCTOR_ELTS (expr
) != 0
2891 && TREE_PURPOSE (CONSTRUCTOR_ELTS (expr
)) == TYPE_FIELDS (etype
))
2892 return TREE_VALUE (CONSTRUCTOR_ELTS (expr
));
2894 return convert (type
, build_component_ref (expr
, NULL_TREE
,
2895 TYPE_FIELDS (etype
), 0));
2897 else if (code
== RECORD_TYPE
&& TYPE_IS_PADDING_P (type
))
2899 /* If we previously converted from another type and our type is
2900 of variable size, remove the conversion to avoid the need for
2901 variable-size temporaries. */
2902 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
2903 && ! TREE_CONSTANT (TYPE_SIZE (type
)))
2904 expr
= TREE_OPERAND (expr
, 0);
2906 /* If we are just removing the padding from expr, convert the original
2907 object if we have variable size. That will avoid the need
2908 for some variable-size temporaries. */
2909 if (TREE_CODE (expr
) == COMPONENT_REF
2910 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr
, 0))) == RECORD_TYPE
2911 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr
, 0)))
2912 && ! TREE_CONSTANT (TYPE_SIZE (type
)))
2913 return convert (type
, TREE_OPERAND (expr
, 0));
2915 /* If the result type is a padded type with a self-referentially-sized
2916 field and the expression type is a record, do this as an
2917 unchecked converstion. */
2918 else if (TREE_CODE (etype
) == RECORD_TYPE
2919 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type
))))
2920 return unchecked_convert (type
, expr
, 0);
2924 gnat_build_constructor (type
,
2925 tree_cons (TYPE_FIELDS (type
),
2927 (TYPE_FIELDS (type
)),
2932 /* If the input is a biased type, adjust first. */
2933 if (ecode
== INTEGER_TYPE
&& TYPE_BIASED_REPRESENTATION_P (etype
))
2934 return convert (type
, fold (build (PLUS_EXPR
, TREE_TYPE (etype
),
2935 fold (build1 (GNAT_NOP_EXPR
,
2936 TREE_TYPE (etype
), expr
)),
2937 TYPE_MIN_VALUE (etype
))));
2939 /* If the input is a left-justified modular type, we need to extract
2940 the actual object before converting it to any other type with the
2941 exception of an unconstrained array. */
2942 if (ecode
== RECORD_TYPE
&& TYPE_LEFT_JUSTIFIED_MODULAR_P (etype
)
2943 && code
!= UNCONSTRAINED_ARRAY_TYPE
)
2944 return convert (type
, build_component_ref (expr
, NULL_TREE
,
2945 TYPE_FIELDS (etype
), 0));
2947 /* If converting to a type that contains a template, convert to the data
2948 type and then build the template. */
2949 if (code
== RECORD_TYPE
&& TYPE_CONTAINS_TEMPLATE_P (type
))
2951 tree obj_type
= TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type
)));
2953 /* If the source already has a template, get a reference to the
2954 associated array only, as we are going to rebuild a template
2955 for the target type anyway. */
2956 expr
= maybe_unconstrained_array (expr
);
2959 gnat_build_constructor
2961 tree_cons (TYPE_FIELDS (type
),
2962 build_template (TREE_TYPE (TYPE_FIELDS (type
)),
2963 obj_type
, NULL_TREE
),
2964 tree_cons (TREE_CHAIN (TYPE_FIELDS (type
)),
2965 convert (obj_type
, expr
), NULL_TREE
)));
2968 /* There are some special cases of expressions that we process
2970 switch (TREE_CODE (expr
))
2975 case TRANSFORM_EXPR
:
2977 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
2978 conversion in gnat_expand_expr. NULL_EXPR does not represent
2979 and actual value, so no conversion is needed. */
2980 expr
= copy_node (expr
);
2981 TREE_TYPE (expr
) = type
;
2986 /* If we are converting a STRING_CST to another constrained array type,
2987 just make a new one in the proper type. Likewise for
2988 CONSTRUCTOR if the alias sets are the same. */
2989 if (code
== ecode
&& AGGREGATE_TYPE_P (etype
)
2990 && ! (TREE_CODE (TYPE_SIZE (etype
)) == INTEGER_CST
2991 && TREE_CODE (TYPE_SIZE (type
)) != INTEGER_CST
)
2992 && (TREE_CODE (expr
) == STRING_CST
2993 || get_alias_set (etype
) == get_alias_set (type
)))
2995 expr
= copy_node (expr
);
2996 TREE_TYPE (expr
) = type
;
3002 /* If we are converting between two aggregate types of the same
3003 kind, size, mode, and alignment, just make a new COMPONENT_REF.
3004 This avoid unneeded conversions which makes reference computations
3006 if (code
== ecode
&& TYPE_MODE (type
) == TYPE_MODE (etype
)
3007 && AGGREGATE_TYPE_P (type
) && AGGREGATE_TYPE_P (etype
)
3008 && TYPE_ALIGN (type
) == TYPE_ALIGN (etype
)
3009 && operand_equal_p (TYPE_SIZE (type
), TYPE_SIZE (etype
), 0)
3010 && get_alias_set (type
) == get_alias_set (etype
))
3011 return build (COMPONENT_REF
, type
, TREE_OPERAND (expr
, 0),
3012 TREE_OPERAND (expr
, 1));
3016 case UNCONSTRAINED_ARRAY_REF
:
3017 /* Convert this to the type of the inner array by getting the address of
3018 the array from the template. */
3019 expr
= build_unary_op (INDIRECT_REF
, NULL_TREE
,
3020 build_component_ref (TREE_OPERAND (expr
, 0),
3021 get_identifier ("P_ARRAY"),
3023 etype
= TREE_TYPE (expr
);
3024 ecode
= TREE_CODE (etype
);
3027 case VIEW_CONVERT_EXPR
:
3028 if (AGGREGATE_TYPE_P (type
) && AGGREGATE_TYPE_P (etype
)
3029 && ! TYPE_FAT_POINTER_P (type
) && ! TYPE_FAT_POINTER_P (etype
))
3030 return convert (type
, TREE_OPERAND (expr
, 0));
3034 /* If both types are record types, just convert the pointer and
3035 make a new INDIRECT_REF.
3037 ??? Disable this for now since it causes problems with the
3038 code in build_binary_op for MODIFY_EXPR which wants to
3039 strip off conversions. But that code really is a mess and
3040 we need to do this a much better way some time. */
3042 && (TREE_CODE (type
) == RECORD_TYPE
3043 || TREE_CODE (type
) == UNION_TYPE
)
3044 && (TREE_CODE (etype
) == RECORD_TYPE
3045 || TREE_CODE (etype
) == UNION_TYPE
)
3046 && ! TYPE_FAT_POINTER_P (type
) && ! TYPE_FAT_POINTER_P (etype
))
3047 return build_unary_op (INDIRECT_REF
, NULL_TREE
,
3048 convert (build_pointer_type (type
),
3049 TREE_OPERAND (expr
, 0)));
3056 /* Check for converting to a pointer to an unconstrained array. */
3057 if (TYPE_FAT_POINTER_P (type
) && ! TYPE_FAT_POINTER_P (etype
))
3058 return convert_to_fat_pointer (type
, expr
);
3060 if (TYPE_MAIN_VARIANT (type
) == TYPE_MAIN_VARIANT (etype
)
3061 || (code
== INTEGER_CST
&& ecode
== INTEGER_CST
3062 && (type
== TREE_TYPE (etype
) || etype
== TREE_TYPE (type
))))
3063 return fold (build1 (NOP_EXPR
, type
, expr
));
3068 return build1 (CONVERT_EXPR
, type
, expr
);
3071 if (TYPE_HAS_ACTUAL_BOUNDS_P (type
)
3072 && (ecode
== ARRAY_TYPE
|| ecode
== UNCONSTRAINED_ARRAY_TYPE
3073 || (ecode
== RECORD_TYPE
&& TYPE_CONTAINS_TEMPLATE_P (etype
))))
3074 return unchecked_convert (type
, expr
, 0);
3075 else if (TYPE_BIASED_REPRESENTATION_P (type
))
3076 return fold (build1 (CONVERT_EXPR
, type
,
3077 fold (build (MINUS_EXPR
, TREE_TYPE (type
),
3078 convert (TREE_TYPE (type
), expr
),
3079 TYPE_MIN_VALUE (type
)))));
3081 /* ... fall through ... */
3084 return fold (convert_to_integer (type
, expr
));
3087 case REFERENCE_TYPE
:
3088 /* If converting between two pointers to records denoting
3089 both a template and type, adjust if needed to account
3090 for any differing offsets, since one might be negative. */
3091 if (TYPE_THIN_POINTER_P (etype
) && TYPE_THIN_POINTER_P (type
))
3094 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype
))),
3095 bit_position (TYPE_FIELDS (TREE_TYPE (type
))));
3096 tree byte_diff
= size_binop (CEIL_DIV_EXPR
, bit_diff
,
3097 sbitsize_int (BITS_PER_UNIT
));
3099 expr
= build1 (NOP_EXPR
, type
, expr
);
3100 TREE_CONSTANT (expr
) = TREE_CONSTANT (TREE_OPERAND (expr
, 0));
3101 if (integer_zerop (byte_diff
))
3104 return build_binary_op (PLUS_EXPR
, type
, expr
,
3105 fold (convert_to_pointer (type
, byte_diff
)));
3108 /* If converting to a thin pointer, handle specially. */
3109 if (TYPE_THIN_POINTER_P (type
)
3110 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type
)) != 0)
3111 return convert_to_thin_pointer (type
, expr
);
3113 /* If converting fat pointer to normal pointer, get the pointer to the
3114 array and then convert it. */
3115 else if (TYPE_FAT_POINTER_P (etype
))
3116 expr
= build_component_ref (expr
, get_identifier ("P_ARRAY"),
3119 return fold (convert_to_pointer (type
, expr
));
3122 return fold (convert_to_real (type
, expr
));
3125 if (TYPE_LEFT_JUSTIFIED_MODULAR_P (type
) && ! AGGREGATE_TYPE_P (etype
))
3127 gnat_build_constructor
3128 (type
, tree_cons (TYPE_FIELDS (type
),
3129 convert (TREE_TYPE (TYPE_FIELDS (type
)), expr
),
3132 /* ... fall through ... */
3135 /* In these cases, assume the front-end has validated the conversion.
3136 If the conversion is valid, it will be a bit-wise conversion, so
3137 it can be viewed as an unchecked conversion. */
3138 return unchecked_convert (type
, expr
, 0);
3141 /* Just validate that the type is indeed that of a field
3142 of the type. Then make the simple conversion. */
3143 for (tem
= TYPE_FIELDS (type
); tem
; tem
= TREE_CHAIN (tem
))
3145 if (TREE_TYPE (tem
) == etype
)
3146 return build1 (CONVERT_EXPR
, type
, expr
);
3147 else if (TREE_CODE (TREE_TYPE (tem
)) == RECORD_TYPE
3148 && (TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (tem
))
3149 || TYPE_IS_PADDING_P (TREE_TYPE (tem
)))
3150 && TREE_TYPE (TYPE_FIELDS (TREE_TYPE (tem
))) == etype
)
3151 return build1 (CONVERT_EXPR
, type
,
3152 convert (TREE_TYPE (tem
), expr
));
3157 case UNCONSTRAINED_ARRAY_TYPE
:
3158 /* If EXPR is a constrained array, take its address, convert it to a
3159 fat pointer, and then dereference it. Likewise if EXPR is a
3160 record containing both a template and a constrained array.
3161 Note that a record representing a left justified modular type
3162 always represents a packed constrained array. */
3163 if (ecode
== ARRAY_TYPE
3164 || (ecode
== INTEGER_TYPE
&& TYPE_HAS_ACTUAL_BOUNDS_P (etype
))
3165 || (ecode
== RECORD_TYPE
&& TYPE_CONTAINS_TEMPLATE_P (etype
))
3166 || (ecode
== RECORD_TYPE
&& TYPE_LEFT_JUSTIFIED_MODULAR_P (etype
)))
3169 (INDIRECT_REF
, NULL_TREE
,
3170 convert_to_fat_pointer (TREE_TYPE (type
),
3171 build_unary_op (ADDR_EXPR
,
3174 /* Do something very similar for converting one unconstrained
3175 array to another. */
3176 else if (ecode
== UNCONSTRAINED_ARRAY_TYPE
)
3178 build_unary_op (INDIRECT_REF
, NULL_TREE
,
3179 convert (TREE_TYPE (type
),
3180 build_unary_op (ADDR_EXPR
,
3186 return fold (convert_to_complex (type
, expr
));
3193 /* Remove all conversions that are done in EXP. This includes converting
3194 from a padded type or to a left-justified modular type. If TRUE_ADDRESS
3195 is nonzero, always return the address of the containing object even if
3196 the address is not bit-aligned. */
3199 remove_conversions (tree exp
, int true_address
)
3201 switch (TREE_CODE (exp
))
3205 && TREE_CODE (TREE_TYPE (exp
)) == RECORD_TYPE
3206 && TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (exp
)))
3207 return remove_conversions (TREE_VALUE (CONSTRUCTOR_ELTS (exp
)), 1);
3211 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp
, 0))) == RECORD_TYPE
3212 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp
, 0))))
3213 return remove_conversions (TREE_OPERAND (exp
, 0), true_address
);
3216 case VIEW_CONVERT_EXPR
: case NON_LVALUE_EXPR
:
3217 case NOP_EXPR
: case CONVERT_EXPR
: case GNAT_NOP_EXPR
:
3218 return remove_conversions (TREE_OPERAND (exp
, 0), true_address
);
3227 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
3228 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
3229 likewise return an expression pointing to the underlying array. */
3232 maybe_unconstrained_array (tree exp
)
3234 enum tree_code code
= TREE_CODE (exp
);
3237 switch (TREE_CODE (TREE_TYPE (exp
)))
3239 case UNCONSTRAINED_ARRAY_TYPE
:
3240 if (code
== UNCONSTRAINED_ARRAY_REF
)
3243 = build_unary_op (INDIRECT_REF
, NULL_TREE
,
3244 build_component_ref (TREE_OPERAND (exp
, 0),
3245 get_identifier ("P_ARRAY"),
3247 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp
);
3251 else if (code
== NULL_EXPR
)
3252 return build1 (NULL_EXPR
,
3253 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
3254 (TREE_TYPE (TREE_TYPE (exp
))))),
3255 TREE_OPERAND (exp
, 0));
3258 /* If this is a padded type, convert to the unpadded type and see if
3259 it contains a template. */
3260 if (TYPE_IS_PADDING_P (TREE_TYPE (exp
)))
3262 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp
))), exp
);
3263 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
3264 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
3266 build_component_ref (new, NULL_TREE
,
3267 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
3270 else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp
)))
3272 build_component_ref (exp
, NULL_TREE
,
3273 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp
))), 0);
3283 /* Return an expression that does an unchecked converstion of EXPR to TYPE.
3284 If NOTRUNC_P is set, truncation operations should be suppressed. */
3287 unchecked_convert (tree type
, tree expr
, int notrunc_p
)
3289 tree etype
= TREE_TYPE (expr
);
3291 /* If the expression is already the right type, we are done. */
3295 /* If both types types are integral just do a normal conversion.
3296 Likewise for a conversion to an unconstrained array. */
3297 if ((((INTEGRAL_TYPE_P (type
)
3298 && ! (TREE_CODE (type
) == INTEGER_TYPE
3299 && TYPE_VAX_FLOATING_POINT_P (type
)))
3300 || (POINTER_TYPE_P (type
) && ! TYPE_THIN_POINTER_P (type
))
3301 || (TREE_CODE (type
) == RECORD_TYPE
3302 && TYPE_LEFT_JUSTIFIED_MODULAR_P (type
)))
3303 && ((INTEGRAL_TYPE_P (etype
)
3304 && ! (TREE_CODE (etype
) == INTEGER_TYPE
3305 && TYPE_VAX_FLOATING_POINT_P (etype
)))
3306 || (POINTER_TYPE_P (etype
) && ! TYPE_THIN_POINTER_P (etype
))
3307 || (TREE_CODE (etype
) == RECORD_TYPE
3308 && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype
))))
3309 || TREE_CODE (type
) == UNCONSTRAINED_ARRAY_TYPE
)
3313 if (TREE_CODE (etype
) == INTEGER_TYPE
3314 && TYPE_BIASED_REPRESENTATION_P (etype
))
3316 tree ntype
= copy_type (etype
);
3318 TYPE_BIASED_REPRESENTATION_P (ntype
) = 0;
3319 TYPE_MAIN_VARIANT (ntype
) = ntype
;
3320 expr
= build1 (GNAT_NOP_EXPR
, ntype
, expr
);
3323 if (TREE_CODE (type
) == INTEGER_TYPE
3324 && TYPE_BIASED_REPRESENTATION_P (type
))
3326 rtype
= copy_type (type
);
3327 TYPE_BIASED_REPRESENTATION_P (rtype
) = 0;
3328 TYPE_MAIN_VARIANT (rtype
) = rtype
;
3331 expr
= convert (rtype
, expr
);
3333 expr
= build1 (GNAT_NOP_EXPR
, type
, expr
);
3336 /* If we are converting TO an integral type whose precision is not the
3337 same as its size, first unchecked convert to a record that contains
3338 an object of the output type. Then extract the field. */
3339 else if (INTEGRAL_TYPE_P (type
) && TYPE_RM_SIZE (type
) != 0
3340 && 0 != compare_tree_int (TYPE_RM_SIZE (type
),
3341 GET_MODE_BITSIZE (TYPE_MODE (type
))))
3343 tree rec_type
= make_node (RECORD_TYPE
);
3344 tree field
= create_field_decl (get_identifier ("OBJ"), type
,
3345 rec_type
, 1, 0, 0, 0);
3347 TYPE_FIELDS (rec_type
) = field
;
3348 layout_type (rec_type
);
3350 expr
= unchecked_convert (rec_type
, expr
, notrunc_p
);
3351 expr
= build_component_ref (expr
, NULL_TREE
, field
, 0);
3354 /* Similarly for integral input type whose precision is not equal to its
3356 else if (INTEGRAL_TYPE_P (etype
) && TYPE_RM_SIZE (etype
) != 0
3357 && 0 != compare_tree_int (TYPE_RM_SIZE (etype
),
3358 GET_MODE_BITSIZE (TYPE_MODE (etype
))))
3360 tree rec_type
= make_node (RECORD_TYPE
);
3362 = create_field_decl (get_identifier ("OBJ"), etype
, rec_type
,
3365 TYPE_FIELDS (rec_type
) = field
;
3366 layout_type (rec_type
);
3368 expr
= gnat_build_constructor (rec_type
, build_tree_list (field
, expr
));
3369 expr
= unchecked_convert (type
, expr
, notrunc_p
);
3372 /* We have a special case when we are converting between two
3373 unconstrained array types. In that case, take the address,
3374 convert the fat pointer types, and dereference. */
3375 else if (TREE_CODE (etype
) == UNCONSTRAINED_ARRAY_TYPE
3376 && TREE_CODE (type
) == UNCONSTRAINED_ARRAY_TYPE
)
3377 expr
= build_unary_op (INDIRECT_REF
, NULL_TREE
,
3378 build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (type
),
3379 build_unary_op (ADDR_EXPR
, NULL_TREE
,
3383 expr
= maybe_unconstrained_array (expr
);
3384 etype
= TREE_TYPE (expr
);
3385 expr
= build1 (VIEW_CONVERT_EXPR
, type
, expr
);
3388 /* If the result is an integral type whose size is not equal to
3389 the size of the underlying machine type, sign- or zero-extend
3390 the result. We need not do this in the case where the input is
3391 an integral type of the same precision and signedness or if the output
3392 is a biased type or if both the input and output are unsigned. */
3394 && INTEGRAL_TYPE_P (type
) && TYPE_RM_SIZE (type
) != 0
3395 && ! (TREE_CODE (type
) == INTEGER_TYPE
3396 && TYPE_BIASED_REPRESENTATION_P (type
))
3397 && 0 != compare_tree_int (TYPE_RM_SIZE (type
),
3398 GET_MODE_BITSIZE (TYPE_MODE (type
)))
3399 && ! (INTEGRAL_TYPE_P (etype
)
3400 && TYPE_UNSIGNED (type
) == TYPE_UNSIGNED (etype
)
3401 && operand_equal_p (TYPE_RM_SIZE (type
),
3402 (TYPE_RM_SIZE (etype
) != 0
3403 ? TYPE_RM_SIZE (etype
) : TYPE_SIZE (etype
)),
3405 && ! (TYPE_UNSIGNED (type
) && TYPE_UNSIGNED (etype
)))
3407 tree base_type
= gnat_type_for_mode (TYPE_MODE (type
),
3408 TYPE_UNSIGNED (type
));
3410 = convert (base_type
,
3411 size_binop (MINUS_EXPR
,
3413 (GET_MODE_BITSIZE (TYPE_MODE (type
))),
3414 TYPE_RM_SIZE (type
)));
3417 build_binary_op (RSHIFT_EXPR
, base_type
,
3418 build_binary_op (LSHIFT_EXPR
, base_type
,
3419 convert (base_type
, expr
),
3424 /* An unchecked conversion should never raise Constraint_Error. The code
3425 below assumes that GCC's conversion routines overflow the same way that
3426 the underlying hardware does. This is probably true. In the rare case
3427 when it is false, we can rely on the fact that such conversions are
3428 erroneous anyway. */
3429 if (TREE_CODE (expr
) == INTEGER_CST
)
3430 TREE_OVERFLOW (expr
) = TREE_CONSTANT_OVERFLOW (expr
) = 0;
3432 /* If the sizes of the types differ and this is an VIEW_CONVERT_EXPR,
3433 show no longer constant. */
3434 if (TREE_CODE (expr
) == VIEW_CONVERT_EXPR
3435 && ! operand_equal_p (TYPE_SIZE_UNIT (type
), TYPE_SIZE_UNIT (etype
), 1))
3436 TREE_CONSTANT (expr
) = 0;
3441 #include "gt-ada-utils.h"
3442 #include "gtype-ada.h"