]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/ada/utils.c
3psoccon.ads, [...]: Files added.
[thirdparty/gcc.git] / gcc / ada / utils.c
CommitLineData
415dddc8
RK
1/****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
d479d37f 9 * Copyright (C) 1992-2003, Free Software Foundation, Inc. *
415dddc8
RK
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 2, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License distributed with GNAT; see file COPYING. If not, write *
19 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
20 * MA 02111-1307, USA. *
21 * *
22 * GNAT was originally developed by the GNAT team at New York University. *
71ff80dc 23 * Extensive contributions were provided by Ada Core Technologies Inc. *
415dddc8
RK
24 * *
25 ****************************************************************************/
26
27#include "config.h"
28#include "system.h"
4977bab6
ZW
29#include "coretypes.h"
30#include "tm.h"
415dddc8
RK
31#include "tree.h"
32#include "flags.h"
33#include "defaults.h"
34#include "toplev.h"
35#include "output.h"
36#include "ggc.h"
e2500fed 37#include "debug.h"
415dddc8
RK
38#include "convert.h"
39
40#include "ada.h"
41#include "types.h"
42#include "atree.h"
43#include "elists.h"
44#include "namet.h"
45#include "nlists.h"
46#include "stringt.h"
47#include "uintp.h"
48#include "fe.h"
49#include "sinfo.h"
50#include "einfo.h"
51#include "ada-tree.h"
52#include "gigi.h"
53
54#ifndef MAX_FIXED_MODE_SIZE
55#define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
56#endif
57
58#ifndef MAX_BITS_PER_WORD
59#define MAX_BITS_PER_WORD BITS_PER_WORD
60#endif
61
62/* If nonzero, pretend we are allocating at global level. */
63int force_global;
64
80eaf415 65/* Tree nodes for the various types and decls we create. */
415dddc8
RK
66tree gnat_std_decls[(int) ADT_LAST];
67
07fc65c4
GB
68/* Functions to call for each of the possible raise reasons. */
69tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
70
415dddc8
RK
71/* Associates a GNAT tree node to a GCC tree node. It is used in
72 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
73 of `save_gnu_tree' for more info. */
e2500fed 74static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
415dddc8
RK
75
76/* This listhead is used to record any global objects that need elaboration.
77 TREE_PURPOSE is the variable to be elaborated and TREE_VALUE is the
78 initial value to assign. */
79
e2500fed 80static GTY(()) tree pending_elaborations;
415dddc8
RK
81
82/* This stack allows us to momentarily switch to generating elaboration
83 lists for an inner context. */
84
e2500fed 85struct e_stack GTY(()) {
80eaf415
AJ
86 struct e_stack *next;
87 tree elab_list;
e2500fed
GK
88};
89static GTY(()) struct e_stack *elist_stack;
415dddc8 90
80eaf415 91/* This variable keeps a table for types for each precision so that we only
415dddc8
RK
92 allocate each of them once. Signed and unsigned types are kept separate.
93
94 Note that these types are only used when fold-const requests something
95 special. Perhaps we should NOT share these types; we'll see how it
96 goes later. */
e2500fed 97static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
415dddc8
RK
98
99/* Likewise for float types, but record these by mode. */
e2500fed 100static GTY(()) tree float_types[NUM_MACHINE_MODES];
415dddc8
RK
101
102/* For each binding contour we allocate a binding_level structure which records
103 the entities defined or declared in that contour. Contours include:
104
105 the global one
106 one for each subprogram definition
107 one for each compound statement (declare block)
108
109 Binding contours are used to create GCC tree BLOCK nodes. */
110
e2500fed 111struct binding_level GTY(())
415dddc8
RK
112{
113 /* A chain of ..._DECL nodes for all variables, constants, functions,
114 parameters and type declarations. These ..._DECL nodes are chained
115 through the TREE_CHAIN field. Note that these ..._DECL nodes are stored
116 in the reverse of the order supplied to be compatible with the
117 back-end. */
118 tree names;
119 /* For each level (except the global one), a chain of BLOCK nodes for all
120 the levels that were entered and exited one level down from this one. */
121 tree blocks;
122 /* The BLOCK node for this level, if one has been preallocated.
123 If 0, the BLOCK is allocated (if needed) when the level is popped. */
124 tree this_block;
125 /* The binding level containing this one (the enclosing binding level). */
126 struct binding_level *level_chain;
127};
128
129/* The binding level currently in effect. */
e2500fed 130static GTY(()) struct binding_level *current_binding_level;
415dddc8
RK
131
132/* A chain of binding_level structures awaiting reuse. */
e2500fed 133static GTY((deletable (""))) struct binding_level *free_binding_level;
415dddc8
RK
134
135/* The outermost binding level. This binding level is created when the
136 compiler is started and it will exist through the entire compilation. */
137static struct binding_level *global_binding_level;
138
139/* Binding level structures are initialized by copying this one. */
140static struct binding_level clear_binding_level = {NULL, NULL, NULL, NULL};
141
e2500fed
GK
142struct language_function GTY(())
143{
144 int unused;
145};
146
415dddc8
RK
147static tree merge_sizes PARAMS ((tree, tree, tree, int, int));
148static tree compute_related_constant PARAMS ((tree, tree));
149static tree split_plus PARAMS ((tree, tree *));
150static int value_zerop PARAMS ((tree));
151static tree float_type_for_size PARAMS ((int, enum machine_mode));
152static tree convert_to_fat_pointer PARAMS ((tree, tree));
153static tree convert_to_thin_pointer PARAMS ((tree, tree));
154static tree make_descriptor_field PARAMS ((const char *,tree, tree,
155 tree));
fbf5a39b
AC
156static int value_factor_p PARAMS ((tree, int));
157static int potential_alignment_gap PARAMS ((tree, tree, tree));
415dddc8
RK
158\f
159/* Initialize the association of GNAT nodes to GCC trees. */
160
161void
162init_gnat_to_gnu ()
163{
fbf5a39b
AC
164 associate_gnat_to_gnu
165 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
415dddc8
RK
166
167 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
415dddc8
RK
168}
169
170/* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
171 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
172 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
173
174 If GNU_DECL is zero, a previous association is to be reset. */
175
176void
177save_gnu_tree (gnat_entity, gnu_decl, no_check)
178 Entity_Id gnat_entity;
179 tree gnu_decl;
180 int no_check;
181{
fbf5a39b
AC
182 /* Check that GNAT_ENTITY is not already defined and that it is being set
183 to something which is a decl. Raise gigi 401 if not. Usually, this
184 means GNAT_ENTITY is defined twice, but occasionally is due to some
185 Gigi problem. */
415dddc8 186 if (gnu_decl
07fc65c4 187 && (associate_gnat_to_gnu[gnat_entity - First_Node_Id]
415dddc8
RK
188 || (! no_check && ! DECL_P (gnu_decl))))
189 gigi_abort (401);
190
07fc65c4 191 associate_gnat_to_gnu[gnat_entity - First_Node_Id] = gnu_decl;
415dddc8
RK
192}
193
194/* GNAT_ENTITY is a GNAT tree node for a defining identifier.
195 Return the ..._DECL node that was associated with it. If there is no tree
196 node associated with GNAT_ENTITY, abort.
197
198 In some cases, such as delayed elaboration or expressions that need to
199 be elaborated only once, GNAT_ENTITY is really not an entity. */
200
201tree
202get_gnu_tree (gnat_entity)
203 Entity_Id gnat_entity;
204{
07fc65c4 205 if (! associate_gnat_to_gnu[gnat_entity - First_Node_Id])
415dddc8
RK
206 gigi_abort (402);
207
07fc65c4 208 return associate_gnat_to_gnu[gnat_entity - First_Node_Id];
415dddc8
RK
209}
210
211/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
212
213int
214present_gnu_tree (gnat_entity)
215 Entity_Id gnat_entity;
216{
07fc65c4 217 return (associate_gnat_to_gnu[gnat_entity - First_Node_Id] != NULL_TREE);
415dddc8
RK
218}
219
220\f
221/* Return non-zero if we are currently in the global binding level. */
222
223int
224global_bindings_p ()
225{
226 return (force_global != 0 || current_binding_level == global_binding_level
227 ? -1 : 0);
228}
229
230/* Return the list of declarations in the current level. Note that this list
231 is in reverse order (it has to be so for back-end compatibility). */
232
233tree
234getdecls ()
235{
236 return current_binding_level->names;
237}
238
239/* Nonzero if the current level needs to have a BLOCK made. */
240
241int
242kept_level_p ()
243{
244 return (current_binding_level->names != 0);
245}
246
247/* Enter a new binding level. The input parameter is ignored, but has to be
248 specified for back-end compatibility. */
249
250void
251pushlevel (ignore)
252 int ignore ATTRIBUTE_UNUSED;
253{
254 struct binding_level *newlevel = NULL;
255
256 /* Reuse a struct for this binding level, if there is one. */
257 if (free_binding_level)
258 {
259 newlevel = free_binding_level;
260 free_binding_level = free_binding_level->level_chain;
261 }
262 else
263 newlevel
e2500fed 264 = (struct binding_level *) ggc_alloc (sizeof (struct binding_level));
415dddc8
RK
265
266 *newlevel = clear_binding_level;
267
268 /* Add this level to the front of the chain (stack) of levels that are
269 active. */
270 newlevel->level_chain = current_binding_level;
271 current_binding_level = newlevel;
272}
273
274/* Exit a binding level.
275 Pop the level off, and restore the state of the identifier-decl mappings
276 that were in effect when this level was entered.
277
278 If KEEP is nonzero, this level had explicit declarations, so
279 and create a "block" (a BLOCK node) for the level
280 to record its declarations and subblocks for symbol table output.
281
282 If FUNCTIONBODY is nonzero, this level is the body of a function,
283 so create a block as if KEEP were set and also clear out all
284 label names.
285
286 If REVERSE is nonzero, reverse the order of decls before putting
287 them into the BLOCK. */
288
289tree
290poplevel (keep, reverse, functionbody)
291 int keep;
292 int reverse;
293 int functionbody;
294{
295 /* Points to a GCC BLOCK tree node. This is the BLOCK node construted for the
296 binding level that we are about to exit and which is returned by this
297 routine. */
298 tree block = NULL_TREE;
299 tree decl_chain;
300 tree decl_node;
301 tree subblock_chain = current_binding_level->blocks;
302 tree subblock_node;
303 int block_previously_created;
304
305 /* Reverse the list of XXXX_DECL nodes if desired. Note that the ..._DECL
306 nodes chained through the `names' field of current_binding_level are in
44d6a706 307 reverse order except for PARM_DECL node, which are explicitly stored in
415dddc8
RK
308 the right order. */
309 current_binding_level->names
310 = decl_chain = (reverse) ? nreverse (current_binding_level->names)
311 : current_binding_level->names;
312
313 /* Output any nested inline functions within this block which must be
314 compiled because their address is needed. */
315 for (decl_node = decl_chain; decl_node; decl_node = TREE_CHAIN (decl_node))
316 if (TREE_CODE (decl_node) == FUNCTION_DECL
317 && ! TREE_ASM_WRITTEN (decl_node) && TREE_ADDRESSABLE (decl_node)
318 && DECL_INITIAL (decl_node) != 0)
319 {
320 push_function_context ();
321 output_inline_function (decl_node);
322 pop_function_context ();
323 }
324
325 block = 0;
326 block_previously_created = (current_binding_level->this_block != 0);
327 if (block_previously_created)
328 block = current_binding_level->this_block;
329 else if (keep || functionbody)
330 block = make_node (BLOCK);
331 if (block != 0)
332 {
333 BLOCK_VARS (block) = keep ? decl_chain : 0;
334 BLOCK_SUBBLOCKS (block) = subblock_chain;
335 }
336
337 /* Record the BLOCK node just built as the subblock its enclosing scope. */
338 for (subblock_node = subblock_chain; subblock_node;
339 subblock_node = TREE_CHAIN (subblock_node))
340 BLOCK_SUPERCONTEXT (subblock_node) = block;
341
342 /* Clear out the meanings of the local variables of this level. */
343
344 for (subblock_node = decl_chain; subblock_node;
345 subblock_node = TREE_CHAIN (subblock_node))
346 if (DECL_NAME (subblock_node) != 0)
80eaf415 347 /* If the identifier was used or addressed via a local extern decl,
415dddc8
RK
348 don't forget that fact. */
349 if (DECL_EXTERNAL (subblock_node))
350 {
351 if (TREE_USED (subblock_node))
352 TREE_USED (DECL_NAME (subblock_node)) = 1;
353 if (TREE_ADDRESSABLE (subblock_node))
354 TREE_ADDRESSABLE (DECL_ASSEMBLER_NAME (subblock_node)) = 1;
355 }
356
357 {
358 /* Pop the current level, and free the structure for reuse. */
359 struct binding_level *level = current_binding_level;
360 current_binding_level = current_binding_level->level_chain;
361 level->level_chain = free_binding_level;
362 free_binding_level = level;
363 }
364
365 if (functionbody)
366 {
367 /* This is the top level block of a function. The ..._DECL chain stored
368 in BLOCK_VARS are the function's parameters (PARM_DECL nodes). Don't
369 leave them in the BLOCK because they are found in the FUNCTION_DECL
370 instead. */
371 DECL_INITIAL (current_function_decl) = block;
372 BLOCK_VARS (block) = 0;
373 }
374 else if (block)
375 {
376 if (!block_previously_created)
377 current_binding_level->blocks
378 = chainon (current_binding_level->blocks, block);
379 }
380
381 /* If we did not make a block for the level just exited, any blocks made for
382 inner levels (since they cannot be recorded as subblocks in that level)
383 must be carried forward so they will later become subblocks of something
384 else. */
385 else if (subblock_chain)
386 current_binding_level->blocks
387 = chainon (current_binding_level->blocks, subblock_chain);
388 if (block)
389 TREE_USED (block) = 1;
390
391 return block;
392}
393\f
394/* Insert BLOCK at the end of the list of subblocks of the
395 current binding level. This is used when a BIND_EXPR is expanded,
396 to handle the BLOCK node inside the BIND_EXPR. */
397
398void
399insert_block (block)
400 tree block;
401{
402 TREE_USED (block) = 1;
403 current_binding_level->blocks
404 = chainon (current_binding_level->blocks, block);
405}
406
407/* Set the BLOCK node for the innermost scope
408 (the one we are currently in). */
409
410void
411set_block (block)
412 tree block;
413{
414 current_binding_level->this_block = block;
415 current_binding_level->names = chainon (current_binding_level->names,
416 BLOCK_VARS (block));
417 current_binding_level->blocks = chainon (current_binding_level->blocks,
418 BLOCK_SUBBLOCKS (block));
419}
420
421/* Records a ..._DECL node DECL as belonging to the current lexical scope.
422 Returns the ..._DECL node. */
423
424tree
425pushdecl (decl)
426 tree decl;
427{
428 struct binding_level *b;
429
430 /* If at top level, there is no context. But PARM_DECLs always go in the
431 level of its function. */
432 if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
433 {
434 b = global_binding_level;
435 DECL_CONTEXT (decl) = 0;
436 }
437 else
438 {
439 b = current_binding_level;
440 DECL_CONTEXT (decl) = current_function_decl;
441 }
442
443 /* Put the declaration on the list. The list of declarations is in reverse
444 order. The list will be reversed later if necessary. This needs to be
445 this way for compatibility with the back-end.
446
447 Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into the list. They
448 will cause trouble with the debugger and aren't needed anyway. */
449 if (TREE_CODE (decl) != TYPE_DECL
450 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
451 {
452 TREE_CHAIN (decl) = b->names;
453 b->names = decl;
454 }
455
456 /* For the declaration of a type, set its name if it either is not already
457 set, was set to an IDENTIFIER_NODE, indicating an internal name,
458 or if the previous type name was not derived from a source name.
459 We'd rather have the type named with a real name and all the pointer
460 types to the same object have the same POINTER_TYPE node. Code in this
461 function in c-decl.c makes a copy of the type node here, but that may
462 cause us trouble with incomplete types, so let's not try it (at least
463 for now). */
464
465 if (TREE_CODE (decl) == TYPE_DECL
466 && DECL_NAME (decl) != 0
467 && (TYPE_NAME (TREE_TYPE (decl)) == 0
468 || TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == IDENTIFIER_NODE
469 || (TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == TYPE_DECL
470 && DECL_ARTIFICIAL (TYPE_NAME (TREE_TYPE (decl)))
471 && ! DECL_ARTIFICIAL (decl))))
472 TYPE_NAME (TREE_TYPE (decl)) = decl;
473
474 return decl;
475}
476\f
477/* Do little here. Set up the standard declarations later after the
478 front end has been run. */
479
480void
f5e99456 481gnat_init_decl_processing ()
415dddc8 482{
d479d37f 483 input_line = 0;
415dddc8 484
415dddc8
RK
485 /* Make the binding_level structure for global names. */
486 current_function_decl = 0;
487 current_binding_level = 0;
488 free_binding_level = 0;
489 pushlevel (0);
490 global_binding_level = current_binding_level;
491
492 build_common_tree_nodes (0);
493
494 /* In Ada, we use a signed type for SIZETYPE. Use the signed type
fbf5a39b
AC
495 corresponding to the size of Pmode. In most cases when ptr_mode and
496 Pmode differ, C will use the width of ptr_mode as sizetype. But we get
497 far better code using the width of Pmode. Make this here since we need
415dddc8 498 this before we can expand the GNAT types. */
fbf5a39b 499 set_sizetype (gnat_type_for_size (GET_MODE_BITSIZE (Pmode), 0));
415dddc8
RK
500 build_common_tree_nodes_2 (0);
501
502 pushdecl (build_decl (TYPE_DECL, get_identifier (SIZE_TYPE), sizetype));
503
504 /* We need to make the integer type before doing anything else.
505 We stitch this in to the appropriate GNAT type later. */
506 pushdecl (build_decl (TYPE_DECL, get_identifier ("integer"),
507 integer_type_node));
508 pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned char"),
509 char_type_node));
510
511 ptr_void_type_node = build_pointer_type (void_type_node);
512
513}
514
80eaf415 515/* Create the predefined scalar types such as `integer_type_node' needed
415dddc8
RK
516 in the gcc back-end and initialize the global binding level. */
517
518void
519init_gigi_decls (long_long_float_type, exception_type)
520 tree long_long_float_type, exception_type;
521{
07fc65c4
GB
522 tree endlink, decl;
523 unsigned int i;
415dddc8
RK
524
525 /* Set the types that GCC and Gigi use from the front end. We would like
526 to do this for char_type_node, but it needs to correspond to the C
527 char type. */
528 if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
529 {
530 /* In this case, the builtin floating point types are VAX float,
531 so make up a type for use. */
532 longest_float_type_node = make_node (REAL_TYPE);
533 TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
534 layout_type (longest_float_type_node);
535 pushdecl (build_decl (TYPE_DECL, get_identifier ("longest float type"),
536 longest_float_type_node));
537 }
538 else
539 longest_float_type_node = TREE_TYPE (long_long_float_type);
540
541 except_type_node = TREE_TYPE (exception_type);
542
b0c48229 543 unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
415dddc8
RK
544 pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned int"),
545 unsigned_type_node));
546
547 void_type_decl_node
548 = pushdecl (build_decl (TYPE_DECL, get_identifier ("void"),
549 void_type_node));
550
551 void_ftype = build_function_type (void_type_node, NULL_TREE);
552 ptr_void_ftype = build_pointer_type (void_ftype);
553
554 /* Now declare runtime functions. */
555 endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);
556
557 /* malloc is a function declaration tree for a function to allocate
558 memory. */
559 malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
560 NULL_TREE,
561 build_function_type (ptr_void_type_node,
562 tree_cons (NULL_TREE,
563 sizetype,
564 endlink)),
565 NULL_TREE, 0, 1, 1, 0);
566
567 /* free is a function declaration tree for a function to free memory. */
415dddc8
RK
568 free_decl
569 = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
570 build_function_type (void_type_node,
571 tree_cons (NULL_TREE,
572 ptr_void_type_node,
573 endlink)),
574 NULL_TREE, 0, 1, 1, 0);
575
576 /* Make the types and functions used for exception processing. */
577 jmpbuf_type
b0c48229 578 = build_array_type (gnat_type_for_mode (Pmode, 0),
415dddc8
RK
579 build_index_type (build_int_2 (5, 0)));
580 pushdecl (build_decl (TYPE_DECL, get_identifier ("JMPBUF_T"), jmpbuf_type));
581 jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);
582
583 /* Functions to get and set the jumpbuf pointer for the current thread. */
584 get_jmpbuf_decl
585 = create_subprog_decl
586 (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
587 NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
588 NULL_TREE, 0, 1, 1, 0);
589
590 set_jmpbuf_decl
591 = create_subprog_decl
592 (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
593 NULL_TREE,
80eaf415 594 build_function_type (void_type_node,
415dddc8
RK
595 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
596 NULL_TREE, 0, 1, 1, 0);
597
598 /* Function to get the current exception. */
599 get_excptr_decl
600 = create_subprog_decl
601 (get_identifier ("system__soft_links__get_gnat_exception"),
602 NULL_TREE,
603 build_function_type (build_pointer_type (except_type_node), NULL_TREE),
604 NULL_TREE, 0, 1, 1, 0);
605
07fc65c4 606 /* Functions that raise exceptions. */
415dddc8
RK
607 raise_nodefer_decl
608 = create_subprog_decl
609 (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
610 build_function_type (void_type_node,
611 tree_cons (NULL_TREE,
612 build_pointer_type (except_type_node),
613 endlink)),
614 NULL_TREE, 0, 1, 1, 0);
615
fbf5a39b
AC
616 /* Hooks to call when entering/leaving an exception handler. */
617 begin_handler_decl
618 = create_subprog_decl (get_identifier ("__gnat_begin_handler"), NULL_TREE,
619 build_function_type (void_type_node,
620 tree_cons (NULL_TREE,
621 ptr_void_type_node,
622 endlink)),
623 NULL_TREE, 0, 1, 1, 0);
624
625 end_handler_decl
626 = create_subprog_decl (get_identifier ("__gnat_end_handler"), NULL_TREE,
627 build_function_type (void_type_node,
628 tree_cons (NULL_TREE,
629 ptr_void_type_node,
630 endlink)),
631 NULL_TREE, 0, 1, 1, 0);
632
07fc65c4
GB
633 /* If in no exception handlers mode, all raise statements are redirected to
634 __gnat_last_chance_handler. No need to redefine raise_nodefer_decl, since
635 this procedure will never be called in this mode. */
636 if (No_Exception_Handlers_Set ())
637 {
638 decl
639 = create_subprog_decl
640 (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
641 build_function_type (void_type_node,
642 tree_cons (NULL_TREE,
643 build_pointer_type (char_type_node),
644 tree_cons (NULL_TREE,
645 integer_type_node,
646 endlink))),
647 NULL_TREE, 0, 1, 1, 0);
648
62c71f4b 649 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
07fc65c4
GB
650 gnat_raise_decls[i] = decl;
651 }
652 else
653 /* Otherwise, make one decl for each exception reason. */
62c71f4b 654 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
07fc65c4
GB
655 {
656 char name[17];
657
658 sprintf (name, "__gnat_rcheck_%.2d", i);
659 gnat_raise_decls[i]
660 = create_subprog_decl
661 (get_identifier (name), NULL_TREE,
662 build_function_type (void_type_node,
663 tree_cons (NULL_TREE,
664 build_pointer_type
665 (char_type_node),
666 tree_cons (NULL_TREE,
667 integer_type_node,
668 endlink))),
669 NULL_TREE, 0, 1, 1, 0);
670 }
415dddc8
RK
671
672 /* Indicate that these never return. */
415dddc8 673 TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
415dddc8 674 TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
415dddc8
RK
675 TREE_TYPE (raise_nodefer_decl)
676 = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
677 TYPE_QUAL_VOLATILE);
07fc65c4 678
62c71f4b 679 for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
07fc65c4
GB
680 {
681 TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
682 TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
683 TREE_TYPE (gnat_raise_decls[i])
684 = build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
685 TYPE_QUAL_VOLATILE);
686 }
415dddc8
RK
687
688 /* setjmp returns an integer and has one operand, which is a pointer to
689 a jmpbuf. */
690 setjmp_decl
691 = create_subprog_decl
692 (get_identifier ("setjmp"), NULL_TREE,
693 build_function_type (integer_type_node,
694 tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
695 NULL_TREE, 0, 1, 1, 0);
696
697 DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
698 DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;
699
07fc65c4 700 main_identifier_node = get_identifier ("main");
415dddc8
RK
701}
702\f
415dddc8 703/* Given a record type (RECORD_TYPE) and a chain of FIELD_DECL
80eaf415 704 nodes (FIELDLIST), finish constructing the record or union type.
415dddc8 705 If HAS_REP is nonzero, this record has a rep clause; don't call
80eaf415 706 layout_type but merely set the size and alignment ourselves.
415dddc8
RK
707 If DEFER_DEBUG is nonzero, do not call the debugging routines
708 on this type; it will be done later. */
709
710void
711finish_record_type (record_type, fieldlist, has_rep, defer_debug)
712 tree record_type;
713 tree fieldlist;
714 int has_rep;
715 int defer_debug;
716{
717 enum tree_code code = TREE_CODE (record_type);
718 tree ada_size = bitsize_zero_node;
719 tree size = bitsize_zero_node;
720 tree size_unit = size_zero_node;
07fc65c4 721 int var_size = 0;
415dddc8
RK
722 tree field;
723
724 TYPE_FIELDS (record_type) = fieldlist;
725
726 if (TYPE_NAME (record_type) != 0
727 && TREE_CODE (TYPE_NAME (record_type)) == TYPE_DECL)
728 TYPE_STUB_DECL (record_type) = TYPE_NAME (record_type);
729 else
730 TYPE_STUB_DECL (record_type)
731 = pushdecl (build_decl (TYPE_DECL, TYPE_NAME (record_type),
732 record_type));
733
734 /* We don't need both the typedef name and the record name output in
735 the debugging information, since they are the same. */
736 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
737
738 /* Globally initialize the record first. If this is a rep'ed record,
739 that just means some initializations; otherwise, layout the record. */
740
741 if (has_rep)
742 {
743 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
744 TYPE_MODE (record_type) = BLKmode;
745 if (TYPE_SIZE (record_type) == 0)
746 {
747 TYPE_SIZE (record_type) = bitsize_zero_node;
748 TYPE_SIZE_UNIT (record_type) = size_zero_node;
749 }
fbf5a39b
AC
750 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
751 out just like a UNION_TYPE, since the size will be fixed. */
752 else if (code == QUAL_UNION_TYPE)
753 code = UNION_TYPE;
415dddc8
RK
754 }
755 else
756 {
757 /* Ensure there isn't a size already set. There can be in an error
758 case where there is a rep clause but all fields have errors and
759 no longer have a position. */
760 TYPE_SIZE (record_type) = 0;
761 layout_type (record_type);
762 }
763
764 /* At this point, the position and size of each field is known. It was
765 either set before entry by a rep clause, or by laying out the type
766 above. We now make a pass through the fields (in reverse order for
767 QUAL_UNION_TYPEs) to compute the Ada size; the GCC size and alignment
768 (for rep'ed records that are not padding types); and the mode (for
769 rep'ed records). */
770
771 if (code == QUAL_UNION_TYPE)
772 fieldlist = nreverse (fieldlist);
773
774 for (field = fieldlist; field; field = TREE_CHAIN (field))
775 {
776 tree type = TREE_TYPE (field);
777 tree this_size = DECL_SIZE (field);
778 tree this_size_unit = DECL_SIZE_UNIT (field);
779 tree this_ada_size = DECL_SIZE (field);
780
07fc65c4
GB
781 /* We need to make an XVE/XVU record if any field has variable size,
782 whether or not the record does. For example, if we have an union,
783 it may be that all fields, rounded up to the alignment, have the
784 same size, in which case we'll use that size. But the debug
785 output routines (except Dwarf2) won't be able to output the fields,
786 so we need to make the special record. */
787 if (TREE_CODE (this_size) != INTEGER_CST)
788 var_size = 1;
789
415dddc8
RK
790 if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
791 || TREE_CODE (type) == QUAL_UNION_TYPE)
792 && ! TYPE_IS_FAT_POINTER_P (type)
793 && ! TYPE_CONTAINS_TEMPLATE_P (type)
794 && TYPE_ADA_SIZE (type) != 0)
795 this_ada_size = TYPE_ADA_SIZE (type);
796
797 if (has_rep && ! DECL_BIT_FIELD (field))
798 TYPE_ALIGN (record_type)
799 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
800
801 switch (code)
802 {
803 case UNION_TYPE:
804 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
805 size = size_binop (MAX_EXPR, size, this_size);
806 size_unit = size_binop (MAX_EXPR, size_unit, this_size_unit);
807 break;
808
809 case QUAL_UNION_TYPE:
810 ada_size
811 = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
812 this_ada_size, ada_size));
813 size = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
814 this_size, size));
815 size_unit = fold (build (COND_EXPR, sizetype, DECL_QUALIFIER (field),
816 this_size_unit, size_unit));
817 break;
818
819 case RECORD_TYPE:
820 /* Since we know here that all fields are sorted in order of
821 increasing bit position, the size of the record is one
822 higher than the ending bit of the last field processed
823 unless we have a rep clause, since in that case we might
824 have a field outside a QUAL_UNION_TYPE that has a higher ending
825 position. So use a MAX in that case. Also, if this field is a
826 QUAL_UNION_TYPE, we need to take into account the previous size in
827 the case of empty variants. */
828 ada_size
829 = merge_sizes (ada_size, bit_position (field), this_ada_size,
830 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
831 size = merge_sizes (size, bit_position (field), this_size,
832 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
833 size_unit
834 = merge_sizes (size_unit, byte_position (field), this_size_unit,
835 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
836 break;
837
838 default:
839 abort ();
840 }
841 }
842
843 if (code == QUAL_UNION_TYPE)
844 nreverse (fieldlist);
845
846 /* If this is a padding record, we never want to make the size smaller than
847 what was specified in it, if any. */
848 if (TREE_CODE (record_type) == RECORD_TYPE
849 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type) != 0)
850 {
851 size = TYPE_SIZE (record_type);
852 size_unit = TYPE_SIZE_UNIT (record_type);
853 }
854
855 /* Now set any of the values we've just computed that apply. */
856 if (! TYPE_IS_FAT_POINTER_P (record_type)
857 && ! TYPE_CONTAINS_TEMPLATE_P (record_type))
e2500fed 858 SET_TYPE_ADA_SIZE (record_type, ada_size);
415dddc8 859
fbf5a39b 860 if (has_rep)
415dddc8 861 {
fbf5a39b
AC
862 if (! (TREE_CODE (record_type) == RECORD_TYPE
863 && TYPE_IS_PADDING_P (record_type)
864 && CONTAINS_PLACEHOLDER_P (size)))
865 {
866 TYPE_SIZE (record_type) = round_up (size, TYPE_ALIGN (record_type));
867 TYPE_SIZE_UNIT (record_type)
868 = round_up (size_unit,
869 TYPE_ALIGN (record_type) / BITS_PER_UNIT);
870 }
415dddc8 871
fbf5a39b
AC
872 compute_record_mode (record_type);
873 }
415dddc8
RK
874
875 if (! defer_debug)
876 {
877 /* If this record is of variable size, rename it so that the
878 debugger knows it is and make a new, parallel, record
879 that tells the debugger how the record is laid out. See
fbf5a39b
AC
880 exp_dbug.ads. But don't do this for records that are padding
881 since they confuse GDB. */
882 if (var_size
883 && ! (TREE_CODE (record_type) == RECORD_TYPE
884 && TYPE_IS_PADDING_P (record_type)))
415dddc8
RK
885 {
886 tree new_record_type
887 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
888 ? UNION_TYPE : TREE_CODE (record_type));
889 tree orig_id = DECL_NAME (TYPE_STUB_DECL (record_type));
890 tree new_id
891 = concat_id_with_name (orig_id,
892 TREE_CODE (record_type) == QUAL_UNION_TYPE
893 ? "XVU" : "XVE");
894 tree last_pos = bitsize_zero_node;
895 tree old_field;
fbf5a39b 896 tree prev_old_field = 0;
415dddc8
RK
897
898 TYPE_NAME (new_record_type) = new_id;
899 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
900 TYPE_STUB_DECL (new_record_type)
901 = pushdecl (build_decl (TYPE_DECL, new_id, new_record_type));
902 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
903 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
904 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
905 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
906
907 /* Now scan all the fields, replacing each field with a new
908 field corresponding to the new encoding. */
909 for (old_field = TYPE_FIELDS (record_type); old_field != 0;
910 old_field = TREE_CHAIN (old_field))
911 {
912 tree field_type = TREE_TYPE (old_field);
913 tree field_name = DECL_NAME (old_field);
914 tree new_field;
915 tree curpos = bit_position (old_field);
916 int var = 0;
917 unsigned int align = 0;
918 tree pos;
919
920 /* See how the position was modified from the last position.
921
922 There are two basic cases we support: a value was added
923 to the last position or the last position was rounded to
924 a boundary and they something was added. Check for the
925 first case first. If not, see if there is any evidence
926 of rounding. If so, round the last position and try
80eaf415 927 again.
415dddc8
RK
928
929 If this is a union, the position can be taken as zero. */
930
931 if (TREE_CODE (new_record_type) == UNION_TYPE)
932 pos = bitsize_zero_node, align = 0;
933 else
934 pos = compute_related_constant (curpos, last_pos);
935
936 if (pos == 0 && TREE_CODE (curpos) == MULT_EXPR
937 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST)
938 {
939 align = TREE_INT_CST_LOW (TREE_OPERAND (curpos, 1));
940 pos = compute_related_constant (curpos,
941 round_up (last_pos, align));
942 }
943 else if (pos == 0 && TREE_CODE (curpos) == PLUS_EXPR
944 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
945 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
946 && host_integerp (TREE_OPERAND
947 (TREE_OPERAND (curpos, 0), 1),
948 1))
949 {
950 align
951 = tree_low_cst
952 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
953 pos = compute_related_constant (curpos,
954 round_up (last_pos, align));
955 }
fbf5a39b
AC
956 else if (potential_alignment_gap (prev_old_field, old_field,
957 pos))
958 {
959 align = TYPE_ALIGN (field_type);
960 pos = compute_related_constant (curpos,
961 round_up (last_pos, align));
962 }
415dddc8
RK
963
964 /* If we can't compute a position, set it to zero.
965
966 ??? We really should abort here, but it's too much work
967 to get this correct for all cases. */
968
969 if (pos == 0)
970 pos = bitsize_zero_node;
971
972 /* See if this type is variable-size and make a new type
973 and indicate the indirection if so. */
07fc65c4 974 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
415dddc8
RK
975 {
976 field_type = build_pointer_type (field_type);
977 var = 1;
978 }
979
980 /* Make a new field name, if necessary. */
981 if (var || align != 0)
982 {
983 char suffix[6];
984
985 if (align != 0)
986 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
987 align / BITS_PER_UNIT);
988 else
989 strcpy (suffix, "XVL");
990
991 field_name = concat_id_with_name (field_name, suffix);
992 }
993
994 new_field = create_field_decl (field_name, field_type,
995 new_record_type, 0,
07fc65c4 996 DECL_SIZE (old_field), pos, 0);
415dddc8
RK
997 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
998 TYPE_FIELDS (new_record_type) = new_field;
999
1000 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1001 zero. The only time it's not the last field of the record
1002 is when there are other components at fixed positions after
1003 it (meaning there was a rep clause for every field) and we
1004 want to be able to encode them. */
1005 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1006 (TREE_CODE (TREE_TYPE (old_field))
1007 == QUAL_UNION_TYPE)
1008 ? bitsize_zero_node
07fc65c4 1009 : DECL_SIZE (old_field));
fbf5a39b 1010 prev_old_field = old_field;
415dddc8
RK
1011 }
1012
1013 TYPE_FIELDS (new_record_type)
1014 = nreverse (TYPE_FIELDS (new_record_type));
1015
1016 rest_of_type_compilation (new_record_type, global_bindings_p ());
1017 }
1018
1019 rest_of_type_compilation (record_type, global_bindings_p ());
1020 }
1021}
1022
1023/* Utility function of above to merge LAST_SIZE, the previous size of a record
1024 with FIRST_BIT and SIZE that describe a field. SPECIAL is nonzero
1025 if this represents a QUAL_UNION_TYPE in which case we must look for
1026 COND_EXPRs and replace a value of zero with the old size. If HAS_REP
1027 is nonzero, we must take the MAX of the end position of this field
1028 with LAST_SIZE. In all other cases, we use FIRST_BIT plus SIZE.
1029
1030 We return an expression for the size. */
1031
static tree
merge_sizes (last_size, first_bit, size, special, has_rep)
     tree last_size;
     tree first_bit, size;
     int special;
     int has_rep;
{
  tree type = TREE_TYPE (last_size);
  tree new;

  /* Normal case: the field contributes FIRST_BIT + SIZE.  With a rep
     clause a later field may end before an earlier one, so the record
     size is the MAX of the previous size and this field's end.  */
  if (! special || TREE_CODE (size) != COND_EXPR)
    {
      new = size_binop (PLUS_EXPR, first_bit, size);
      if (has_rep)
	new = size_binop (MAX_EXPR, last_size, new);
    }

  /* QUAL_UNION_TYPE case: SIZE is a COND_EXPR selecting between variant
     sizes.  Rebuild the COND_EXPR, recursing into each arm; a zero arm
     denotes an empty variant, whose size is the previous LAST_SIZE.  */
  else
    new = fold (build (COND_EXPR, type, TREE_OPERAND (size, 0),
		       integer_zerop (TREE_OPERAND (size, 1))
		       ? last_size : merge_sizes (last_size, first_bit,
						  TREE_OPERAND (size, 1),
						  1, has_rep),
		       integer_zerop (TREE_OPERAND (size, 2))
		       ? last_size : merge_sizes (last_size, first_bit,
						  TREE_OPERAND (size, 2),
						  1, has_rep)));

  /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
     when fed through substitute_in_expr) into thinking that a constant
     size is not constant.  */
  while (TREE_CODE (new) == NON_LVALUE_EXPR)
    new = TREE_OPERAND (new, 0);

  return new;
}
1068
1069/* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1070 related by the addition of a constant. Return that constant if so. */
1071
1072static tree
1073compute_related_constant (op0, op1)
1074 tree op0, op1;
1075{
1076 tree op0_var, op1_var;
1077 tree op0_con = split_plus (op0, &op0_var);
1078 tree op1_con = split_plus (op1, &op1_var);
1079 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1080
1081 if (operand_equal_p (op0_var, op1_var, 0))
1082 return result;
1083 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1084 return result;
1085 else
1086 return 0;
1087}
1088
1089/* Utility function of above to split a tree OP which may be a sum, into a
1090 constant part, which is returned, and a variable part, which is stored
fbf5a39b
AC
1091 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1092 bitsizetype. */
415dddc8
RK
1093
static tree
split_plus (in, pvar)
     tree in;
     tree *pvar;
{
  /* Strip NOPS in order to ease the tree traversal and maximize the
     potential for constant or plus/minus discovery.  We need to be careful
     to always return and set *pvar to bitsizetype trees, but it's worth
     the effort.  */
  STRIP_NOPS (in);

  /* Default assumption: the whole of IN is variable; overridden below
     whenever a constant part can be split off.  */
  *pvar = convert (bitsizetype, in);

  if (TREE_CODE (in) == INTEGER_CST)
    {
      /* A bare constant has no variable part at all.  */
      *pvar = bitsize_zero_node;
      return convert (bitsizetype, in);
    }
  else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
    {
      /* Split each operand recursively and recombine the pieces with the
	 same PLUS/MINUS operation.  */
      tree lhs_var, rhs_var;
      tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
      tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);

      /* If neither operand yielded a constant part, keep *PVAR as the
	 whole expression (set above) and report a zero constant.  */
      if (lhs_var == TREE_OPERAND (in, 0)
	  && rhs_var == TREE_OPERAND (in, 1))
	return bitsize_zero_node;

      *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
      return size_binop (TREE_CODE (in), lhs_con, rhs_con);
    }
  else
    return bitsize_zero_node;
}
1128\f
1129/* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1130 subprogram. If it is void_type_node, then we are dealing with a procedure,
1131 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1132 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1133 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1134 RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
80eaf415 1135 object. RETURNS_BY_REF is nonzero if the function returns by reference.
415dddc8
RK
1136 RETURNS_WITH_DSP is nonzero if the function is to return with a
1137 depressed stack pointer. */
1138
1139tree
1140create_subprog_type (return_type, param_decl_list, cico_list,
1141 returns_unconstrained, returns_by_ref, returns_with_dsp)
1142 tree return_type;
1143 tree param_decl_list;
1144 tree cico_list;
1145 int returns_unconstrained, returns_by_ref, returns_with_dsp;
1146{
1147 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1148 the subprogram formal parameters. This list is generated by traversing the
1149 input list of PARM_DECL nodes. */
1150 tree param_type_list = NULL;
1151 tree param_decl;
1152 tree type;
1153
1154 for (param_decl = param_decl_list; param_decl;
1155 param_decl = TREE_CHAIN (param_decl))
1156 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1157 param_type_list);
1158
1159 /* The list of the function parameter types has to be terminated by the void
1160 type to signal to the back-end that we are not dealing with a variable
1161 parameter subprogram, but that the subprogram has a fixed number of
1162 parameters. */
1163 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1164
1165 /* The list of argument types has been created in reverse
1166 so nreverse it. */
1167 param_type_list = nreverse (param_type_list);
1168
1169 type = build_function_type (return_type, param_type_list);
1170
1171 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1172 or the new type should, make a copy of TYPE. Likewise for
1173 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1174 if (TYPE_CI_CO_LIST (type) != 0 || cico_list != 0
1175 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1176 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref)
1177 type = copy_type (type);
1178
e2500fed 1179 SET_TYPE_CI_CO_LIST (type, cico_list);
415dddc8
RK
1180 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1181 TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
1182 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1183 return type;
1184}
1185\f
1186/* Return a copy of TYPE but safe to modify in any way. */
1187
1188tree
1189copy_type (type)
1190 tree type;
1191{
1192 tree new = copy_node (type);
1193
1194 /* copy_node clears this field instead of copying it, because it is
1195 aliased with TREE_CHAIN. */
1196 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
1197
1198 TYPE_POINTER_TO (new) = 0;
1199 TYPE_REFERENCE_TO (new) = 0;
1200 TYPE_MAIN_VARIANT (new) = new;
1201 TYPE_NEXT_VARIANT (new) = 0;
1202
1203 return new;
1204}
1205\f
1206/* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1207 TYPE_INDEX_TYPE is INDEX. */
1208
1209tree
1210create_index_type (min, max, index)
1211 tree min, max;
1212 tree index;
1213{
1214 /* First build a type for the desired range. */
1215 tree type = build_index_2_type (min, max);
1216
1217 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1218 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1219 is set, but not to INDEX, make a copy of this type with the requested
1220 index type. Note that we have no way of sharing these types, but that's
1221 only a small hole. */
1222 if (TYPE_INDEX_TYPE (type) == index)
1223 return type;
1224 else if (TYPE_INDEX_TYPE (type) != 0)
1225 type = copy_type (type);
1226
e2500fed 1227 SET_TYPE_INDEX_TYPE (type, index);
415dddc8
RK
1228 return type;
1229}
1230\f
1231/* Return a TYPE_DECL node. TYPE_NAME gives the name of the type (a character
80eaf415 1232 string) and TYPE is a ..._TYPE node giving its data type.
415dddc8
RK
1233 ARTIFICIAL_P is nonzero if this is a declaration that was generated
1234 by the compiler. DEBUG_INFO_P is nonzero if we need to write debugging
1235 information about this type. */
1236
1237tree
1238create_type_decl (type_name, type, attr_list, artificial_p, debug_info_p)
1239 tree type_name;
1240 tree type;
1241 struct attrib *attr_list;
1242 int artificial_p;
1243 int debug_info_p;
1244{
1245 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1246 enum tree_code code = TREE_CODE (type);
1247
1248 DECL_ARTIFICIAL (type_decl) = artificial_p;
1249 pushdecl (type_decl);
1250 process_attributes (type_decl, attr_list);
1251
1252 /* Pass type declaration information to the debugger unless this is an
1253 UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
1254 and ENUMERAL_TYPE or RECORD_TYPE which is handled separately,
1255 a dummy type, which will be completed later, or a type for which
1256 debugging information was not requested. */
1257 if (code == UNCONSTRAINED_ARRAY_TYPE || TYPE_IS_DUMMY_P (type)
1258 || ! debug_info_p)
1259 DECL_IGNORED_P (type_decl) = 1;
1260 else if (code != ENUMERAL_TYPE && code != RECORD_TYPE
1261 && ! ((code == POINTER_TYPE || code == REFERENCE_TYPE)
1262 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
1263 rest_of_decl_compilation (type_decl, NULL, global_bindings_p (), 0);
1264
1265 return type_decl;
1266}
1267
1268/* Returns a GCC VAR_DECL node. VAR_NAME gives the name of the variable.
1269 ASM_NAME is its assembler name (if provided). TYPE is its data type
1270 (a GCC ..._TYPE node). VAR_INIT is the GCC tree for an optional initial
1271 expression; NULL_TREE if none.
1272
1273 CONST_FLAG is nonzero if this variable is constant.
1274
1275 PUBLIC_FLAG is nonzero if this definition is to be made visible outside of
1276 the current compilation unit. This flag should be set when processing the
80eaf415 1277 variable definitions in a package specification. EXTERN_FLAG is nonzero
415dddc8 1278 when processing an external variable declaration (as opposed to a
80eaf415 1279 definition: no storage is to be allocated for the variable here).
415dddc8
RK
1280
1281 STATIC_FLAG is only relevant when not at top level. In that case
1282 it indicates whether to always allocate storage to the variable. */
1283
tree
create_var_decl (var_name, asm_name, type, var_init, const_flag, public_flag,
		 extern_flag, static_flag, attr_list)
     tree var_name;
     tree asm_name;
     tree type;
     tree var_init;
     int const_flag;
     int public_flag;
     int extern_flag;
     int static_flag;
     struct attrib *attr_list;
{
  /* Nonzero when VAR_INIT is a valid constant initializer for TYPE: at
     global scope (or for static objects) it must survive
     initializer_constant_valid_p; otherwise TREE_CONSTANT suffices.  */
  int init_const
    = (var_init == 0
       ? 0
       : (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
	  && (global_bindings_p () || static_flag
	      ? 0 != initializer_constant_valid_p (var_init,
						   TREE_TYPE (var_init))
	      : TREE_CONSTANT (var_init))));
  tree var_decl
    = build_decl ((const_flag && init_const
		   /* Only make a CONST_DECL for sufficiently-small objects.
		      We consider complex double "sufficiently-small"  */
		   && TYPE_SIZE (type) != 0
		   && host_integerp (TYPE_SIZE_UNIT (type), 1)
		   && 0 >= compare_tree_int (TYPE_SIZE_UNIT (type),
					     GET_MODE_SIZE (DCmode)))
		  ? CONST_DECL : VAR_DECL, var_name, type);
  /* Initialization to perform via an explicit assignment statement rather
     than through DECL_INITIAL; set below when needed.  */
  tree assign_init = 0;

  /* If this is external, throw away any initializations unless this is a
     CONST_DECL (meaning we have a constant); they will be done elsewhere.  If
     we are defining a global here, leave a constant initialization and save
     any variable elaborations for the elaboration routine.  Otherwise, if
     the initializing expression is not the same as TYPE, generate the
     initialization with an assignment statement, since it knows how
     to do the required adjustents.  If we are just annotating types,
     throw away the initialization if it isn't a constant.  */

  if ((extern_flag && TREE_CODE (var_decl) != CONST_DECL)
      || (type_annotate_only && var_init != 0 && ! TREE_CONSTANT (var_init)))
    var_init = 0;

  /* Non-constant global initializations are deferred to the unit's
     elaboration routine.  */
  if (global_bindings_p () && var_init != 0 && ! init_const)
    {
      add_pending_elaborations (var_decl, var_init);
      var_init = 0;
    }

  /* Type-mismatched or non-constant static initializations are emitted
     as an assignment after the decl is created (see end of function).  */
  else if (var_init != 0
	   && ((TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
		!= TYPE_MAIN_VARIANT (type))
	       || (static_flag && ! init_const)))
    assign_init = var_init, var_init = 0;

  DECL_COMMON (var_decl) = !flag_no_common;
  DECL_INITIAL (var_decl) = var_init;
  TREE_READONLY (var_decl) = const_flag;
  DECL_EXTERNAL (var_decl) = extern_flag;
  TREE_PUBLIC (var_decl) = public_flag || extern_flag;
  TREE_CONSTANT (var_decl) = TREE_CODE (var_decl) == CONST_DECL;
  TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
    = TYPE_VOLATILE (type);

  /* At the global binding level we need to allocate static storage for the
     variable if and only if its not external.  If we are not at the top level
     we allocate automatic storage unless requested not to.  */
  TREE_STATIC (var_decl) = global_bindings_p () ? !extern_flag : static_flag;

  if (asm_name != 0)
    SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);

  process_attributes (var_decl, attr_list);

  /* Add this decl to the current binding level and generate any
     needed code and RTL.  */
  var_decl = pushdecl (var_decl);
  expand_decl (var_decl);

  if (DECL_CONTEXT (var_decl) != 0)
    expand_decl_init (var_decl);

  /* If this is volatile, force it into memory.  */
  if (TREE_SIDE_EFFECTS (var_decl))
    gnat_mark_addressable (var_decl);

  if (TREE_CODE (var_decl) != CONST_DECL)
    rest_of_decl_compilation (var_decl, 0, global_bindings_p (), 0);

  if (assign_init != 0)
    {
      /* If VAR_DECL has a padded type, convert it to the unpadded
	 type so the assignment is done properly.  */
      tree lhs = var_decl;

      if (TREE_CODE (TREE_TYPE (lhs)) == RECORD_TYPE
	  && TYPE_IS_PADDING_P (TREE_TYPE (lhs)))
	lhs = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (lhs))), lhs);

      expand_expr_stmt (build_binary_op (MODIFY_EXPR, NULL_TREE, lhs,
					 assign_init));
    }

  return var_decl;
}
1391\f
1392/* Returns a FIELD_DECL node. FIELD_NAME the field name, FIELD_TYPE is its
1393 type, and RECORD_TYPE is the type of the parent. PACKED is nonzero if
1394 this field is in a record type with a "pragma pack". If SIZE is nonzero
1395 it is the specified size for this field. If POS is nonzero, it is the bit
1396 position. If ADDRESSABLE is nonzero, it means we are allowed to take
1397 the address of this field for aliasing purposes. */
1398
tree
create_field_decl (field_name, field_type, record_type, packed, size, pos,
		   addressable)
     tree field_name;
     tree field_type;
     tree record_type;
     int packed;
     tree size, pos;
     int addressable;
{
  tree field_decl = build_decl (FIELD_DECL, field_name, field_type);

  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TREE_READONLY (field_type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.  */
  if (packed && TYPE_MODE (field_type) == BLKmode)
    DECL_ALIGN (field_decl) = BITS_PER_UNIT;

  /* If a size is specified, use it.  Otherwise, see if we have a size
     to use that may differ from the natural size of the object.  */
  if (size != 0)
    size = convert (bitsizetype, size);
  else if (packed)
    {
      /* packed == 1 means "pragma pack" level packing: use the RM size
	 when it differs from the natural size.  */
      if (packed == 1 && ! operand_equal_p (rm_size (field_type),
					    TYPE_SIZE (field_type), 0))
	size = rm_size (field_type);

      /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
	 byte.  */
      if (size != 0 && TREE_CODE (size) == INTEGER_CST
	  && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
	size = round_up (size, BITS_PER_UNIT);
    }

  /* Make a bitfield if a size is specified for two reasons: first if the size
     differs from the natural size.  Second, if the alignment is insufficient.
     There are a number of ways the latter can be true.  But never make a
     bitfield if the type of the field has a nonconstant size.  */

  if (size != 0 && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
      && (! operand_equal_p (TYPE_SIZE (field_type), size, 0)
	  || (pos != 0
	      && ! value_zerop (size_binop (TRUNC_MOD_EXPR, pos,
					    bitsize_int (TYPE_ALIGN
							 (field_type)))))
	  || packed
	  || (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      /* Without packing or an explicit position, cap the field alignment
	 at the record's alignment when the record has one.  */
      if (! packed && pos == 0)
	DECL_ALIGN (field_decl)
	  = (TYPE_ALIGN (record_type) != 0
	     ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
	     : TYPE_ALIGN (field_type));
    }

  DECL_PACKED (field_decl) = pos != 0 ? DECL_BIT_FIELD (field_decl) : packed;
  DECL_ALIGN (field_decl)
    = MAX (DECL_ALIGN (field_decl),
	   DECL_BIT_FIELD (field_decl) ? 1
	   : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT
	   : TYPE_ALIGN (field_type));

  if (pos != 0)
    {
      /* We need to pass in the alignment the DECL is known to have.
	 This is the lowest-order bit set in POS, but no more than
	 the alignment of the record, if one is specified.  Note
	 that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      if (host_integerp (pos, 1))
	known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
      else
	known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
	  && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
	known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
			     host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
			     : BITS_PER_UNIT);
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
		    &DECL_FIELD_BIT_OFFSET (field_decl),
		    DECL_OFFSET_ALIGN (field_decl), pos);

      DECL_HAS_REP_P (field_decl) = 1;
    }

  /* If the field type is passed by reference, we will have pointers to the
     field, so it is addressable.  */
  if (must_pass_by_ref (field_type) || default_pass_by_ref (field_type))
    addressable = 1;

  /* ??? For now, we say that any field of aggregate type is addressable
     because the front end may take 'Reference of it.  */
  if (AGGREGATE_TYPE_P (field_type))
    addressable = 1;

  /* Mark the decl as nonaddressable if it either is indicated so semantically
     or if it is a bit field.  */
  DECL_NONADDRESSABLE_P (field_decl)
    = ! addressable || DECL_BIT_FIELD (field_decl);

  return field_decl;
}
1513
1514/* Subroutine of previous function: return nonzero if EXP, ignoring any side
1515 effects, has the value of zero. */
1516
1517static int
1518value_zerop (exp)
1519 tree exp;
1520{
1521 if (TREE_CODE (exp) == COMPOUND_EXPR)
1522 return value_zerop (TREE_OPERAND (exp, 1));
1523
1524 return integer_zerop (exp);
1525}
1526\f
1527/* Returns a PARM_DECL node. PARAM_NAME is the name of the parameter,
1528 PARAM_TYPE is its type. READONLY is nonzero if the parameter is
1529 readonly (either an IN parameter or an address of a pass-by-ref
1530 parameter). */
1531
tree
create_param_decl (param_name, param_type, readonly)
     tree param_name;
     tree param_type;
     int readonly;
{
  tree param_decl = build_decl (PARM_DECL, param_name, param_type);

  /* Honor the PROMOTE_PROTOTYPES target macro, as not doing so can
     lead to various ABI violations.  Narrow integral parameters are
     widened to int for argument passing.  */
#ifdef PROMOTE_PROTOTYPES
  if ((TREE_CODE (param_type) == INTEGER_TYPE
       || TREE_CODE (param_type) == ENUMERAL_TYPE)
      && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
    {
      /* We have to be careful about biased types here.  Make a subtype
	 of integer_type_node with the proper biasing.  */
      if (TREE_CODE (param_type) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (param_type))
	{
	  param_type
	    = copy_type (build_range_type (integer_type_node,
					   TYPE_MIN_VALUE (param_type),
					   TYPE_MAX_VALUE (param_type)));

	  /* Preserve the biased encoding on the widened subtype.  */
	  TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
	}
      else
	param_type = integer_type_node;
    }
#endif

  /* Record the (possibly promoted) type used for argument passing.  */
  DECL_ARG_TYPE (param_decl) = param_type;
  DECL_ARG_TYPE_AS_WRITTEN (param_decl) = param_type;
  TREE_READONLY (param_decl) = readonly;
  return param_decl;
}
1569\f
1570/* Given a DECL and ATTR_LIST, process the listed attributes. */
1571
void
process_attributes (decl, attr_list)
     tree decl;
     struct attrib *attr_list;
{
  for (; attr_list; attr_list = attr_list->next)
    switch (attr_list->type)
      {
      /* A target-machine attribute: hand it to the generic attribute
	 machinery.  */
      case ATTR_MACHINE_ATTRIBUTE:
	decl_attributes (&decl, tree_cons (attr_list->name, attr_list->arg,
					   NULL_TREE),
			 ATTR_FLAG_TYPE_IN_PLACE);
	break;

      /* Emit an assembler alias for DECL; aliases require a definition,
	 hence TREE_STATIC.  */
      case ATTR_LINK_ALIAS:
	TREE_STATIC (decl) = 1;
	assemble_alias (decl, attr_list->name);
	break;

      /* Weak symbols are only emitted when the target supports them;
	 otherwise warn at the declaration point.  */
      case ATTR_WEAK_EXTERNAL:
	if (SUPPORTS_WEAK)
	  declare_weak (decl);
	else
	  post_error ("?weak declarations not supported on this target",
		      attr_list->error_point);
	break;

      /* Place DECL in a named section; incompatible with common
	 allocation, so clear DECL_COMMON.  */
      case ATTR_LINK_SECTION:
#ifdef ASM_OUTPUT_SECTION_NAME
	DECL_SECTION_NAME (decl)
	  = build_string (IDENTIFIER_LENGTH (attr_list->name),
			  IDENTIFIER_POINTER (attr_list->name));
	DECL_COMMON (decl) = 0;
#else
	post_error ("?section attributes are not supported for this target",
		    attr_list->error_point);
#endif
	break;
      }
}
1612\f
1613/* Add some pending elaborations on the list. */
1614
80eaf415 1615void
415dddc8
RK
1616add_pending_elaborations (var_decl, var_init)
1617 tree var_decl;
1618 tree var_init;
1619{
1620 if (var_init != 0)
1621 Check_Elaboration_Code_Allowed (error_gnat_node);
1622
1623 pending_elaborations
1624 = chainon (pending_elaborations, build_tree_list (var_decl, var_init));
1625}
1626
1627/* Obtain any pending elaborations and clear the old list. */
1628
1629tree
1630get_pending_elaborations ()
1631{
1632 /* Each thing added to the list went on the end; we want it on the
1633 beginning. */
1634 tree result = TREE_CHAIN (pending_elaborations);
1635
1636 TREE_CHAIN (pending_elaborations) = 0;
1637 return result;
1638}
1639
fbf5a39b
AC
1640/* Return true if VALUE is a multiple of FACTOR. FACTOR must be a power
1641 of 2. */
1642
static int
value_factor_p (value, factor)
     tree value;
     int factor;
{
  /* A compile-time constant can be tested directly.  */
  if (host_integerp (value, 1))
    return tree_low_cst (value, 1) % factor == 0;

  /* Since FACTOR is a power of 2, a product is a multiple of it whenever
     either operand is.  */
  if (TREE_CODE (value) == MULT_EXPR)
    return (value_factor_p (TREE_OPERAND (value, 0), factor)
	    || value_factor_p (TREE_OPERAND (value, 1), factor));

  /* Anything else: we cannot prove divisibility.  */
  return 0;
}
1657
1658/* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
1659 unless we can prove these 2 fields are laid out in such a way that no gap
1660 exist between the end of PREV_FIELD and the begining of CURR_FIELD. OFFSET
1661 is the distance in bits between the end of PREV_FIELD and the starting
1662 position of CURR_FIELD. It is ignored if null. */
1663
static int
potential_alignment_gap (prev_field, curr_field, offset)
     tree prev_field;
     tree curr_field;
     tree offset;
{
  /* If this is the first field of the record, there cannot be any gap.  */
  if (!prev_field)
    return 0;

  /* If the previous field is a union type, then return False: the only
     time when such a field is not the last field of the record is when
     there are other components at fixed positions after it (meaning there
     was a rep clause for every field), in which case we don't want the
     alignment constraint to override them.  */
  if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
    return 0;

  /* If the distance between the end of prev_field and the beginning of
     curr_field is constant, then there is a gap if the value of this
     constant is not null.  */
  if (offset && host_integerp (offset, 1))
    return (!integer_zerop (offset));

  /* If the size and position of the previous field are constant,
     then check the sum of this size and position.  There will be a gap
     iff it is not a multiple of the current field alignment.  */
  if (host_integerp (DECL_SIZE (prev_field), 1)
      && host_integerp (bit_position (prev_field), 1))
    return ((tree_low_cst (bit_position (prev_field), 1)
	     + tree_low_cst (DECL_SIZE (prev_field), 1))
	    % DECL_ALIGN (curr_field) != 0);

  /* If both the position and size of the previous field are multiples
     of the current field alignment, there cannot be any gap.  */
  if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
      && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
    return 0;

  /* Fallback: conservatively report that there may be a gap.  */
  return 1;
}
1706
415dddc8
RK
1707/* Return nonzero if there are pending elaborations. */
1708
1709int
1710pending_elaborations_p ()
1711{
1712 return TREE_CHAIN (pending_elaborations) != 0;
1713}
1714
1715/* Save a copy of the current pending elaboration list and make a new
1716 one. */
1717
1718void
1719push_pending_elaborations ()
1720{
e2500fed 1721 struct e_stack *p = (struct e_stack *) ggc_alloc (sizeof (struct e_stack));
415dddc8
RK
1722
1723 p->next = elist_stack;
1724 p->elab_list = pending_elaborations;
1725 elist_stack = p;
1726 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
1727}
1728
1729/* Pop the stack of pending elaborations. */
1730
1731void
1732pop_pending_elaborations ()
1733{
1734 struct e_stack *p = elist_stack;
1735
1736 pending_elaborations = p->elab_list;
1737 elist_stack = p->next;
415dddc8
RK
1738}
1739
1740/* Return the current position in pending_elaborations so we can insert
1741 elaborations after that point. */
1742
1743tree
1744get_elaboration_location ()
1745{
1746 return tree_last (pending_elaborations);
1747}
1748
1749/* Insert the current elaborations after ELAB, which is in some elaboration
1750 list. */
1751
void
insert_elaboration_list (elab)
     tree elab;
{
  /* Remember ELAB's current successors so they can be reattached after
     the spliced-in entries.  */
  tree next = TREE_CHAIN (elab);

  if (TREE_CHAIN (pending_elaborations))
    {
      /* Splice the pending entries (everything after the dummy head node)
	 in right after ELAB, reattach ELAB's old successors after the last
	 spliced entry, and leave the pending list empty.  Note the order:
	 tree_last is taken while pending_elaborations still owns its
	 chain.  */
      TREE_CHAIN (elab) = TREE_CHAIN (pending_elaborations);
      TREE_CHAIN (tree_last (pending_elaborations)) = next;
      TREE_CHAIN (pending_elaborations) = 0;
    }
}
1765
1766/* Returns a LABEL_DECL node for LABEL_NAME. */
1767
1768tree
1769create_label_decl (label_name)
1770 tree label_name;
1771{
1772 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1773
f31686a3
RH
1774 DECL_CONTEXT (label_decl) = current_function_decl;
1775 DECL_MODE (label_decl) = VOIDmode;
1776 DECL_SOURCE_LOCATION (label_decl) = input_location;
415dddc8
RK
1777
1778 return label_decl;
1779}
1780\f
/* Returns a FUNCTION_DECL node.  SUBPROG_NAME is the name of the subprogram,
   ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
   node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
   PARM_DECL nodes chained through the TREE_CHAIN field).

   INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
   appropriate fields in the FUNCTION_DECL.  The decl is pushed on the
   current binding level and handed to rest_of_decl_compilation before
   being returned.  */

tree
create_subprog_decl (subprog_name, asm_name, subprog_type, param_decl_list,
		     inline_flag, public_flag, extern_flag, attr_list)
     tree subprog_name;
     tree asm_name;
     tree subprog_type;
     tree param_decl_list;
     int inline_flag;
     int public_flag;
     int extern_flag;
     struct attrib *attr_list;
{
  tree return_type = TREE_TYPE (subprog_type);
  tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);

  /* If this is a function nested inside an inlined external function, it
     means we aren't going to compile the outer function unless it is
     actually inlined, so do the same for us.  */
  if (current_function_decl != 0 && DECL_INLINE (current_function_decl)
      && DECL_EXTERNAL (current_function_decl))
    extern_flag = 1;

  DECL_EXTERNAL (subprog_decl) = extern_flag;
  TREE_PUBLIC (subprog_decl) = public_flag;
  DECL_INLINE (subprog_decl) = inline_flag;
  /* Readonly/volatile on a FUNCTION_TYPE encode "const"/"noreturn"-like
     semantics; propagate them to the decl.  */
  TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
  TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
  TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
  DECL_ARGUMENTS (subprog_decl) = param_decl_list;
  /* The RESULT_DECL is anonymous; it only carries the return type.  */
  DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);

  if (asm_name != 0)
    SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);

  process_attributes (subprog_decl, attr_list);

  /* Add this decl to the current binding level.  */
  subprog_decl = pushdecl (subprog_decl);

  /* Output the assembler code and/or RTL for the declaration.  */
  rest_of_decl_compilation (subprog_decl, 0, global_bindings_p (), 0);

  return subprog_decl;
}
1833\f
/* Count how deep we are into nested functions.  This is because
   we shouldn't call the backend function context routines unless we
   are in a nested function.  */

static int function_nesting_depth;

/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
   body.  This routine needs to be invoked before processing the declarations
   appearing in the subprogram.  */

void
begin_subprog_body (subprog_decl)
     tree subprog_decl;
{
  tree param_decl_list;
  tree param_decl;
  tree next_param;

  /* Only save the enclosing function's compilation state when this is
     a nested function (depth was already nonzero).  */
  if (function_nesting_depth++ != 0)
    push_function_context ();

  announce_function (subprog_decl);

  /* Make this field nonzero so further routines know that this is not
     tentative.  error_mark_node is replaced below (in poplevel) with the
     adequate BLOCK.  */
  DECL_INITIAL (subprog_decl) = error_mark_node;

  /* This function exists in static storage.  This does not mean `static' in
     the C sense!  */
  TREE_STATIC (subprog_decl) = 1;

  /* Enter a new binding level.  */
  current_function_decl = subprog_decl;
  pushlevel (0);

  /* Push all the PARM_DECL nodes onto the current scope (i.e. the scope of the
     subprogram body) so that they can be recognized as local variables in the
     subprogram.

     The list of PARM_DECL nodes is stored in the right order in
     DECL_ARGUMENTS.  Since ..._DECL nodes get stored in the reverse order in
     which they are transmitted to `pushdecl' we need to reverse the list of
     PARM_DECLs if we want it to be stored in the right order.  The reason why
     we want to make sure the PARM_DECLs are stored in the correct order is
     that this list will be retrieved in a few lines with a call to `getdecl'
     to store it back into the DECL_ARGUMENTS field.  */
  param_decl_list = nreverse (DECL_ARGUMENTS (subprog_decl));

  for (param_decl = param_decl_list; param_decl; param_decl = next_param)
    {
      next_param = TREE_CHAIN (param_decl);
      /* Unchain each PARM_DECL before pushing it individually.  */
      TREE_CHAIN (param_decl) = NULL;
      pushdecl (param_decl);
    }

  /* Store back the PARM_DECL nodes.  They appear in the right order.  */
  DECL_ARGUMENTS (subprog_decl) = getdecls ();

  init_function_start (subprog_decl);
  expand_function_start (subprog_decl, 0);

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_ASSEMBLER_NAME (subprog_decl) != 0
      && MAIN_NAME_P (DECL_ASSEMBLER_NAME (subprog_decl))
      && DECL_CONTEXT (subprog_decl) == NULL_TREE)
    expand_main_function ();
}
415dddc8
RK
1903
/* Finish the definition of the current subprogram (started by
   begin_subprog_body) and compile it all the way to assembler language
   output.  Pops the binding level, expands and compiles the function,
   then releases per-function data that is no longer needed.  */

void
end_subprog_body ()
{
  tree decl;
  tree cico_list;

  /* Close the binding level opened in begin_subprog_body; this creates
     the function's outermost BLOCK.  */
  poplevel (1, 0, 1);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (current_function_decl))
    = current_function_decl;

  /* Mark the RESULT_DECL as being in this subprogram. */
  DECL_CONTEXT (DECL_RESULT (current_function_decl)) = current_function_decl;

  expand_function_end ();

  /* If this is a nested function, push a new GC context.  That will keep
     local variables on the stack from being collected while we're doing
     the compilation of this function.  */
  if (function_nesting_depth > 1)
    ggc_push_context ();

  rest_of_compilation (current_function_decl);

  if (function_nesting_depth > 1)
    ggc_pop_context ();

  /* Throw away any VAR_DECLs we made for OUT parameters; they must
     not be seen when we call this function and will be in
     unallocated memory anyway. */
  for (cico_list = TYPE_CI_CO_LIST (TREE_TYPE (current_function_decl));
       cico_list != 0; cico_list = TREE_CHAIN (cico_list))
    TREE_VALUE (cico_list) = 0;

  if (DECL_SAVED_INSNS (current_function_decl) == 0)
    {
      /* Throw away DECL_RTL in any PARM_DECLs unless this function
	 was saved for inline, in which case the DECL_RTLs are in
	 preserved memory. */
      for (decl = DECL_ARGUMENTS (current_function_decl);
	   decl != 0; decl = TREE_CHAIN (decl))
	{
	  SET_DECL_RTL (decl, 0);
	  DECL_INCOMING_RTL (decl) = 0;
	}

      /* Similarly, discard DECL_RTL of the return value. */
      SET_DECL_RTL (DECL_RESULT (current_function_decl), 0);

      /* But DECL_INITIAL must remain nonzero so we know this
	 was an actual function definition unless toplev.c decided not
	 to inline it. */
      if (DECL_INITIAL (current_function_decl) != 0)
	DECL_INITIAL (current_function_decl) = error_mark_node;

      DECL_ARGUMENTS (current_function_decl) = 0;
    }

  /* If we are not at the bottom of the function nesting stack, pop up to
     the containing function.  Otherwise show we aren't in any function.  */
  if (--function_nesting_depth != 0)
    pop_function_context ();
  else
    current_function_decl = 0;
}
1971\f
/* Return a definition for a builtin function named NAME and whose data type
   is TYPE.  TYPE should be a function type with argument types.
   FUNCTION_CODE tells later passes how to compile calls to this function.
   See tree.h for its possible values.

   If LIBRARY_NAME is nonzero, use that for DECL_ASSEMBLER_NAME,
   the name to be called if we can't opencode the function.  If
   ATTRS is nonzero, use that for the function attribute list.  */

tree
builtin_function (name, type, function_code, class, library_name, attrs)
     const char *name;
     tree type;
     int function_code;
     enum built_in_class class;
     const char *library_name;
     tree attrs;
{
  tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);

  /* Builtins are always externally visible: no body is provided here.  */
  DECL_EXTERNAL (decl) = 1;
  TREE_PUBLIC (decl) = 1;
  if (library_name)
    SET_DECL_ASSEMBLER_NAME (decl, get_identifier (library_name));

  pushdecl (decl);
  DECL_BUILT_IN_CLASS (decl) = class;
  DECL_FUNCTION_CODE (decl) = function_code;
  if (attrs)
    decl_attributes (&decl, attrs, ATTR_FLAG_BUILT_IN);
  return decl;
}
2004
80eaf415 2005/* Return an integer type with the number of bits of precision given by
415dddc8
RK
2006 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2007 it is a signed type. */
2008
2009tree
b0c48229 2010gnat_type_for_size (precision, unsignedp)
415dddc8
RK
2011 unsigned precision;
2012 int unsignedp;
2013{
2014 tree t;
2015 char type_name[20];
2016
2017 if (precision <= 2 * MAX_BITS_PER_WORD
2018 && signed_and_unsigned_types[precision][unsignedp] != 0)
2019 return signed_and_unsigned_types[precision][unsignedp];
2020
2021 if (unsignedp)
2022 t = make_unsigned_type (precision);
2023 else
2024 t = make_signed_type (precision);
2025
2026 if (precision <= 2 * MAX_BITS_PER_WORD)
2027 signed_and_unsigned_types[precision][unsignedp] = t;
2028
2029 if (TYPE_NAME (t) == 0)
2030 {
2031 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2032 TYPE_NAME (t) = get_identifier (type_name);
2033 }
2034
2035 return t;
2036}
2037
/* Likewise for floating-point types: return a REAL_TYPE with the given
   PRECISION whose machine mode is MODE.  Results are cached per mode in
   float_types; aborts (414) if layout does not yield the requested mode.
   Unnamed types are named FLOAT_n.  */

static tree
float_type_for_size (precision, mode)
     int precision;
     enum machine_mode mode;
{
  tree t;
  char type_name[20];

  if (float_types[(int) mode] != 0)
    return float_types[(int) mode];

  /* Cache the node before layout so the entry exists even if we abort.  */
  float_types[(int) mode] = t = make_node (REAL_TYPE);
  TYPE_PRECISION (t) = precision;
  layout_type (t);

  /* Sanity check: layout_type must have picked exactly MODE.  */
  if (TYPE_MODE (t) != mode)
    gigi_abort (414);

  if (TYPE_NAME (t) == 0)
    {
      sprintf (type_name, "FLOAT_%d", precision);
      TYPE_NAME (t) = get_identifier (type_name);
    }

  return t;
}
2066
2067/* Return a data type that has machine mode MODE. UNSIGNEDP selects
2068 an unsigned type; otherwise a signed type is returned. */
2069
2070tree
b0c48229 2071gnat_type_for_mode (mode, unsignedp)
415dddc8
RK
2072 enum machine_mode mode;
2073 int unsignedp;
2074{
2075 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2076 return float_type_for_size (GET_MODE_BITSIZE (mode), mode);
2077 else
b0c48229 2078 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
415dddc8
RK
2079}
2080
/* Return the unsigned version of a TYPE_NODE, a scalar type.  When
   TYPE_NODE (or its base type) is a modular integer type, the result is
   a copy whose TREE_TYPE points back at the modular type so that
   modular semantics can be recovered from the unsigned view.  */

tree
gnat_unsigned_type (type_node)
     tree type_node;
{
  tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);

  if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
    {
      /* Copy before mutating: the cached type must not be changed.  */
      type = copy_node (type);
      TREE_TYPE (type) = type_node;
    }
  else if (TREE_TYPE (type_node) != 0
	   && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
	   && TYPE_MODULAR_P (TREE_TYPE (type_node)))
    {
      /* TYPE_NODE is a subtype of a modular type; link to that base.  */
      type = copy_node (type);
      TREE_TYPE (type) = TREE_TYPE (type_node);
    }

  return type;
}
2104
/* Return the signed version of a TYPE_NODE, a scalar type.  Mirror of
   gnat_unsigned_type: a modular TYPE_NODE (or subtype thereof) yields a
   copy whose TREE_TYPE records the modular type.  */

tree
gnat_signed_type (type_node)
     tree type_node;
{
  tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);

  if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
    {
      /* Copy before mutating: the cached type must not be changed.  */
      type = copy_node (type);
      TREE_TYPE (type) = type_node;
    }
  else if (TREE_TYPE (type_node) != 0
	   && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
	   && TYPE_MODULAR_P (TREE_TYPE (type_node)))
    {
      /* TYPE_NODE is a subtype of a modular type; link to that base.  */
      type = copy_node (type);
      TREE_TYPE (type) = TREE_TYPE (type_node);
    }

  return type;
}
2128
2129/* Return a type the same as TYPE except unsigned or signed according to
2130 UNSIGNEDP. */
2131
2132tree
ceef8ce4 2133gnat_signed_or_unsigned_type (unsignedp, type)
415dddc8
RK
2134 int unsignedp;
2135 tree type;
2136{
2137 if (! INTEGRAL_TYPE_P (type) || TREE_UNSIGNED (type) == unsignedp)
2138 return type;
2139 else
b0c48229 2140 return gnat_type_for_size (TYPE_PRECISION (type), unsignedp);
415dddc8
RK
2141}
2142\f
/* EXP is an expression for the size of an object.  If this size contains
   discriminant references, replace them with the maximum (if MAX_P) or
   minimum (if ! MAX_P) possible value of the discriminant.  Dispatches
   on the tree-code class of EXP and recurses through operands, flipping
   MAX_P where an operation is antitone in that operand (negation, the
   right operand of subtraction).  Aborts (407) on RTL_EXPR and (408) on
   any unhandled code.  */

tree
max_size (exp, max_p)
     tree exp;
     int max_p;
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE_CLASS (code))
    {
    case 'd':
    case 'c':
      /* Declarations and constants contain no discriminants.  */
      return exp;

    case 'x':
      if (code == TREE_LIST)
	/* Map over the list, processing value and tail.  */
	return tree_cons (TREE_PURPOSE (exp),
			  max_size (TREE_VALUE (exp), max_p),
			  TREE_CHAIN (exp) != 0
			  ? max_size (TREE_CHAIN (exp), max_p) : 0);
      break;

    case 'r':
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
	 modify.  Otherwise, we treat it like a variable.  */
      if (! CONTAINS_PLACEHOLDER_P (exp))
	return exp;

      /* Replace the discriminant reference by the extreme value of its
	 type; note the forced MAX_P of 1 on the bound itself.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      return
	max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), 1);

    case '<':
      /* A comparison's value is 0 or 1.  */
      return max_p ? size_one_node : size_zero_node;

    case '1':
    case '2':
    case 'e':
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  if (code == NON_LVALUE_EXPR)
	    return max_size (TREE_OPERAND (exp, 0), max_p);
	  else
	    return
	      fold (build1 (code, type,
			    max_size (TREE_OPERAND (exp, 0),
				      code == NEGATE_EXPR ? ! max_p : max_p)));

	case 2:
	  if (code == RTL_EXPR)
	    gigi_abort (407);
	  else if (code == COMPOUND_EXPR)
	    /* Only the second operand contributes the value.  */
	    return max_size (TREE_OPERAND (exp, 1), max_p);
	  else if (code == WITH_RECORD_EXPR)
	    return exp;

	  {
	    tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
	    tree rhs = max_size (TREE_OPERAND (exp, 1),
				 code == MINUS_EXPR ? ! max_p : max_p);

	    /* Special-case wanting the maximum value of a MIN_EXPR.
	       In that case, if one side overflows, return the other.
	       sizetype is signed, but we know sizes are non-negative.
	       Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
	       overflowing or the maximum possible value and the RHS
	       a variable.  */
	    if (max_p && code == MIN_EXPR && TREE_OVERFLOW (rhs))
	      return lhs;
	    else if (max_p && code == MIN_EXPR && TREE_OVERFLOW (lhs))
	      return rhs;
	    else if ((code == MINUS_EXPR || code == PLUS_EXPR)
		     && ((TREE_CONSTANT (lhs) && TREE_OVERFLOW (lhs))
			 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
		     && ! TREE_CONSTANT (rhs))
	      return lhs;
	    else
	      return fold (build (code, type, lhs, rhs));
	  }

	case 3:
	  if (code == SAVE_EXPR)
	    return exp;
	  else if (code == COND_EXPR)
	    /* Either arm may be taken, so bound by the larger of the two
	       recursed arms.  */
	    return fold (build (MAX_EXPR, type,
				max_size (TREE_OPERAND (exp, 1), max_p),
				max_size (TREE_OPERAND (exp, 2), max_p)));
	  else if (code == CALL_EXPR && TREE_OPERAND (exp, 1) != 0)
	    /* Rebuild the call with its argument list processed.  */
	    return build (CALL_EXPR, type, TREE_OPERAND (exp, 0),
			  max_size (TREE_OPERAND (exp, 1), max_p));
	}
    }

  gigi_abort (408);
}
2243\f
/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
   EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
   Return a constructor for the template.  The template has one pair of
   fields (min, max) per dimension; bounds come from TYPE_ACTUAL_BOUNDS,
   the array domains, or the template fields themselves, in that order of
   preference.  */

tree
build_template (template_type, array_type, expr)
     tree template_type;
     tree array_type;
     tree expr;
{
  tree template_elts = NULL_TREE;
  tree bound_list = NULL_TREE;
  tree field;

  /* Strip padding or left-justified-modular wrappers to reach the
     underlying array type.  */
  if (TREE_CODE (array_type) == RECORD_TYPE
      && (TYPE_IS_PADDING_P (array_type)
	  || TYPE_LEFT_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
	  && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.   Go down the
     field list of the template instead of the type chain because this
     array might be an Ada array of arrays and we can't tell where the
     nested arrays stop being the underlying object.

     Per iteration: advance either the bound list or the array-type chain
     (whichever is driving the bounds), and skip two template fields (the
     min/max pair for one dimension).  */

  for (field = TYPE_FIELDS (template_type); field;
       (bound_list != 0
	? (bound_list = TREE_CHAIN (bound_list))
	: (array_type = TREE_TYPE (array_type))),
       field = TREE_CHAIN (TREE_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
	 for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
	 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
	 This will give us a maximum range.  */
      if (bound_list != 0)
	bounds = TREE_VALUE (bound_list);
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
	bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
      else if (expr != 0 && TREE_CODE (expr) == PARM_DECL
	       && DECL_BY_COMPONENT_PTR_P (expr))
	bounds = TREE_TYPE (field);
      else
	gigi_abort (411);

      /* FIELD is the min field and TREE_CHAIN (FIELD) the max field.  */
      min = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (field), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
	 surround them with a WITH_RECORD_EXPR giving EXPR as the
	 OBJECT.  */
      if (CONTAINS_PLACEHOLDER_P (min))
	min = build (WITH_RECORD_EXPR, TREE_TYPE (min), min, expr);
      if (CONTAINS_PLACEHOLDER_P (max))
	max = build (WITH_RECORD_EXPR, TREE_TYPE (max), max, expr);

      /* Accumulate in reverse; nreverse below restores field order.  */
      template_elts = tree_cons (TREE_CHAIN (field), max,
				 tree_cons (field, min, template_elts));
    }

  return gnat_build_constructor (template_type, nreverse (template_elts));
}
2312\f
/* Build a VMS descriptor from a Mechanism_Type, which must specify
   a descriptor type, and the GCC type of an object.  Each FIELD_DECL
   in the type contains in its DECL_INITIAL the expression to use when
   a constructor is made for the type.  GNAT_ENTITY is a gnat node used
   to print out an error message if the mechanism cannot be applied to
   an object of that type and also for the name.

   The record layout follows the VMS DSC$ descriptor format: a common
   prefix (LENGTH, DTYPE, CLASS, POINTER) followed by
   mechanism-specific fields (bounds for SB; array block for A/NCA).
   NOTE(review): the DTYPE and CLASS numeric codes below are presumed
   to match the VMS descriptor spec — verify against the OpenVMS
   calling standard when touching them.  */

tree
build_vms_descriptor (type, mech, gnat_entity)
     tree type;
     Mechanism_Type mech;
     Entity_Id gnat_entity;
{
  tree record_type = make_node (RECORD_TYPE);
  tree field_list = 0;
  int class;
  int dtype = 0;
  tree inner_type;
  int ndim;
  int i;
  tree *idx_arr;
  tree tem;

  /* If TYPE is an unconstrained array, use the underlying array type.  */
  if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));

  /* If this is an array, compute the number of dimensions in the array,
     get the index types, and point to the inner type.  */
  if (TREE_CODE (type) != ARRAY_TYPE)
    ndim = 0;
  else
    for (ndim = 1, inner_type = type;
	 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
	 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
	 ndim++, inner_type = TREE_TYPE (inner_type))
      ;

  idx_arr = (tree *) alloca (ndim * sizeof (tree));

  /* Fortran arrays are stored column-major, so collect the index types
     in reverse for them (except under By_Descriptor_NCA).  */
  if (mech != By_Descriptor_NCA
      && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
    for (i = ndim - 1, inner_type = type;
	 i >= 0;
	 i--, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);
  else
    for (i = 0, inner_type = type;
	 i < ndim;
	 i++, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);

  /* Now get the DTYPE value.  */
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
      if (TYPE_VAX_FLOATING_POINT_P (type))
	/* VAX float: select by decimal digits of precision.  */
	switch ((int) TYPE_DIGITS_VALUE (type))
	  {
	  case 6:
	    dtype = 10;
	    break;
	  case 9:
	    dtype = 11;
	    break;
	  case 15:
	    dtype = 27;
	    break;
	  }
      else
	/* Integer: select by bit size and signedness.  */
	switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
	  {
	  case 8:
	    dtype = TREE_UNSIGNED (type) ? 2 : 6;
	    break;
	  case 16:
	    dtype = TREE_UNSIGNED (type) ? 3 : 7;
	    break;
	  case 32:
	    dtype = TREE_UNSIGNED (type) ? 4 : 8;
	    break;
	  case 64:
	    dtype = TREE_UNSIGNED (type) ? 5 : 9;
	    break;
	  case 128:
	    dtype = TREE_UNSIGNED (type) ? 25 : 26;
	    break;
	  }
      break;

    case REAL_TYPE:
      dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
      break;

    case COMPLEX_TYPE:
      if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
	  && TYPE_VAX_FLOATING_POINT_P (type))
	switch ((int) TYPE_DIGITS_VALUE (type))
	  {
	  case 6:
	    dtype = 12;
	    break;
	  case 9:
	    dtype = 13;
	    break;
	  case 15:
	    dtype = 29;
	  }
      else
	dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
      break;

    case ARRAY_TYPE:
      dtype = 14;
      break;

    default:
      break;
    }

  /* Get the CLASS value.  */
  switch (mech)
    {
    case By_Descriptor_A:
      class = 4;
      break;
    case By_Descriptor_NCA:
      class = 10;
      break;
    case By_Descriptor_SB:
      class = 15;
      break;
    default:
      class = 1;
    }

  /* Make the type for a descriptor for VMS.  The first four fields
     are the same for all types.  */

  field_list
    = chainon (field_list,
	       make_descriptor_field
	       ("LENGTH", gnat_type_for_size (16, 1), record_type,
		size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));

  field_list = chainon (field_list,
			make_descriptor_field ("DTYPE",
					       gnat_type_for_size (8, 1),
					       record_type, size_int (dtype)));
  field_list = chainon (field_list,
			make_descriptor_field ("CLASS",
					       gnat_type_for_size (8, 1),
					       record_type, size_int (class)));

  /* POINTER's initializer takes the address of the eventual object via
     a PLACEHOLDER_EXPR, resolved when a constructor is built.  */
  field_list
    = chainon (field_list,
	       make_descriptor_field ("POINTER",
				      build_pointer_type (type),
				      record_type,
				      build1 (ADDR_EXPR,
					      build_pointer_type (type),
					      build (PLACEHOLDER_EXPR,
						     type))));

  switch (mech)
    {
    case By_Descriptor:
    case By_Descriptor_S:
      break;

    case By_Descriptor_SB:
      /* String with bounds: add the lower and upper bound fields.  */
      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("SB_L1", gnat_type_for_size (32, 1), record_type,
		    TREE_CODE (type) == ARRAY_TYPE
		    ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("SB_L2", gnat_type_for_size (32, 1), record_type,
		    TREE_CODE (type) == ARRAY_TYPE
		    ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
      break;

    case By_Descriptor_A:
    case By_Descriptor_NCA:
      field_list = chainon (field_list,
			    make_descriptor_field ("SCALE",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_zero_node));

      field_list = chainon (field_list,
			    make_descriptor_field ("DIGITS",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_zero_node));

      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("AFLAGS", gnat_type_for_size (8, 1), record_type,
		    size_int (mech == By_Descriptor_NCA
			      ? 0
			      /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS.  */
			      : (TREE_CODE (type) == ARRAY_TYPE
				 && TYPE_CONVENTION_FORTRAN_P (type)
				 ? 224 : 192))));

      field_list = chainon (field_list,
			    make_descriptor_field ("DIMCT",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_int (ndim)));

      field_list = chainon (field_list,
			    make_descriptor_field ("ARSIZE",
						   gnat_type_for_size (32, 1),
						   record_type,
						   size_in_bytes (type)));

      /* Now build a pointer to the 0,0,0... element.  */
      tem = build (PLACEHOLDER_EXPR, type);
      for (i = 0, inner_type = type; i < ndim;
	   i++, inner_type = TREE_TYPE (inner_type))
	tem = build (ARRAY_REF, TREE_TYPE (inner_type), tem,
		     convert (TYPE_DOMAIN (inner_type), size_zero_node));

      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("A0", build_pointer_type (inner_type), record_type,
		    build1 (ADDR_EXPR, build_pointer_type (inner_type), tem)));

      /* Next come the addressing coefficients.  Each is the running
	 product of the extents of the dimensions processed so far.  */
      tem = size_int (1);
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];
	  tree idx_length
	    = size_binop (MULT_EXPR, tem,
			  size_binop (PLUS_EXPR,
				      size_binop (MINUS_EXPR,
						  TYPE_MAX_VALUE (idx_arr[i]),
						  TYPE_MIN_VALUE (idx_arr[i])),
				      size_int (1)));

	  fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
	  fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = chainon (field_list,
		       make_descriptor_field (fname,
					      gnat_type_for_size (32, 1),
					      record_type, idx_length));

	  if (mech == By_Descriptor_NCA)
	    tem = idx_length;
	}

      /* Finally here are the bounds.  Ln/Un pairs per dimension.  */
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];

	  fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = chainon (field_list,
		       make_descriptor_field
		       (fname, gnat_type_for_size (32, 1), record_type,
			TYPE_MIN_VALUE (idx_arr[i])));

	  fname[0] = 'U';
	  field_list
	    = chainon (field_list,
		       make_descriptor_field
		       (fname, gnat_type_for_size (32, 1), record_type,
			TYPE_MAX_VALUE (idx_arr[i])));
	}
      break;

    default:
      post_error ("unsupported descriptor type for &", gnat_entity);
    }

  finish_record_type (record_type, field_list, 0, 1);
  pushdecl (build_decl (TYPE_DECL, create_concat_name (gnat_entity, "DESC"),
			record_type));

  return record_type;
}
2605
2606/* Utility routine for above code to make a field. */
2607
2608static tree
2609make_descriptor_field (name, type, rec_type, initial)
2610 const char *name;
2611 tree type;
2612 tree rec_type;
2613 tree initial;
2614{
2615 tree field
2616 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2617
2618 DECL_INITIAL (field) = initial;
2619 return field;
2620}
2621\f
/* Build a type to be used to represent an aliased object whose nominal
   type is an unconstrained array.  This consists of a RECORD_TYPE containing
   a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
   ARRAY_TYPE.  If ARRAY_TYPE is that of the unconstrained array, this
   is used to represent an arbitrary unconstrained object.  Use NAME
   as the name of the record.  The BOUNDS field (the template) comes
   first, followed by the ARRAY field holding the data.  */

tree
build_unc_object_type (template_type, object_type, name)
     tree template_type;
     tree object_type;
     tree name;
{
  tree type = make_node (RECORD_TYPE);
  tree template_field = create_field_decl (get_identifier ("BOUNDS"),
					   template_type, type, 0, 0, 0, 1);
  tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
					type, 0, 0, 0, 1);

  TYPE_NAME (type) = name;
  /* Mark the record so the rest of gigi knows its first field is a
     bounds template.  */
  TYPE_CONTAINS_TEMPLATE_P (type) = 1;
  finish_record_type (type,
		      chainon (chainon (NULL_TREE, template_field),
			       array_field),
		      0, 0);

  return type;
}
2650\f
2651/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE. In
2652 the normal case this is just two adjustments, but we have more to do
2653 if NEW is an UNCONSTRAINED_ARRAY_TYPE. */
2654
2655void
2656update_pointer_to (old_type, new_type)
2657 tree old_type;
2658 tree new_type;
2659{
2660 tree ptr = TYPE_POINTER_TO (old_type);
2661 tree ref = TYPE_REFERENCE_TO (old_type);
7a3a8c06
RK
2662 tree type;
2663
2664 /* If this is the main variant, process all the other variants first. */
2665 if (TYPE_MAIN_VARIANT (old_type) == old_type)
2666 for (type = TYPE_NEXT_VARIANT (old_type); type != 0;
2667 type = TYPE_NEXT_VARIANT (type))
2668 update_pointer_to (type, new_type);
2669
fbf5a39b 2670 /* If no pointer or reference, we are done. */
7a3a8c06
RK
2671 if (ptr == 0 && ref == 0)
2672 return;
415dddc8 2673
fbf5a39b
AC
2674 /* Merge the old type qualifiers in the new type.
2675
2676 Each old variant has qualifiers for specific reasons, and the new
2677 designated type as well. Each set of qualifiers represents useful
2678 information grabbed at some point, and merging the two simply unifies
2679 these inputs into the final type description.
2680
2681 Consider for instance a volatile type frozen after an access to constant
2682 type designating it. After the designated type freeze, we get here with a
2683 volatile new_type and a dummy old_type with a readonly variant, created
2684 when the access type was processed. We shall make a volatile and readonly
2685 designated type, because that's what it really is.
2686
2687 We might also get here for a non-dummy old_type variant with different
2688 qualifiers than the new_type ones, for instance in some cases of pointers
2689 to private record type elaboration (see the comments around the call to
2690 this routine from gnat_to_gnu_entity/E_Access_Type). We have to merge the
2691 qualifiers in thoses cases too, to avoid accidentally discarding the
2692 initial set, and will often end up with old_type == new_type then. */
2693 new_type = build_qualified_type (new_type,
2694 TYPE_QUALS (old_type)
2695 | TYPE_QUALS (new_type));
2696
2697 /* If the new type and the old one are identical, there is nothing to
2698 update. */
7a3a8c06 2699 if (old_type == new_type)
415dddc8
RK
2700 return;
2701
fbf5a39b 2702 /* Otherwise, first handle the simple case. */
415dddc8
RK
2703 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
2704 {
2705 if (ptr != 0)
2706 TREE_TYPE (ptr) = new_type;
2707 TYPE_POINTER_TO (new_type) = ptr;
2708
2709 if (ref != 0)
2710 TREE_TYPE (ref) = new_type;
2711 TYPE_REFERENCE_TO (new_type) = ref;
2712
2713 if (ptr != 0 && TYPE_NAME (ptr) != 0
2714 && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL
2715 && TREE_CODE (new_type) != ENUMERAL_TYPE)
2716 rest_of_decl_compilation (TYPE_NAME (ptr), NULL,
2717 global_bindings_p (), 0);
2718 if (ref != 0 && TYPE_NAME (ref) != 0
2719 && TREE_CODE (TYPE_NAME (ref)) == TYPE_DECL
2720 && TREE_CODE (new_type) != ENUMERAL_TYPE)
2721 rest_of_decl_compilation (TYPE_NAME (ref), NULL,
2722 global_bindings_p (), 0);
2723 }
2724
2725 /* Now deal with the unconstrained array case. In this case the "pointer"
2726 is actually a RECORD_TYPE where the types of both fields are
2727 pointers to void. In that case, copy the field list from the
2728 old type to the new one and update the fields' context. */
2729 else if (TREE_CODE (ptr) != RECORD_TYPE || ! TYPE_IS_FAT_POINTER_P (ptr))
2730 gigi_abort (412);
2731
2732 else
2733 {
2734 tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
2735 tree ptr_temp_type;
2736 tree new_ref;
2737 tree var;
2738
2739 TYPE_FIELDS (ptr) = TYPE_FIELDS (TYPE_POINTER_TO (new_type));
2740 DECL_CONTEXT (TYPE_FIELDS (ptr)) = ptr;
2741 DECL_CONTEXT (TREE_CHAIN (TYPE_FIELDS (ptr))) = ptr;
2742
2743 /* Rework the PLACEHOLDER_EXPR inside the reference to the
2744 template bounds.
2745
2746 ??? This is now the only use of gnat_substitute_in_type, which
2747 is now a very "heavy" routine to do this, so it should be replaced
2748 at some point. */
2749 ptr_temp_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (ptr)));
2750 new_ref = build (COMPONENT_REF, ptr_temp_type,
2751 build (PLACEHOLDER_EXPR, ptr),
2752 TREE_CHAIN (TYPE_FIELDS (ptr)));
2753
80eaf415 2754 update_pointer_to
415dddc8
RK
2755 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
2756 gnat_substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
2757 TREE_CHAIN (TYPE_FIELDS (ptr)), new_ref));
2758
2759 for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
e2500fed 2760 SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);
415dddc8
RK
2761
2762 TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
2763 = TREE_TYPE (new_type) = ptr;
2764
2765 /* Now handle updating the allocation record, what the thin pointer
2766 points to. Update all pointers from the old record into the new
2767 one, update the types of the fields, and recompute the size. */
2768
2769 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);
2770
2771 TREE_TYPE (TYPE_FIELDS (new_obj_rec)) = TREE_TYPE (ptr_temp_type);
2772 TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2773 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr)));
2774 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2775 = TYPE_SIZE (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
2776 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
2777 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
2778
2779 TYPE_SIZE (new_obj_rec)
2780 = size_binop (PLUS_EXPR,
2781 DECL_SIZE (TYPE_FIELDS (new_obj_rec)),
2782 DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
2783 TYPE_SIZE_UNIT (new_obj_rec)
2784 = size_binop (PLUS_EXPR,
2785 DECL_SIZE_UNIT (TYPE_FIELDS (new_obj_rec)),
2786 DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
2787 rest_of_type_compilation (ptr, global_bindings_p ());
2788 }
2789}
2790\f
2791/* Convert a pointer to a constrained array into a pointer to a fat
2792   pointer. This involves making or finding a template. */
2793
/* TYPE is a fat pointer RECORD_TYPE: its first field receives the pointer
   to the array data and its second field (TREE_CHAIN of the first) the
   pointer to the template -- see the final constructor below.  EXPR may be
   a literal zero, a thin pointer, or a pointer to a constrained array.  */
2794static tree
2795convert_to_fat_pointer (type, expr)
2796 tree type;
2797 tree expr;
2798{
2799 tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
2800 tree template, template_addr;
2801 tree etype = TREE_TYPE (expr);
2802
2803 /* If EXPR is a constant of zero, we make a fat pointer that has a null
2804 pointer to the template and array. */
2805 if (integer_zerop (expr))
2806 return
dcf92453 2807 gnat_build_constructor
415dddc8
RK
2808 (type,
2809 tree_cons (TYPE_FIELDS (type),
2810 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
2811 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2812 convert (build_pointer_type (template_type),
2813 expr),
2814 NULL_TREE)));
2815
2816 /* If EXPR is a thin pointer, make the template and data from the record. */
2817
2818 else if (TYPE_THIN_POINTER_P (etype))
2819 {
2820 tree fields = TYPE_FIELDS (TREE_TYPE (etype));
2821
/* EXPR is referenced twice below (template and array components), so
   evaluate it only once.  */
2822 expr = save_expr (expr);
2823 if (TREE_CODE (expr) == ADDR_EXPR)
2824 expr = TREE_OPERAND (expr, 0);
2825 else
2826 expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
2827
/* First field of the pointed-to record is the template, second is the
   array data; take the template by value and the data by address.  */
2828 template = build_component_ref (expr, NULL_TREE, fields);
2829 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
2830 build_component_ref (expr, NULL_TREE,
2831 TREE_CHAIN (fields)));
2832 }
2833 else
2834 /* Otherwise, build the constructor for the template. */
2835 template = build_template (template_type, TREE_TYPE (etype), expr);
2836
2837 template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);
2838
fbf5a39b
AC
2839 /* The result is a CONSTRUCTOR for the fat pointer.
2840
2841 If expr is an argument of a foreign convention subprogram, the type it
2842 points to is directly the component type. In this case, the expression
2843 type may not match the corresponding FIELD_DECL type at this point, so we
2844 call "convert" here to fix that up if necessary. This type consistency is
2845 required, for instance because it ensures that possible later folding of
2846 component_refs against this constructor always yields something of the
2847 same type as the initial reference.
2848
2849 Note that the call to "build_template" above is still fine, because it
2850 will only refer to the provided template_type in this case. */
2851 return
2852 gnat_build_constructor
2853 (type, tree_cons (TYPE_FIELDS (type),
2854 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
2855 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
2856 template_addr, NULL_TREE)));
415dddc8
RK
2857}
2858\f
2859/* Convert to a thin pointer type, TYPE. The only thing we know how to convert
2860 is something that is a fat pointer, so convert to it first if it EXPR
2861 is not already a fat pointer. */
2862
2863static tree
2864convert_to_thin_pointer (type, expr)
2865 tree type;
2866 tree expr;
2867{
2868 if (! TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
2869 expr
2870 = convert_to_fat_pointer
2871 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
2872
2873 /* We get the pointer to the data and use a NOP_EXPR to make it the
2874 proper GCC type. */
2875 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)));
2876 expr = build1 (NOP_EXPR, type, expr);
2877
2878 return expr;
2879}
2880\f
2881/* Create an expression whose value is that of EXPR,
2882 converted to type TYPE. The TREE_TYPE of the value
2883 is always TYPE. This function implements all reasonable
2884 conversions; callers should filter out those that are
2885 not permitted by the language being compiled. */
2886
/* Structure: the early if/else chain handles conversions driven by the
   shape of EXPR and its type ETYPE (padding records, biased integers,
   left-justified modular records, WITH_RECORD_EXPR); the first switch
   handles special expression codes; the final switch dispatches on the
   target type's CODE.  */
2887tree
2888convert (type, expr)
2889 tree type, expr;
2890{
2891 enum tree_code code = TREE_CODE (type);
2892 tree etype = TREE_TYPE (expr);
2893 enum tree_code ecode = TREE_CODE (etype);
2894 tree tem;
2895
2896 /* If EXPR is already the right type, we are done. */
2897 if (type == etype)
2898 return expr;
fbf5a39b
AC
2899 /* If we're converting between two aggregate types that have the same main
2900 variant, just make a NOP_EXPR. */
2901 else if (AGGREGATE_TYPE_P (type)
2902 && TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
2903 return build1 (NOP_EXPR, type, expr);
415dddc8
RK
2904 /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
2905 new one. */
fbf5a39b 2906 else if (TREE_CODE (expr) == WITH_RECORD_EXPR)
415dddc8
RK
2907 return build (WITH_RECORD_EXPR, type,
2908 convert (type, TREE_OPERAND (expr, 0)),
2909 TREE_OPERAND (expr, 1));
2910
2911 /* If the input type has padding, remove it by doing a component reference
2912 to the field. If the output type has padding, make a constructor
2913 to build the record. If both input and output have padding and are
2914 of variable size, do this as an unchecked conversion. */
fbf5a39b 2915 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
415dddc8
RK
2916 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
2917 && (! TREE_CONSTANT (TYPE_SIZE (type))
2918 || ! TREE_CONSTANT (TYPE_SIZE (etype))))
/* Empty statement: fall through to the end of the if/else chain so that
   the remaining (non-else) paths below apply.  */
2919 ;
2920 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
2921 {
2922 /* If we have just converted to this padded type, just get
2923 the inner expression. */
2924 if (TREE_CODE (expr) == CONSTRUCTOR
2925 && CONSTRUCTOR_ELTS (expr) != 0
2926 && TREE_PURPOSE (CONSTRUCTOR_ELTS (expr)) == TYPE_FIELDS (etype))
2927 return TREE_VALUE (CONSTRUCTOR_ELTS (expr));
2928 else
2929 return convert (type, build_component_ref (expr, NULL_TREE,
2930 TYPE_FIELDS (etype)));
2931 }
2932 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
2933 {
2934 /* If we previously converted from another type and our type is
2935 of variable size, remove the conversion to avoid the need for
2936 variable-size temporaries. */
07fc65c4 2937 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
415dddc8
RK
2938 && ! TREE_CONSTANT (TYPE_SIZE (type)))
2939 expr = TREE_OPERAND (expr, 0);
2940
2941 /* If we are just removing the padding from expr, convert the original
2942 object if we have variable size. That will avoid the need
2943 for some variable-size temporaries. */
2944 if (TREE_CODE (expr) == COMPONENT_REF
2945 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
2946 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
2947 && ! TREE_CONSTANT (TYPE_SIZE (type)))
2948 return convert (type, TREE_OPERAND (expr, 0));
2949
2950 /* If the result type is a padded type with a self-referentially-sized
2951 field and the expression type is a record, do this as an
2952 unchecked conversion. */
fbf5a39b
AC
2953 else if (TREE_CODE (etype) == RECORD_TYPE
2954 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
2955 return unchecked_convert (type, expr, 0);
415dddc8
RK
2956
2957 else
2958 return
dcf92453 2959 gnat_build_constructor (type,
415dddc8
RK
2960 tree_cons (TYPE_FIELDS (type),
2961 convert (TREE_TYPE
2962 (TYPE_FIELDS (type)),
2963 expr),
2964 NULL_TREE));
2965 }
2966
2967 /* If the input is a biased type, adjust first. */
2968 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
2969 return convert (type, fold (build (PLUS_EXPR, TREE_TYPE (etype),
2970 fold (build1 (GNAT_NOP_EXPR,
2971 TREE_TYPE (etype), expr)),
2972 TYPE_MIN_VALUE (etype))));
2973
2974 /* If the input is a left-justified modular type, we need to extract
2975 the actual object before converting it to any other type with the
2976 exception of an unconstrained array. */
2977 if (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)
2978 && code != UNCONSTRAINED_ARRAY_TYPE)
2979 return convert (type, build_component_ref (expr, NULL_TREE,
2980 TYPE_FIELDS (etype)));
2981
fbf5a39b
AC
2982 /* If converting to a type that contains a template, convert to the data
2983 type and then build the template. */
2984 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
415dddc8
RK
2985 {
2986 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
2987
fbf5a39b
AC
2988 /* If the source already has a template, get a reference to the
2989 associated array only, as we are going to rebuild a template
2990 for the target type anyway. */
2991 expr = maybe_unconstrained_array (expr);
2992
415dddc8 2993 return
dcf92453 2994 gnat_build_constructor
415dddc8
RK
2995 (type,
2996 tree_cons (TYPE_FIELDS (type),
2997 build_template (TREE_TYPE (TYPE_FIELDS (type)),
2998 obj_type, NULL_TREE),
2999 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3000 convert (obj_type, expr), NULL_TREE)));
3001 }
3002
3003 /* There are some special cases of expressions that we process
3004 specially. */
3005 switch (TREE_CODE (expr))
3006 {
3007 case ERROR_MARK:
3008 return expr;
3009
3010 case TRANSFORM_EXPR:
3011 case NULL_EXPR:
3012 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
3013 conversion in gnat_expand_expr. NULL_EXPR does not represent
3014 an actual value, so no conversion is needed. */
fbf5a39b 3015 expr = copy_node (expr);
415dddc8
RK
3016 TREE_TYPE (expr) = type;
3017 return expr;
3018
3019 case STRING_CST:
3020 case CONSTRUCTOR:
3021 /* If we are converting a STRING_CST to another constrained array type,
3022 just make a new one in the proper type. Likewise for a
0864034e 3023 CONSTRUCTOR. */
415dddc8
RK
3024 if (code == ecode && AGGREGATE_TYPE_P (etype)
3025 && ! (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
3026 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
3027 {
3028 expr = copy_node (expr);
3029 TREE_TYPE (expr) = type;
415dddc8
RK
3030 return expr;
3031 }
3032 break;
3033
3034 case COMPONENT_REF:
3035 /* If we are converting between two aggregate types of the same
3036 kind, size, mode, and alignment, just make a new COMPONENT_REF.
3037 This avoids unneeded conversions which makes reference computations
3038 more complex. */
3039 if (code == ecode && TYPE_MODE (type) == TYPE_MODE (etype)
3040 && AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
3041 && TYPE_ALIGN (type) == TYPE_ALIGN (etype)
3042 && operand_equal_p (TYPE_SIZE (type), TYPE_SIZE (etype), 0))
3043 return build (COMPONENT_REF, type, TREE_OPERAND (expr, 0),
3044 TREE_OPERAND (expr, 1));
3045
3046 break;
3047
3048 case UNCONSTRAINED_ARRAY_REF:
3049 /* Convert this to the type of the inner array by getting the address of
3050 the array from the template. */
3051 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3052 build_component_ref (TREE_OPERAND (expr, 0),
3053 get_identifier ("P_ARRAY"),
3054 NULL_TREE));
/* EXPR changed shape, so refresh the cached type/code before falling
   out of the switch into the generic handling below.  */
3055 etype = TREE_TYPE (expr);
3056 ecode = TREE_CODE (etype);
3057 break;
3058
07fc65c4 3059 case VIEW_CONVERT_EXPR:
415dddc8
RK
3060 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
3061 && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
3062 return convert (type, TREE_OPERAND (expr, 0));
3063 break;
3064
3065 case INDIRECT_REF:
3066 /* If both types are record types, just convert the pointer and
80eaf415 3067 make a new INDIRECT_REF.
415dddc8
RK
3068
3069 ??? Disable this for now since it causes problems with the
3070 code in build_binary_op for MODIFY_EXPR which wants to
3071 strip off conversions. But that code really is a mess and
3072 we need to do this a much better way some time. */
3073 if (0
3074 && (TREE_CODE (type) == RECORD_TYPE
3075 || TREE_CODE (type) == UNION_TYPE)
3076 && (TREE_CODE (etype) == RECORD_TYPE
3077 || TREE_CODE (etype) == UNION_TYPE)
3078 && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
3079 return build_unary_op (INDIRECT_REF, NULL_TREE,
3080 convert (build_pointer_type (type),
3081 TREE_OPERAND (expr, 0)));
3082 break;
3083
3084 default:
3085 break;
3086 }
3087
3088 /* Check for converting to a pointer to an unconstrained array. */
3089 if (TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
3090 return convert_to_fat_pointer (type, expr);
3091
3092 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
3093 || (code == INTEGER_CST && ecode == INTEGER_CST
3094 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
3095 return fold (build1 (NOP_EXPR, type, expr));
3096
3097 switch (code)
3098 {
3099 case VOID_TYPE:
3100 return build1 (CONVERT_EXPR, type, expr);
3101
3102 case INTEGER_TYPE:
3103 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
fbf5a39b
AC
3104 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
3105 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
3106 return unchecked_convert (type, expr, 0);
415dddc8
RK
3107 else if (TYPE_BIASED_REPRESENTATION_P (type))
3108 return fold (build1 (CONVERT_EXPR, type,
3109 fold (build (MINUS_EXPR, TREE_TYPE (type),
3110 convert (TREE_TYPE (type), expr),
3111 TYPE_MIN_VALUE (type)))));
3112
3113 /* ... fall through ... */
3114
3115 case ENUMERAL_TYPE:
3116 return fold (convert_to_integer (type, expr));
3117
3118 case POINTER_TYPE:
3119 case REFERENCE_TYPE:
3120 /* If converting between two pointers to records denoting
3121 both a template and type, adjust if needed to account
3122 for any differing offsets, since one might be negative. */
3123 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
3124 {
3125 tree bit_diff
3126 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
3127 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
3128 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
3129 sbitsize_int (BITS_PER_UNIT));
3130
3131 expr = build1 (NOP_EXPR, type, expr);
3132 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
3133 if (integer_zerop (byte_diff))
3134 return expr;
3135
3136 return build_binary_op (PLUS_EXPR, type, expr,
3137 fold (convert_to_pointer (type, byte_diff)));
3138 }
3139
3140 /* If converting to a thin pointer, handle specially. */
3141 if (TYPE_THIN_POINTER_P (type)
3142 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != 0)
3143 return convert_to_thin_pointer (type, expr);
3144
3145 /* If converting fat pointer to normal pointer, get the pointer to the
3146 array and then convert it. */
3147 else if (TYPE_FAT_POINTER_P (etype))
3148 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
3149 NULL_TREE);
3150
3151 return fold (convert_to_pointer (type, expr));
3152
3153 case REAL_TYPE:
3154 return fold (convert_to_real (type, expr));
3155
3156 case RECORD_TYPE:
3157 if (TYPE_LEFT_JUSTIFIED_MODULAR_P (type) && ! AGGREGATE_TYPE_P (etype))
3158 return
dcf92453 3159 gnat_build_constructor
415dddc8
RK
3160 (type, tree_cons (TYPE_FIELDS (type),
3161 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
3162 NULL_TREE));
3163
3164 /* ... fall through ... */
3165
3166 case ARRAY_TYPE:
3167 /* In these cases, assume the front-end has validated the conversion.
3168 If the conversion is valid, it will be a bit-wise conversion, so
3169 it can be viewed as an unchecked conversion. */
fbf5a39b 3170 return unchecked_convert (type, expr, 0);
415dddc8
RK
3171
3172 case UNION_TYPE:
3173 /* Just validate that the type is indeed that of a field
3174 of the type. Then make the simple conversion. */
3175 for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
fbf5a39b
AC
3176 {
3177 if (TREE_TYPE (tem) == etype)
3178 return build1 (CONVERT_EXPR, type, expr);
3179 else if (TREE_CODE (TREE_TYPE (tem)) == RECORD_TYPE
3180 && (TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (tem))
3181 || TYPE_IS_PADDING_P (TREE_TYPE (tem)))
3182 && TREE_TYPE (TYPE_FIELDS (TREE_TYPE (tem))) == etype)
3183 return build1 (CONVERT_EXPR, type,
3184 convert (TREE_TYPE (tem), expr));
3185 }
415dddc8
RK
3186
3187 gigi_abort (413);
3188
3189 case UNCONSTRAINED_ARRAY_TYPE:
3190 /* If EXPR is a constrained array, take its address, convert it to a
3191 fat pointer, and then dereference it. Likewise if EXPR is a
3192 record containing both a template and a constrained array.
3193 Note that a record representing a left justified modular type
3194 always represents a packed constrained array. */
3195 if (ecode == ARRAY_TYPE
3196 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
3197 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
3198 || (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)))
3199 return
3200 build_unary_op
3201 (INDIRECT_REF, NULL_TREE,
3202 convert_to_fat_pointer (TREE_TYPE (type),
3203 build_unary_op (ADDR_EXPR,
3204 NULL_TREE, expr)));
3205
3206 /* Do something very similar for converting one unconstrained
3207 array to another. */
3208 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
3209 return
3210 build_unary_op (INDIRECT_REF, NULL_TREE,
3211 convert (TREE_TYPE (type),
3212 build_unary_op (ADDR_EXPR,
3213 NULL_TREE, expr)));
3214 else
3215 gigi_abort (409);
3216
3217 case COMPLEX_TYPE:
3218 return fold (convert_to_complex (type, expr));
3219
3220 default:
3221 gigi_abort (410);
3222 }
3223}
3224\f
3225/* Remove all conversions that are done in EXP. This includes converting
07fc65c4
GB
3226 from a padded type or to a left-justified modular type. If TRUE_ADDRESS
3227 is nonzero, always return the address of the containing object even if
3228 the address is not bit-aligned. */
415dddc8
RK
3229
3230tree
07fc65c4 3231remove_conversions (exp, true_address)
415dddc8 3232 tree exp;
07fc65c4 3233 int true_address;
415dddc8
RK
3234{
3235 switch (TREE_CODE (exp))
3236 {
3237 case CONSTRUCTOR:
07fc65c4
GB
3238 if (true_address
3239 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
415dddc8 3240 && TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
07fc65c4 3241 return remove_conversions (TREE_VALUE (CONSTRUCTOR_ELTS (exp)), 1);
415dddc8
RK
3242 break;
3243
3244 case COMPONENT_REF:
3245 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3246 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
07fc65c4 3247 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
415dddc8
RK
3248 break;
3249
07fc65c4
GB
3250 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
3251 case NOP_EXPR: case CONVERT_EXPR: case GNAT_NOP_EXPR:
3252 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
415dddc8
RK
3253
3254 default:
3255 break;
3256 }
3257
3258 return exp;
3259}
3260\f
3261/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
3262 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
3263 likewise return an expression pointing to the underlying array. */
3264
3265tree
3266maybe_unconstrained_array (exp)
3267 tree exp;
3268{
3269 enum tree_code code = TREE_CODE (exp);
/* NEW holds the rebuilt reference to the underlying array, when any.  */
3270 tree new;
3271
3272 switch (TREE_CODE (TREE_TYPE (exp)))
3273 {
3274 case UNCONSTRAINED_ARRAY_TYPE:
3275 if (code == UNCONSTRAINED_ARRAY_REF)
3276 {
/* Dereference the P_ARRAY field of the fat pointer operand, carrying
   over the readonly flag.  */
3277 new
3278 = build_unary_op (INDIRECT_REF, NULL_TREE,
3279 build_component_ref (TREE_OPERAND (exp, 0),
3280 get_identifier ("P_ARRAY"),
3281 NULL_TREE));
3282 TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
3283 return new;
3284 }
3285
3286 else if (code == NULL_EXPR)
3287 return build1 (NULL_EXPR,
3288 TREE_TYPE (TREE_TYPE (TYPE_FIELDS
3289 (TREE_TYPE (TREE_TYPE (exp))))),
3290 TREE_OPERAND (exp, 0));
3291
3292 else if (code == WITH_RECORD_EXPR
3293 && (TREE_OPERAND (exp, 0)
3294 != (new = maybe_unconstrained_array
3295 (TREE_OPERAND (exp, 0)))))
3296 return build (WITH_RECORD_EXPR, TREE_TYPE (new), new,
3297 TREE_OPERAND (exp, 1));
3298
 /* NOTE(review): when none of the branches above returned, control falls
    through into the RECORD_TYPE case below -- there is no break here.
    This looks benign since the checks below are guarded by padding and
    template predicates, but the fallthrough is undocumented; confirm it
    is intentional.  */
3299 case RECORD_TYPE:
fbf5a39b
AC
3300 /* If this is a padded type, convert to the unpadded type and see if
3301 it contains a template. */
3302 if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
415dddc8 3303 {
fbf5a39b 3304 new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
415dddc8 3305 if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
fbf5a39b
AC
3306 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
3307 return
3308 build_component_ref (new, NULL_TREE,
3309 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))));
415dddc8 3310 }
fbf5a39b
AC
3311 else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
/* The array data is the second field, after the template.  */
3312 return
3313 build_component_ref (exp, NULL_TREE,
3314 TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))));
415dddc8
RK
3315 break;
3316
3317 default:
3318 break;
3319 }
3320
/* Nothing applied: EXP is returned unchanged.  */
3321 return exp;
3322}
3323\f
fbf5a39b
AC
3324/* Return an expression that does an unchecked conversion of EXPR to TYPE.
3325 If NOTRUNC_P is set, truncation operations should be suppressed. */
415dddc8
RK
3326
3327tree
fbf5a39b 3328unchecked_convert (type, expr, notrunc_p)
415dddc8
RK
3329 tree type;
3330 tree expr;
fbf5a39b 3331 int notrunc_p;
415dddc8
RK
3332{
3333 tree etype = TREE_TYPE (expr);
3334
3335 /* If the expression is already the right type, we are done. */
3336 if (etype == type)
3337 return expr;
3338
3339 /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
3340 new one. */
3341 if (TREE_CODE (expr) == WITH_RECORD_EXPR)
3342 return build (WITH_RECORD_EXPR, type,
fbf5a39b 3343 unchecked_convert (type, TREE_OPERAND (expr, 0), notrunc_p),
415dddc8
RK
3344 TREE_OPERAND (expr, 1));
3345
3346 /* If both types are integral just do a normal conversion.
3347 Likewise for a conversion to an unconstrained array. */
3348 if ((((INTEGRAL_TYPE_P (type)
3349 && ! (TREE_CODE (type) == INTEGER_TYPE
3350 && TYPE_VAX_FLOATING_POINT_P (type)))
3351 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
3352 || (TREE_CODE (type) == RECORD_TYPE
3353 && TYPE_LEFT_JUSTIFIED_MODULAR_P (type)))
3354 && ((INTEGRAL_TYPE_P (etype)
3355 && ! (TREE_CODE (etype) == INTEGER_TYPE
3356 && TYPE_VAX_FLOATING_POINT_P (etype)))
3357 || (POINTER_TYPE_P (etype) && ! TYPE_THIN_POINTER_P (etype))
3358 || (TREE_CODE (etype) == RECORD_TYPE
3359 && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype))))
3360 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3361 {
3362 tree rtype = type;
3363
/* Strip a biased representation from the source by viewing it as an
   unbiased copy of its type before the normal conversion.  */
3364 if (TREE_CODE (etype) == INTEGER_TYPE
3365 && TYPE_BIASED_REPRESENTATION_P (etype))
3366 {
3367 tree ntype = copy_type (etype);
3368
3369 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
3370 TYPE_MAIN_VARIANT (ntype) = ntype;
3371 expr = build1 (GNAT_NOP_EXPR, ntype, expr);
3372 }
3373
/* Likewise convert to an unbiased copy of the target, then reinstate
   the biased target type with a GNAT_NOP_EXPR below.  */
3374 if (TREE_CODE (type) == INTEGER_TYPE
3375 && TYPE_BIASED_REPRESENTATION_P (type))
3376 {
3377 rtype = copy_type (type);
3378 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
3379 TYPE_MAIN_VARIANT (rtype) = rtype;
3380 }
3381
3382 expr = convert (rtype, expr);
3383 if (type != rtype)
3384 expr = build1 (GNAT_NOP_EXPR, type, expr);
3385 }
3386
3387 /* If we are converting TO an integral type whose precision is not the
3388 same as its size, first unchecked convert to a record that contains
3389 an object of the output type. Then extract the field. */
3390 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
3391 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3392 GET_MODE_BITSIZE (TYPE_MODE (type))))
3393 {
/* The RM size differs from the mode size: wrap the target type in a
   one-field record, convert to that record, then pull the field out.  */
3394 tree rec_type = make_node (RECORD_TYPE);
80eaf415 3395 tree field = create_field_decl (get_identifier ("OBJ"), type,
415dddc8
RK
3396 rec_type, 1, 0, 0, 0);
3397
3398 TYPE_FIELDS (rec_type) = field;
3399 layout_type (rec_type);
3400
fbf5a39b 3401 expr = unchecked_convert (rec_type, expr, notrunc_p);
415dddc8
RK
3402 expr = build_component_ref (expr, NULL_TREE, field);
3403 }
3404
3405 /* Similarly for integral input type whose precision is not equal to its
3406 size. */
3407 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype) != 0
3408 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
3409 GET_MODE_BITSIZE (TYPE_MODE (etype))))
3410 {
3411 tree rec_type = make_node (RECORD_TYPE);
3412 tree field
3413 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
3414 1, 0, 0, 0);
3415
3416 TYPE_FIELDS (rec_type) = field;
3417 layout_type (rec_type);
3418
/* Here the source is wrapped in the record via a constructor, then the
   record itself is unchecked-converted to the target type.  */
dcf92453 3419 expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
fbf5a39b 3420 expr = unchecked_convert (type, expr, notrunc_p);
415dddc8
RK
3421 }
3422
3423 /* We have a special case when we are converting between two
3424 unconstrained array types. In that case, take the address,
3425 convert the fat pointer types, and dereference. */
3426 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
3427 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3428 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
07fc65c4 3429 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
415dddc8
RK
3430 build_unary_op (ADDR_EXPR, NULL_TREE,
3431 expr)));
415dddc8
RK
3432 else
3433 {
3434 expr = maybe_unconstrained_array (expr);
3435 etype = TREE_TYPE (expr);
07fc65c4 3436 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
415dddc8
RK
3437 }
3438
415dddc8
RK
3439 /* If the result is an integral type whose size is not equal to
3440 the size of the underlying machine type, sign- or zero-extend
3441 the result. We need not do this in the case where the input is
3442 an integral type of the same precision and signedness or if the output
3443 is a biased type or if both the input and output are unsigned. */
fbf5a39b
AC
3444 if (! notrunc_p
3445 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
415dddc8
RK
3446 && ! (TREE_CODE (type) == INTEGER_TYPE
3447 && TYPE_BIASED_REPRESENTATION_P (type))
3448 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
3449 GET_MODE_BITSIZE (TYPE_MODE (type)))
3450 && ! (INTEGRAL_TYPE_P (etype)
3451 && TREE_UNSIGNED (type) == TREE_UNSIGNED (etype)
3452 && operand_equal_p (TYPE_RM_SIZE (type),
3453 (TYPE_RM_SIZE (etype) != 0
3454 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
3455 0))
3456 && ! (TREE_UNSIGNED (type) && TREE_UNSIGNED (etype)))
3457 {
/* Extend by shifting the value up to the top of the machine word and
   back down; RSHIFT on the signed/unsigned base type performs the sign-
   or zero-extension respectively.  */
b0c48229
NB
3458 tree base_type = gnat_type_for_mode (TYPE_MODE (type),
3459 TREE_UNSIGNED (type));
415dddc8
RK
3460 tree shift_expr
3461 = convert (base_type,
3462 size_binop (MINUS_EXPR,
3463 bitsize_int
3464 (GET_MODE_BITSIZE (TYPE_MODE (type))),
3465 TYPE_RM_SIZE (type)));
3466 expr
3467 = convert (type,
3468 build_binary_op (RSHIFT_EXPR, base_type,
3469 build_binary_op (LSHIFT_EXPR, base_type,
3470 convert (base_type, expr),
3471 shift_expr),
3472 shift_expr));
3473 }
3474
3475 /* An unchecked conversion should never raise Constraint_Error. The code
07fc65c4
GB
3476 below assumes that GCC's conversion routines overflow the same way that
3477 the underlying hardware does. This is probably true. In the rare case
3478 when it is false, we can rely on the fact that such conversions are
3479 erroneous anyway. */
415dddc8
RK
3480 if (TREE_CODE (expr) == INTEGER_CST)
3481 TREE_OVERFLOW (expr) = TREE_CONSTANT_OVERFLOW (expr) = 0;
3482
07fc65c4 3483 /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
415dddc8 3484 show no longer constant. */
07fc65c4 3485 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
415dddc8
RK
3486 && ! operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype), 1))
3487 TREE_CONSTANT (expr) = 0;
3488
3489 return expr;
3490}
e2500fed
GK
3491
3492#include "gt-ada-utils.h"
3493#include "gtype-ada.h"