]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/ada/utils.c
37a9fbd0aea9f20b1068fe6d62a7d59ae43fd49d
[thirdparty/gcc.git] / gcc / ada / utils.c
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2004, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 2, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License distributed with GNAT; see file COPYING. If not, write *
19 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
20 * MA 02111-1307, USA. *
21 * *
22 * GNAT was originally developed by the GNAT team at New York University. *
23 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 * *
25 ****************************************************************************/
26
27 #include "config.h"
28 #include "system.h"
29 #include "coretypes.h"
30 #include "tm.h"
31 #include "tree.h"
32 #include "flags.h"
33 #include "defaults.h"
34 #include "toplev.h"
35 #include "output.h"
36 #include "ggc.h"
37 #include "debug.h"
38 #include "convert.h"
39 #include "target.h"
40
41 #include "ada.h"
42 #include "types.h"
43 #include "atree.h"
44 #include "elists.h"
45 #include "namet.h"
46 #include "nlists.h"
47 #include "stringt.h"
48 #include "uintp.h"
49 #include "fe.h"
50 #include "sinfo.h"
51 #include "einfo.h"
52 #include "ada-tree.h"
53 #include "gigi.h"
54
/* Fallback definitions for targets that do not provide these macros.  */
#ifndef MAX_FIXED_MODE_SIZE
#define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
#endif

#ifndef MAX_BITS_PER_WORD
#define MAX_BITS_PER_WORD  BITS_PER_WORD
#endif

/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Associates a GNAT tree node to a GCC tree node. It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
   of `save_gnu_tree' for more info.  Indexed by GNAT node id minus
   First_Node_Id; allocated (cleared) in init_gnat_to_gnu.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

/* This listhead is used to record any global objects that need elaboration.
   TREE_PURPOSE is the variable to be elaborated and TREE_VALUE is the
   initial value to assign.  */

static GTY(()) tree pending_elaborations;

/* This stack allows us to momentarily switch to generating elaboration
   lists for an inner context.  */

struct e_stack GTY(()) {
  /* Enclosing (saved) stack entry.  */
  struct e_stack *next;
  /* The pending-elaboration list saved for that outer context.  */
  tree elab_list;
};
static GTY(()) struct e_stack *elist_stack;

/* This variable keeps a table for types for each precision so that we only
   allocate each of them once. Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special. Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];
102
/* For each binding contour we allocate a binding_level structure which records
   the entities defined or declared in that contour. Contours include:

        the global one
        one for each subprogram definition
        one for each compound statement (declare block)

   Binding contours are used to create GCC tree BLOCK nodes.  */

struct binding_level GTY(())
{
  /* A chain of ..._DECL nodes for all variables, constants, functions,
     parameters and type declarations. These ..._DECL nodes are chained
     through the TREE_CHAIN field. Note that these ..._DECL nodes are stored
     in the reverse of the order supplied to be compatible with the
     back-end.  */
  tree names;
  /* For each level (except the global one), a chain of BLOCK nodes for all
     the levels that were entered and exited one level down from this one.  */
  tree blocks;
  /* The BLOCK node for this level, if one has been preallocated.
     If 0, the BLOCK is allocated (if needed) when the level is popped.  */
  tree this_block;
  /* The binding level containing this one (the enclosing binding level).  */
  struct binding_level *level_chain;
};

/* The binding level currently in effect.  */
static GTY(()) struct binding_level *current_binding_level;

/* A chain of binding_level structures awaiting reuse; refilled by poplevel,
   consumed by pushlevel.  */
static GTY((deletable (""))) struct binding_level *free_binding_level;

/* The outermost binding level. This binding level is created when the
   compiler is started and it will exist through the entire compilation.  */
static struct binding_level *global_binding_level;

/* Binding level structures are initialized by copying this one.  */
static struct binding_level clear_binding_level = {NULL, NULL, NULL, NULL};

/* Per-function language-specific data; gigi currently stores nothing here.  */
struct language_function GTY(())
{
  int unused;
};

/* Forward declarations for static helpers defined later in this file.  */
static tree merge_sizes (tree, tree, tree, int, int);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static int value_zerop (tree);
static tree float_type_for_precision (int, enum machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static tree convert_to_thin_pointer (tree, tree);
static tree make_descriptor_field (const char *,tree, tree, tree);
static int value_factor_p (tree, int);
static int potential_alignment_gap (tree, tree, tree);
158 \f
159 /* Initialize the association of GNAT nodes to GCC trees. */
160
161 void
162 init_gnat_to_gnu (void)
163 {
164 associate_gnat_to_gnu
165 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
166
167 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
168 }
169
170 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
171 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
172 a ..._DECL node. If NO_CHECK is nonzero, the latter check is suppressed.
173
174 If GNU_DECL is zero, a previous association is to be reset. */
175
176 void
177 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, int no_check)
178 {
179 /* Check that GNAT_ENTITY is not already defined and that it is being set
180 to something which is a decl. Raise gigi 401 if not. Usually, this
181 means GNAT_ENTITY is defined twice, but occasionally is due to some
182 Gigi problem. */
183 if (gnu_decl
184 && (associate_gnat_to_gnu[gnat_entity - First_Node_Id]
185 || (! no_check && ! DECL_P (gnu_decl))))
186 gigi_abort (401);
187
188 associate_gnat_to_gnu[gnat_entity - First_Node_Id] = gnu_decl;
189 }
190
191 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
192 Return the ..._DECL node that was associated with it. If there is no tree
193 node associated with GNAT_ENTITY, abort.
194
195 In some cases, such as delayed elaboration or expressions that need to
196 be elaborated only once, GNAT_ENTITY is really not an entity. */
197
198 tree
199 get_gnu_tree (Entity_Id gnat_entity)
200 {
201 if (! associate_gnat_to_gnu[gnat_entity - First_Node_Id])
202 gigi_abort (402);
203
204 return associate_gnat_to_gnu[gnat_entity - First_Node_Id];
205 }
206
207 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
208
209 int
210 present_gnu_tree (Entity_Id gnat_entity)
211 {
212 return (associate_gnat_to_gnu[gnat_entity - First_Node_Id] != NULL_TREE);
213 }
214
215 \f
216 /* Return non-zero if we are currently in the global binding level. */
217
218 int
219 global_bindings_p (void)
220 {
221 return (force_global != 0 || current_binding_level == global_binding_level
222 ? -1 : 0);
223 }
224
/* Return the list of declarations in the current level. Note that this list
   is in reverse order (it has to be so for back-end compatibility).  */

tree
getdecls (void)
{
  return current_binding_level->names;
}
233
/* Nonzero if the current level needs to have a BLOCK made, i.e. if any
   declarations were recorded in it.  */

int
kept_level_p (void)
{
  return (current_binding_level->names != 0);
}
241
242 /* Enter a new binding level. The input parameter is ignored, but has to be
243 specified for back-end compatibility. */
244
245 void
246 pushlevel (int ignore ATTRIBUTE_UNUSED)
247 {
248 struct binding_level *newlevel = NULL;
249
250 /* Reuse a struct for this binding level, if there is one. */
251 if (free_binding_level)
252 {
253 newlevel = free_binding_level;
254 free_binding_level = free_binding_level->level_chain;
255 }
256 else
257 newlevel
258 = (struct binding_level *) ggc_alloc (sizeof (struct binding_level));
259
260 *newlevel = clear_binding_level;
261
262 /* Add this level to the front of the chain (stack) of levels that are
263 active. */
264 newlevel->level_chain = current_binding_level;
265 current_binding_level = newlevel;
266 }
267
268 /* Exit a binding level.
269 Pop the level off, and restore the state of the identifier-decl mappings
270 that were in effect when this level was entered.
271
   If KEEP is nonzero, this level had explicit declarations, so
   create a "block" (a BLOCK node) for the level to record its
   declarations and subblocks for symbol table output.
275
276 If FUNCTIONBODY is nonzero, this level is the body of a function,
277 so create a block as if KEEP were set and also clear out all
278 label names.
279
280 If REVERSE is nonzero, reverse the order of decls before putting
281 them into the BLOCK. */
282
tree
poplevel (int keep, int reverse, int functionbody)
{
  /* Points to a GCC BLOCK tree node. This is the BLOCK node constructed for
     the binding level that we are about to exit and which is returned by this
     routine.  */
  tree block = NULL_TREE;
  tree decl_chain;
  tree decl_node;
  tree subblock_chain = current_binding_level->blocks;
  tree subblock_node;
  int block_previously_created;

  /* Reverse the list of XXXX_DECL nodes if desired.  Note that the ..._DECL
     nodes chained through the `names' field of current_binding_level are in
     reverse order except for PARM_DECL node, which are explicitly stored in
     the right order.  */
  current_binding_level->names
    = decl_chain = (reverse) ? nreverse (current_binding_level->names)
      : current_binding_level->names;

  /* Output any nested inline functions within this block which must be
     compiled because their address is needed.  */
  for (decl_node = decl_chain; decl_node; decl_node = TREE_CHAIN (decl_node))
    if (TREE_CODE (decl_node) == FUNCTION_DECL
        && ! TREE_ASM_WRITTEN (decl_node) && TREE_ADDRESSABLE (decl_node)
        && DECL_INITIAL (decl_node) != 0)
      {
        push_function_context ();
        output_inline_function (decl_node);
        pop_function_context ();
      }

  /* Use a preallocated BLOCK when one was set via set_block; otherwise only
     make one if this level has declarations or is a function body.  */
  block = 0;
  block_previously_created = (current_binding_level->this_block != 0);
  if (block_previously_created)
    block = current_binding_level->this_block;
  else if (keep || functionbody)
    block = make_node (BLOCK);
  if (block != 0)
    {
      BLOCK_VARS (block) = keep ? decl_chain : 0;
      BLOCK_SUBBLOCKS (block) = subblock_chain;
    }

  /* Record the BLOCK node just built as the subblock of its enclosing
     scope.  */
  for (subblock_node = subblock_chain; subblock_node;
       subblock_node = TREE_CHAIN (subblock_node))
    BLOCK_SUPERCONTEXT (subblock_node) = block;

  /* Clear out the meanings of the local variables of this level.  */

  for (subblock_node = decl_chain; subblock_node;
       subblock_node = TREE_CHAIN (subblock_node))
    if (DECL_NAME (subblock_node) != 0)
      /* If the identifier was used or addressed via a local extern decl,
         don't forget that fact.  */
      if (DECL_EXTERNAL (subblock_node))
        {
          if (TREE_USED (subblock_node))
            TREE_USED (DECL_NAME (subblock_node)) = 1;
          if (TREE_ADDRESSABLE (subblock_node))
            TREE_ADDRESSABLE (DECL_ASSEMBLER_NAME (subblock_node)) = 1;
        }

  {
    /* Pop the current level, and free the structure for reuse.  */
    struct binding_level *level = current_binding_level;
    current_binding_level = current_binding_level->level_chain;
    level->level_chain = free_binding_level;
    free_binding_level = level;
  }

  if (functionbody)
    {
      /* This is the top level block of a function. The ..._DECL chain stored
         in BLOCK_VARS are the function's parameters (PARM_DECL nodes). Don't
         leave them in the BLOCK because they are found in the FUNCTION_DECL
         instead.  */
      DECL_INITIAL (current_function_decl) = block;
      BLOCK_VARS (block) = 0;
    }
  else if (block)
    {
      /* Record this BLOCK as a subblock of the (now current) enclosing
         level, unless it was preallocated and hence already recorded.  */
      if (!block_previously_created)
        current_binding_level->blocks
          = chainon (current_binding_level->blocks, block);
    }

  /* If we did not make a block for the level just exited, any blocks made for
     inner levels (since they cannot be recorded as subblocks in that level)
     must be carried forward so they will later become subblocks of something
     else.  */
  else if (subblock_chain)
    current_binding_level->blocks
      = chainon (current_binding_level->blocks, subblock_chain);
  if (block)
    TREE_USED (block) = 1;

  return block;
}
384 \f
/* Insert BLOCK at the end of the list of subblocks of the
   current binding level. This is used when a BIND_EXPR is expanded,
   to handle the BLOCK node inside the BIND_EXPR.  */

void
insert_block (tree block)
{
  /* Mark the BLOCK as used before chaining it onto the current level.  */
  TREE_USED (block) = 1;
  current_binding_level->blocks
    = chainon (current_binding_level->blocks, block);
}
396
/* Set the BLOCK node for the innermost scope
   (the one we are currently in).  */

void
set_block (tree block)
{
  current_binding_level->this_block = block;
  /* Merge the BLOCK's variables and subblocks into the lists kept for the
     current level, so they are seen when the level is popped.  */
  current_binding_level->names = chainon (current_binding_level->names,
                                          BLOCK_VARS (block));
  current_binding_level->blocks = chainon (current_binding_level->blocks,
                                           BLOCK_SUBBLOCKS (block));
}
409
410 /* Records a ..._DECL node DECL as belonging to the current lexical scope.
411 Returns the ..._DECL node. */
412
413 tree
414 pushdecl (tree decl)
415 {
416 struct binding_level *b;
417
418 /* If at top level, there is no context. But PARM_DECLs always go in the
419 level of its function. */
420 if (global_bindings_p () && TREE_CODE (decl) != PARM_DECL)
421 {
422 b = global_binding_level;
423 DECL_CONTEXT (decl) = 0;
424 }
425 else
426 {
427 b = current_binding_level;
428 DECL_CONTEXT (decl) = current_function_decl;
429 }
430
431 /* Put the declaration on the list. The list of declarations is in reverse
432 order. The list will be reversed later if necessary. This needs to be
433 this way for compatibility with the back-end.
434
435 Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into the list. They
436 will cause trouble with the debugger and aren't needed anyway. */
437 if (TREE_CODE (decl) != TYPE_DECL
438 || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
439 {
440 TREE_CHAIN (decl) = b->names;
441 b->names = decl;
442 }
443
444 /* For the declaration of a type, set its name if it either is not already
445 set, was set to an IDENTIFIER_NODE, indicating an internal name,
446 or if the previous type name was not derived from a source name.
447 We'd rather have the type named with a real name and all the pointer
448 types to the same object have the same POINTER_TYPE node. Code in this
449 function in c-decl.c makes a copy of the type node here, but that may
450 cause us trouble with incomplete types, so let's not try it (at least
451 for now). */
452
453 if (TREE_CODE (decl) == TYPE_DECL
454 && DECL_NAME (decl) != 0
455 && (TYPE_NAME (TREE_TYPE (decl)) == 0
456 || TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == IDENTIFIER_NODE
457 || (TREE_CODE (TYPE_NAME (TREE_TYPE (decl))) == TYPE_DECL
458 && DECL_ARTIFICIAL (TYPE_NAME (TREE_TYPE (decl)))
459 && ! DECL_ARTIFICIAL (decl))))
460 TYPE_NAME (TREE_TYPE (decl)) = decl;
461
462 return decl;
463 }
464 \f
/* Do little here. Set up the standard declarations later after the
   front end has been run.  Creates the global binding level and the basic
   GCC tree nodes; must run before any GNAT type is expanded.  */

void
gnat_init_decl_processing (void)
{
  input_line = 0;

  /* Make the binding_level structure for global names.  */
  current_function_decl = 0;
  current_binding_level = 0;
  free_binding_level = 0;
  pushlevel (0);
  global_binding_level = current_binding_level;

  build_common_tree_nodes (0);

  /* In Ada, we use a signed type for SIZETYPE. Use the signed type
     corresponding to the size of Pmode. In most cases when ptr_mode and
     Pmode differ, C will use the width of ptr_mode as sizetype. But we get
     far better code using the width of Pmode. Make this here since we need
     this before we can expand the GNAT types.  */
  set_sizetype (gnat_type_for_size (GET_MODE_BITSIZE (Pmode), 0));
  build_common_tree_nodes_2 (0);

  pushdecl (build_decl (TYPE_DECL, get_identifier (SIZE_TYPE), sizetype));

  /* We need to make the integer type before doing anything else.
     We stitch this in to the appropriate GNAT type later.  */
  pushdecl (build_decl (TYPE_DECL, get_identifier ("integer"),
                        integer_type_node));
  /* NOTE(review): the name "unsigned char" is bound to char_type_node here
     — presumably deliberate for debug-info purposes; confirm against
     exp_dbug conventions before changing.  */
  pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned char"),
                        char_type_node));

  ptr_void_type_node = build_pointer_type (void_type_node);

}
502
/* Create the predefined scalar types such as `integer_type_node' needed
   in the gcc back-end and initialize the global binding level.

   LONG_LONG_FLOAT_TYPE is the front end's longest float type (used to set
   longest_float_type_node) and EXCEPTION_TYPE is its exception occurrence
   type (used to set except_type_node).  Also declares the GNAT runtime
   subprograms (allocation, exception handling, raise checks).  */

void
init_gigi_decls (tree long_long_float_type, tree exception_type)
{
  tree endlink, decl;
  unsigned int i;

  /* Set the types that GCC and Gigi use from the front end. We would like
     to do this for char_type_node, but it needs to correspond to the C
     char type.  */
  if (TREE_CODE (TREE_TYPE (long_long_float_type)) == INTEGER_TYPE)
    {
      /* In this case, the builtin floating point types are VAX float,
         so make up a type for use.  */
      longest_float_type_node = make_node (REAL_TYPE);
      TYPE_PRECISION (longest_float_type_node) = LONG_DOUBLE_TYPE_SIZE;
      layout_type (longest_float_type_node);
      pushdecl (build_decl (TYPE_DECL, get_identifier ("longest float type"),
                            longest_float_type_node));
    }
  else
    longest_float_type_node = TREE_TYPE (long_long_float_type);

  except_type_node = TREE_TYPE (exception_type);

  unsigned_type_node = gnat_type_for_size (INT_TYPE_SIZE, 1);
  pushdecl (build_decl (TYPE_DECL, get_identifier ("unsigned int"),
                        unsigned_type_node));

  void_type_decl_node
    = pushdecl (build_decl (TYPE_DECL, get_identifier ("void"),
                            void_type_node));

  void_ftype = build_function_type (void_type_node, NULL_TREE);
  ptr_void_ftype = build_pointer_type (void_ftype);

  /* Now declare runtime functions. */
  endlink = tree_cons (NULL_TREE, void_type_node, NULL_TREE);

  /* malloc is a function declaration tree for a function to allocate
     memory.  */
  malloc_decl = create_subprog_decl (get_identifier ("__gnat_malloc"),
                                     NULL_TREE,
                                     build_function_type (ptr_void_type_node,
                                                          tree_cons (NULL_TREE,
                                                                     sizetype,
                                                                     endlink)),
                                     NULL_TREE, 0, 1, 1, 0);

  /* free is a function declaration tree for a function to free memory.  */
  free_decl
    = create_subprog_decl (get_identifier ("__gnat_free"), NULL_TREE,
                           build_function_type (void_type_node,
                                                tree_cons (NULL_TREE,
                                                           ptr_void_type_node,
                                                           endlink)),
                           NULL_TREE, 0, 1, 1, 0);

  /* Make the types and functions used for exception processing.  The
     jmpbuf is modeled as a 6-element array of Pmode-sized words.  */
  jmpbuf_type
    = build_array_type (gnat_type_for_mode (Pmode, 0),
                        build_index_type (build_int_2 (5, 0)));
  pushdecl (build_decl (TYPE_DECL, get_identifier ("JMPBUF_T"), jmpbuf_type));
  jmpbuf_ptr_type = build_pointer_type (jmpbuf_type);

  /* Functions to get and set the jumpbuf pointer for the current thread.  */
  get_jmpbuf_decl
    = create_subprog_decl
      (get_identifier ("system__soft_links__get_jmpbuf_address_soft"),
       NULL_TREE, build_function_type (jmpbuf_ptr_type, NULL_TREE),
       NULL_TREE, 0, 1, 1, 0);

  set_jmpbuf_decl
    = create_subprog_decl
      (get_identifier ("system__soft_links__set_jmpbuf_address_soft"),
       NULL_TREE,
       build_function_type (void_type_node,
                            tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
       NULL_TREE, 0, 1, 1, 0);

  /* Function to get the current exception.  */
  get_excptr_decl
    = create_subprog_decl
      (get_identifier ("system__soft_links__get_gnat_exception"),
       NULL_TREE,
       build_function_type (build_pointer_type (except_type_node), NULL_TREE),
       NULL_TREE, 0, 1, 1, 0);

  /* Functions that raise exceptions. */
  raise_nodefer_decl
    = create_subprog_decl
      (get_identifier ("__gnat_raise_nodefer_with_msg"), NULL_TREE,
       build_function_type (void_type_node,
                            tree_cons (NULL_TREE,
                                       build_pointer_type (except_type_node),
                                       endlink)),
       NULL_TREE, 0, 1, 1, 0);

  /* Hooks to call when entering/leaving an exception handler.  */
  begin_handler_decl
    = create_subprog_decl (get_identifier ("__gnat_begin_handler"), NULL_TREE,
                           build_function_type (void_type_node,
                                                tree_cons (NULL_TREE,
                                                           ptr_void_type_node,
                                                           endlink)),
                           NULL_TREE, 0, 1, 1, 0);

  end_handler_decl
    = create_subprog_decl (get_identifier ("__gnat_end_handler"), NULL_TREE,
                           build_function_type (void_type_node,
                                                tree_cons (NULL_TREE,
                                                           ptr_void_type_node,
                                                           endlink)),
                           NULL_TREE, 0, 1, 1, 0);

  /* If in no exception handlers mode, all raise statements are redirected to
     __gnat_last_chance_handler. No need to redefine raise_nodefer_decl, since
     this procedure will never be called in this mode.  */
  if (No_Exception_Handlers_Set ())
    {
      decl
        = create_subprog_decl
          (get_identifier ("__gnat_last_chance_handler"), NULL_TREE,
           build_function_type (void_type_node,
                                tree_cons (NULL_TREE,
                                           build_pointer_type (char_type_node),
                                           tree_cons (NULL_TREE,
                                                      integer_type_node,
                                                      endlink))),
           NULL_TREE, 0, 1, 1, 0);

      for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
        gnat_raise_decls[i] = decl;
    }
  else
    /* Otherwise, make one decl for each exception reason.  */
    for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
      {
        char name[17];

        /* NOTE(review): name[17] exactly fits "__gnat_rcheck_NN"; the %.2d
           conversion is also given the unsigned I.  Both rely on
           LAST_REASON_CODE staying below 100 — confirm if reason codes are
           ever extended.  */
        sprintf (name, "__gnat_rcheck_%.2d", i);
        gnat_raise_decls[i]
          = create_subprog_decl
            (get_identifier (name), NULL_TREE,
             build_function_type (void_type_node,
                                  tree_cons (NULL_TREE,
                                             build_pointer_type
                                             (char_type_node),
                                             tree_cons (NULL_TREE,
                                                        integer_type_node,
                                                        endlink))),
             NULL_TREE, 0, 1, 1, 0);
      }

  /* Indicate that these never return.  */
  TREE_THIS_VOLATILE (raise_nodefer_decl) = 1;
  TREE_SIDE_EFFECTS (raise_nodefer_decl) = 1;
  TREE_TYPE (raise_nodefer_decl)
    = build_qualified_type (TREE_TYPE (raise_nodefer_decl),
                            TYPE_QUAL_VOLATILE);

  for (i = 0; i < ARRAY_SIZE (gnat_raise_decls); i++)
    {
      TREE_THIS_VOLATILE (gnat_raise_decls[i]) = 1;
      TREE_SIDE_EFFECTS (gnat_raise_decls[i]) = 1;
      TREE_TYPE (gnat_raise_decls[i])
        = build_qualified_type (TREE_TYPE (gnat_raise_decls[i]),
                                TYPE_QUAL_VOLATILE);
    }

  /* setjmp returns an integer and has one operand, which is a pointer to
     a jmpbuf.  */
  setjmp_decl
    = create_subprog_decl
      (get_identifier ("__builtin_setjmp"), NULL_TREE,
       build_function_type (integer_type_node,
                            tree_cons (NULL_TREE, jmpbuf_ptr_type, endlink)),
       NULL_TREE, 0, 1, 1, 0);

  DECL_BUILT_IN_CLASS (setjmp_decl) = BUILT_IN_NORMAL;
  DECL_FUNCTION_CODE (setjmp_decl) = BUILT_IN_SETJMP;

  main_identifier_node = get_identifier ("main");
}
689 \f
690 /* Given a record type (RECORD_TYPE) and a chain of FIELD_DECL
691 nodes (FIELDLIST), finish constructing the record or union type.
692 If HAS_REP is nonzero, this record has a rep clause; don't call
693 layout_type but merely set the size and alignment ourselves.
694 If DEFER_DEBUG is nonzero, do not call the debugging routines
695 on this type; it will be done later. */
696
697 void
698 finish_record_type (tree record_type,
699 tree fieldlist,
700 int has_rep,
701 int defer_debug)
702 {
703 enum tree_code code = TREE_CODE (record_type);
704 tree ada_size = bitsize_zero_node;
705 tree size = bitsize_zero_node;
706 tree size_unit = size_zero_node;
707 int var_size = 0;
708 tree field;
709
710 TYPE_FIELDS (record_type) = fieldlist;
711
712 if (TYPE_NAME (record_type) != 0
713 && TREE_CODE (TYPE_NAME (record_type)) == TYPE_DECL)
714 TYPE_STUB_DECL (record_type) = TYPE_NAME (record_type);
715 else
716 TYPE_STUB_DECL (record_type)
717 = pushdecl (build_decl (TYPE_DECL, TYPE_NAME (record_type),
718 record_type));
719
720 /* We don't need both the typedef name and the record name output in
721 the debugging information, since they are the same. */
722 DECL_ARTIFICIAL (TYPE_STUB_DECL (record_type)) = 1;
723
724 /* Globally initialize the record first. If this is a rep'ed record,
725 that just means some initializations; otherwise, layout the record. */
726
727 if (has_rep)
728 {
729 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
730 TYPE_MODE (record_type) = BLKmode;
731 if (TYPE_SIZE (record_type) == 0)
732 {
733 TYPE_SIZE (record_type) = bitsize_zero_node;
734 TYPE_SIZE_UNIT (record_type) = size_zero_node;
735 }
736 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
737 out just like a UNION_TYPE, since the size will be fixed. */
738 else if (code == QUAL_UNION_TYPE)
739 code = UNION_TYPE;
740 }
741 else
742 {
743 /* Ensure there isn't a size already set. There can be in an error
744 case where there is a rep clause but all fields have errors and
745 no longer have a position. */
746 TYPE_SIZE (record_type) = 0;
747 layout_type (record_type);
748 }
749
750 /* At this point, the position and size of each field is known. It was
751 either set before entry by a rep clause, or by laying out the type above.
752
753 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
754 to compute the Ada size; the GCC size and alignment (for rep'ed records
755 that are not padding types); and the mode (for rep'ed records). We also
756 clear the DECL_BIT_FIELD indication for the cases we know have not been
757 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
758
759 if (code == QUAL_UNION_TYPE)
760 fieldlist = nreverse (fieldlist);
761
762 for (field = fieldlist; field; field = TREE_CHAIN (field))
763 {
764 tree pos = bit_position (field);
765
766 tree type = TREE_TYPE (field);
767 tree this_size = DECL_SIZE (field);
768 tree this_size_unit = DECL_SIZE_UNIT (field);
769 tree this_ada_size = DECL_SIZE (field);
770
771 /* We need to make an XVE/XVU record if any field has variable size,
772 whether or not the record does. For example, if we have an union,
773 it may be that all fields, rounded up to the alignment, have the
774 same size, in which case we'll use that size. But the debug
775 output routines (except Dwarf2) won't be able to output the fields,
776 so we need to make the special record. */
777 if (TREE_CODE (this_size) != INTEGER_CST)
778 var_size = 1;
779
780 if ((TREE_CODE (type) == RECORD_TYPE || TREE_CODE (type) == UNION_TYPE
781 || TREE_CODE (type) == QUAL_UNION_TYPE)
782 && ! TYPE_IS_FAT_POINTER_P (type)
783 && ! TYPE_CONTAINS_TEMPLATE_P (type)
784 && TYPE_ADA_SIZE (type) != 0)
785 this_ada_size = TYPE_ADA_SIZE (type);
786
787 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
788 if (DECL_BIT_FIELD (field) && !STRICT_ALIGNMENT
789 && value_factor_p (pos, BITS_PER_UNIT)
790 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
791 DECL_BIT_FIELD (field) = 0;
792
793 /* If we still have DECL_BIT_FIELD set at this point, we know the field
794 is technically not addressable. Except that it can actually be
795 addressed if the field is BLKmode and happens to be properly
796 aligned. */
797 DECL_NONADDRESSABLE_P (field)
798 |= DECL_BIT_FIELD (field) && DECL_MODE (field) != BLKmode;
799
800 if (has_rep && ! DECL_BIT_FIELD (field))
801 TYPE_ALIGN (record_type)
802 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
803
804 switch (code)
805 {
806 case UNION_TYPE:
807 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
808 size = size_binop (MAX_EXPR, size, this_size);
809 size_unit = size_binop (MAX_EXPR, size_unit, this_size_unit);
810 break;
811
812 case QUAL_UNION_TYPE:
813 ada_size
814 = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
815 this_ada_size, ada_size));
816 size = fold (build (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
817 this_size, size));
818 size_unit = fold (build (COND_EXPR, sizetype, DECL_QUALIFIER (field),
819 this_size_unit, size_unit));
820 break;
821
822 case RECORD_TYPE:
823 /* Since we know here that all fields are sorted in order of
824 increasing bit position, the size of the record is one
825 higher than the ending bit of the last field processed
826 unless we have a rep clause, since in that case we might
827 have a field outside a QUAL_UNION_TYPE that has a higher ending
828 position. So use a MAX in that case. Also, if this field is a
829 QUAL_UNION_TYPE, we need to take into account the previous size in
830 the case of empty variants. */
831 ada_size
832 = merge_sizes (ada_size, pos, this_ada_size,
833 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
834 size = merge_sizes (size, pos, this_size,
835 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
836 size_unit
837 = merge_sizes (size_unit, byte_position (field), this_size_unit,
838 TREE_CODE (type) == QUAL_UNION_TYPE, has_rep);
839 break;
840
841 default:
842 abort ();
843 }
844 }
845
846 if (code == QUAL_UNION_TYPE)
847 nreverse (fieldlist);
848
849 /* If this is a padding record, we never want to make the size smaller than
850 what was specified in it, if any. */
851 if (TREE_CODE (record_type) == RECORD_TYPE
852 && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type) != 0)
853 {
854 size = TYPE_SIZE (record_type);
855 size_unit = TYPE_SIZE_UNIT (record_type);
856 }
857
858 /* Now set any of the values we've just computed that apply. */
859 if (! TYPE_IS_FAT_POINTER_P (record_type)
860 && ! TYPE_CONTAINS_TEMPLATE_P (record_type))
861 SET_TYPE_ADA_SIZE (record_type, ada_size);
862
863 if (has_rep)
864 {
865 if (! (TREE_CODE (record_type) == RECORD_TYPE
866 && TYPE_IS_PADDING_P (record_type)
867 && CONTAINS_PLACEHOLDER_P (size)))
868 {
869 TYPE_SIZE (record_type) = round_up (size, TYPE_ALIGN (record_type));
870 TYPE_SIZE_UNIT (record_type)
871 = round_up (size_unit,
872 TYPE_ALIGN (record_type) / BITS_PER_UNIT);
873 }
874
875 compute_record_mode (record_type);
876 }
877
878 if (! defer_debug)
879 {
880 /* If this record is of variable size, rename it so that the
881 debugger knows it is and make a new, parallel, record
882 that tells the debugger how the record is laid out. See
883 exp_dbug.ads. But don't do this for records that are padding
884 since they confuse GDB. */
885 if (var_size
886 && ! (TREE_CODE (record_type) == RECORD_TYPE
887 && TYPE_IS_PADDING_P (record_type)))
888 {
889 tree new_record_type
890 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
891 ? UNION_TYPE : TREE_CODE (record_type));
892 tree orig_id = DECL_NAME (TYPE_STUB_DECL (record_type));
893 tree new_id
894 = concat_id_with_name (orig_id,
895 TREE_CODE (record_type) == QUAL_UNION_TYPE
896 ? "XVU" : "XVE");
897 tree last_pos = bitsize_zero_node;
898 tree old_field;
899 tree prev_old_field = 0;
900
901 TYPE_NAME (new_record_type) = new_id;
902 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
903 TYPE_STUB_DECL (new_record_type)
904 = pushdecl (build_decl (TYPE_DECL, new_id, new_record_type));
905 DECL_ARTIFICIAL (TYPE_STUB_DECL (new_record_type)) = 1;
906 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
907 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
908 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
909
910 /* Now scan all the fields, replacing each field with a new
911 field corresponding to the new encoding. */
912 for (old_field = TYPE_FIELDS (record_type); old_field != 0;
913 old_field = TREE_CHAIN (old_field))
914 {
915 tree field_type = TREE_TYPE (old_field);
916 tree field_name = DECL_NAME (old_field);
917 tree new_field;
918 tree curpos = bit_position (old_field);
919 int var = 0;
920 unsigned int align = 0;
921 tree pos;
922
923 /* See how the position was modified from the last position.
924
925 There are two basic cases we support: a value was added
926 to the last position or the last position was rounded to
927 a boundary and they something was added. Check for the
928 first case first. If not, see if there is any evidence
929 of rounding. If so, round the last position and try
930 again.
931
932 If this is a union, the position can be taken as zero. */
933
934 if (TREE_CODE (new_record_type) == UNION_TYPE)
935 pos = bitsize_zero_node, align = 0;
936 else
937 pos = compute_related_constant (curpos, last_pos);
938
939 if (pos == 0 && TREE_CODE (curpos) == MULT_EXPR
940 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST)
941 {
942 align = TREE_INT_CST_LOW (TREE_OPERAND (curpos, 1));
943 pos = compute_related_constant (curpos,
944 round_up (last_pos, align));
945 }
946 else if (pos == 0 && TREE_CODE (curpos) == PLUS_EXPR
947 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
948 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
949 && host_integerp (TREE_OPERAND
950 (TREE_OPERAND (curpos, 0), 1),
951 1))
952 {
953 align
954 = tree_low_cst
955 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
956 pos = compute_related_constant (curpos,
957 round_up (last_pos, align));
958 }
959 else if (potential_alignment_gap (prev_old_field, old_field,
960 pos))
961 {
962 align = TYPE_ALIGN (field_type);
963 pos = compute_related_constant (curpos,
964 round_up (last_pos, align));
965 }
966
967 /* If we can't compute a position, set it to zero.
968
969 ??? We really should abort here, but it's too much work
970 to get this correct for all cases. */
971
972 if (pos == 0)
973 pos = bitsize_zero_node;
974
975 /* See if this type is variable-size and make a new type
976 and indicate the indirection if so. */
977 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
978 {
979 field_type = build_pointer_type (field_type);
980 var = 1;
981 }
982
983 /* Make a new field name, if necessary. */
984 if (var || align != 0)
985 {
986 char suffix[6];
987
988 if (align != 0)
989 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
990 align / BITS_PER_UNIT);
991 else
992 strcpy (suffix, "XVL");
993
994 field_name = concat_id_with_name (field_name, suffix);
995 }
996
997 new_field = create_field_decl (field_name, field_type,
998 new_record_type, 0,
999 DECL_SIZE (old_field), pos, 0);
1000 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
1001 TYPE_FIELDS (new_record_type) = new_field;
1002
1003 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1004 zero. The only time it's not the last field of the record
1005 is when there are other components at fixed positions after
1006 it (meaning there was a rep clause for every field) and we
1007 want to be able to encode them. */
1008 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1009 (TREE_CODE (TREE_TYPE (old_field))
1010 == QUAL_UNION_TYPE)
1011 ? bitsize_zero_node
1012 : DECL_SIZE (old_field));
1013 prev_old_field = old_field;
1014 }
1015
1016 TYPE_FIELDS (new_record_type)
1017 = nreverse (TYPE_FIELDS (new_record_type));
1018
1019 rest_of_type_compilation (new_record_type, global_bindings_p ());
1020 }
1021
1022 rest_of_type_compilation (record_type, global_bindings_p ());
1023 }
1024 }
1025
/* Utility function of above to merge LAST_SIZE, the previous size of a record
   with FIRST_BIT and SIZE that describe a field.  SPECIAL is nonzero
   if this represents a QUAL_UNION_TYPE in which case we must look for
   COND_EXPRs and replace a value of zero with the old size.  If HAS_REP
   is nonzero, we must take the MAX of the end position of this field
   with LAST_SIZE.  In all other cases, we use FIRST_BIT plus SIZE.

   We return an expression for the size.  */

static tree
merge_sizes (tree last_size,
             tree first_bit,
             tree size,
             int special,
             int has_rep)
{
  tree type = TREE_TYPE (last_size);
  tree new;

  if (! special || TREE_CODE (size) != COND_EXPR)
    {
      /* Ordinary field: it ends at FIRST_BIT + SIZE.  With a rep clause
	 fields may be placed anywhere, so the running size is the MAX of
	 that end position and the size accumulated so far.  */
      new = size_binop (PLUS_EXPR, first_bit, size);
      if (has_rep)
	new = size_binop (MAX_EXPR, last_size, new);
    }

  else
    /* QUAL_UNION_TYPE variant whose size is a COND_EXPR: recurse into each
       arm, treating a literal zero size as "variant not present", in which
       case the previous size LAST_SIZE is kept.  */
    new = fold (build (COND_EXPR, type, TREE_OPERAND (size, 0),
		       integer_zerop (TREE_OPERAND (size, 1))
		       ? last_size : merge_sizes (last_size, first_bit,
						  TREE_OPERAND (size, 1),
						  1, has_rep),
		       integer_zerop (TREE_OPERAND (size, 2))
		       ? last_size : merge_sizes (last_size, first_bit,
						  TREE_OPERAND (size, 2),
						  1, has_rep)));

  /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
     when fed through substitute_in_expr) into thinking that a constant
     size is not constant.  */
  while (TREE_CODE (new) == NON_LVALUE_EXPR)
    new = TREE_OPERAND (new, 0);

  return new;
}
1071
1072 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1073 related by the addition of a constant. Return that constant if so. */
1074
1075 static tree
1076 compute_related_constant (tree op0, tree op1)
1077 {
1078 tree op0_var, op1_var;
1079 tree op0_con = split_plus (op0, &op0_var);
1080 tree op1_con = split_plus (op1, &op1_var);
1081 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1082
1083 if (operand_equal_p (op0_var, op1_var, 0))
1084 return result;
1085 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1086 return result;
1087 else
1088 return 0;
1089 }
1090
/* Utility function of above to split a tree OP which may be a sum, into a
   constant part, which is returned, and a variable part, which is stored
   in *PVAR.  *PVAR may be bitsize_zero_node.  All operations must be of
   bitsizetype.  */

static tree
split_plus (tree in, tree *pvar)
{
  /* Strip NOPS in order to ease the tree traversal and maximize the
     potential for constant or plus/minus discovery.  We need to be careful
     to always return and set *pvar to bitsizetype trees, but it's worth
     the effort.  */
  STRIP_NOPS (in);

  /* Default answer: the whole of IN is variable and the constant part is
     zero.  Overridden below whenever we can do better.  */
  *pvar = convert (bitsizetype, in);

  if (TREE_CODE (in) == INTEGER_CST)
    {
      /* Pure constant: no variable part at all.  */
      *pvar = bitsize_zero_node;
      return convert (bitsizetype, in);
    }
  else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
    {
      /* Recurse on both operands and recombine the constant parts and the
	 variable parts with the same operation.  */
      tree lhs_var, rhs_var;
      tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
      tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);

      /* Neither operand contributed a constant: keep the default *PVAR
	 (IN itself) and report a zero constant part.  */
      if (lhs_var == TREE_OPERAND (in, 0)
	  && rhs_var == TREE_OPERAND (in, 1))
	return bitsize_zero_node;

      *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
      return size_binop (TREE_CODE (in), lhs_con, rhs_con);
    }
  else
    return bitsize_zero_node;
}
1128 \f
1129 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1130 subprogram. If it is void_type_node, then we are dealing with a procedure,
1131 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1132 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1133 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1134 RETURNS_UNCONSTRAINED is nonzero if the function returns an unconstrained
1135 object. RETURNS_BY_REF is nonzero if the function returns by reference.
1136 RETURNS_WITH_DSP is nonzero if the function is to return with a
1137 depressed stack pointer. */
1138
1139 tree
1140 create_subprog_type (tree return_type,
1141 tree param_decl_list,
1142 tree cico_list,
1143 int returns_unconstrained,
1144 int returns_by_ref,
1145 int returns_with_dsp)
1146 {
1147 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1148 the subprogram formal parameters. This list is generated by traversing the
1149 input list of PARM_DECL nodes. */
1150 tree param_type_list = NULL;
1151 tree param_decl;
1152 tree type;
1153
1154 for (param_decl = param_decl_list; param_decl;
1155 param_decl = TREE_CHAIN (param_decl))
1156 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1157 param_type_list);
1158
1159 /* The list of the function parameter types has to be terminated by the void
1160 type to signal to the back-end that we are not dealing with a variable
1161 parameter subprogram, but that the subprogram has a fixed number of
1162 parameters. */
1163 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1164
1165 /* The list of argument types has been created in reverse
1166 so nreverse it. */
1167 param_type_list = nreverse (param_type_list);
1168
1169 type = build_function_type (return_type, param_type_list);
1170
1171 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1172 or the new type should, make a copy of TYPE. Likewise for
1173 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1174 if (TYPE_CI_CO_LIST (type) != 0 || cico_list != 0
1175 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1176 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref)
1177 type = copy_type (type);
1178
1179 SET_TYPE_CI_CO_LIST (type, cico_list);
1180 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1181 TYPE_RETURNS_STACK_DEPRESSED (type) = returns_with_dsp;
1182 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1183 return type;
1184 }
1185 \f
1186 /* Return a copy of TYPE but safe to modify in any way. */
1187
1188 tree
1189 copy_type (tree type)
1190 {
1191 tree new = copy_node (type);
1192
1193 /* copy_node clears this field instead of copying it, because it is
1194 aliased with TREE_CHAIN. */
1195 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
1196
1197 TYPE_POINTER_TO (new) = 0;
1198 TYPE_REFERENCE_TO (new) = 0;
1199 TYPE_MAIN_VARIANT (new) = new;
1200 TYPE_NEXT_VARIANT (new) = 0;
1201
1202 return new;
1203 }
1204 \f
1205 /* Return an INTEGER_TYPE of SIZETYPE with range MIN to MAX and whose
1206 TYPE_INDEX_TYPE is INDEX. */
1207
1208 tree
1209 create_index_type (tree min, tree max, tree index)
1210 {
1211 /* First build a type for the desired range. */
1212 tree type = build_index_2_type (min, max);
1213
1214 /* If this type has the TYPE_INDEX_TYPE we want, return it. Otherwise, if it
1215 doesn't have TYPE_INDEX_TYPE set, set it to INDEX. If TYPE_INDEX_TYPE
1216 is set, but not to INDEX, make a copy of this type with the requested
1217 index type. Note that we have no way of sharing these types, but that's
1218 only a small hole. */
1219 if (TYPE_INDEX_TYPE (type) == index)
1220 return type;
1221 else if (TYPE_INDEX_TYPE (type) != 0)
1222 type = copy_type (type);
1223
1224 SET_TYPE_INDEX_TYPE (type, index);
1225 return type;
1226 }
1227 \f
/* Return a TYPE_DECL node.  TYPE_NAME gives the name of the type (a character
   string) and TYPE is a ..._TYPE node giving its data type.  ATTR_LIST is a
   list of attributes to be applied to the declaration.
   ARTIFICIAL_P is nonzero if this is a declaration that was generated
   by the compiler.  DEBUG_INFO_P is nonzero if we need to write debugging
   information about this type.  */

tree
create_type_decl (tree type_name,
		  tree type,
		  struct attrib *attr_list,
		  int artificial_p,
		  int debug_info_p)
{
  tree type_decl = build_decl (TYPE_DECL, type_name, type);
  enum tree_code code = TREE_CODE (type);

  DECL_ARTIFICIAL (type_decl) = artificial_p;
  /* Add the declaration to the current binding level, then process the
     requested attributes on it.  */
  pushdecl (type_decl);
  process_attributes (type_decl, attr_list);

  /* Pass type declaration information to the debugger unless this is an
     UNCONSTRAINED_ARRAY_TYPE, which the debugger does not support,
     and ENUMERAL_TYPE or RECORD_TYPE which is handled separately,
     a dummy type, which will be completed later, or a type for which
     debugging information was not requested.  */
  if (code == UNCONSTRAINED_ARRAY_TYPE || TYPE_IS_DUMMY_P (type)
      || ! debug_info_p)
    DECL_IGNORED_P (type_decl) = 1;
  else if (code != ENUMERAL_TYPE && code != RECORD_TYPE
	   && ! ((code == POINTER_TYPE || code == REFERENCE_TYPE)
		 && TYPE_IS_DUMMY_P (TREE_TYPE (type))))
    rest_of_decl_compilation (type_decl, NULL, global_bindings_p (), 0);

  return type_decl;
}
1263
/* Returns a GCC VAR_DECL node.  VAR_NAME gives the name of the variable.
   ASM_NAME is its assembler name (if provided).  TYPE is its data type
   (a GCC ..._TYPE node).  VAR_INIT is the GCC tree for an optional initial
   expression; NULL_TREE if none.

   CONST_FLAG is nonzero if this variable is constant.

   PUBLIC_FLAG is nonzero if this definition is to be made visible outside of
   the current compilation unit.  This flag should be set when processing the
   variable definitions in a package specification.  EXTERN_FLAG is nonzero
   when processing an external variable declaration (as opposed to a
   definition: no storage is to be allocated for the variable here).

   STATIC_FLAG is only relevant when not at top level.  In that case
   it indicates whether to always allocate storage to the variable.

   ATTR_LIST is the list of attributes to be attached to the variable.  */

tree
create_var_decl (tree var_name,
		 tree asm_name,
		 tree type,
		 tree var_init,
		 int const_flag,
		 int public_flag,
		 int extern_flag,
		 int static_flag,
		 struct attrib *attr_list)
{
  /* Whether VAR_INIT is a usable constant initializer: its type must match
     the object's, and at global or static level it must additionally be
     something the assembler can emit (initializer_constant_valid_p);
     otherwise TREE_CONSTANT suffices.  */
  int init_const
    = (var_init == 0
       ? 0
       : (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
	  && (global_bindings_p () || static_flag
	      ? 0 != initializer_constant_valid_p (var_init,
						   TREE_TYPE (var_init))
	      : TREE_CONSTANT (var_init))));
  tree var_decl
    = build_decl ((const_flag && init_const
		   /* Only make a CONST_DECL for sufficiently-small objects.
		      We consider complex double "sufficiently-small".  */
		   && TYPE_SIZE (type) != 0
		   && host_integerp (TYPE_SIZE_UNIT (type), 1)
		   && 0 >= compare_tree_int (TYPE_SIZE_UNIT (type),
					     GET_MODE_SIZE (DCmode)))
		  ? CONST_DECL : VAR_DECL, var_name, type);
  /* Set below when the initialization has to be performed with an explicit
     assignment statement rather than through DECL_INITIAL.  */
  tree assign_init = 0;

  /* If this is external, throw away any initializations unless this is a
     CONST_DECL (meaning we have a constant); they will be done elsewhere.  If
     we are defining a global here, leave a constant initialization and save
     any variable elaborations for the elaboration routine.  Otherwise, if
     the initializing expression is not the same as TYPE, generate the
     initialization with an assignment statement, since it knows how
     to do the required adjustments.  If we are just annotating types,
     throw away the initialization if it isn't a constant.  */

  if ((extern_flag && TREE_CODE (var_decl) != CONST_DECL)
      || (type_annotate_only && var_init != 0 && ! TREE_CONSTANT (var_init)))
    var_init = 0;

  if (global_bindings_p () && var_init != 0 && ! init_const)
    {
      /* Non-constant initialization of a global: defer it to the unit's
	 elaboration routine.  */
      add_pending_elaborations (var_decl, var_init);
      var_init = 0;
    }

  else if (var_init != 0
	   && ((TYPE_MAIN_VARIANT (TREE_TYPE (var_init))
		!= TYPE_MAIN_VARIANT (type))
	       || (static_flag && ! init_const)))
    assign_init = var_init, var_init = 0;

  DECL_COMMON (var_decl) = !flag_no_common;
  DECL_INITIAL (var_decl) = var_init;
  TREE_READONLY (var_decl) = const_flag;
  DECL_EXTERNAL (var_decl) = extern_flag;
  TREE_PUBLIC (var_decl) = public_flag || extern_flag;
  TREE_CONSTANT (var_decl) = TREE_CODE (var_decl) == CONST_DECL;
  TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
    = TYPE_VOLATILE (type);

  /* At the global binding level we need to allocate static storage for the
     variable if and only if its not external.  If we are not at the top level
     we allocate automatic storage unless requested not to.  */
  TREE_STATIC (var_decl) = global_bindings_p () ? !extern_flag : static_flag;

  if (asm_name != 0)
    SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);

  process_attributes (var_decl, attr_list);

  /* Add this decl to the current binding level and generate any
     needed code and RTL.  */
  var_decl = pushdecl (var_decl);
  expand_decl (var_decl);

  if (DECL_CONTEXT (var_decl) != 0)
    expand_decl_init (var_decl);

  /* If this is volatile, force it into memory.  */
  if (TREE_SIDE_EFFECTS (var_decl))
    gnat_mark_addressable (var_decl);

  if (TREE_CODE (var_decl) != CONST_DECL)
    rest_of_decl_compilation (var_decl, 0, global_bindings_p (), 0);

  if (assign_init != 0)
    {
      /* If VAR_DECL has a padded type, convert it to the unpadded
	 type so the assignment is done properly.  */
      tree lhs = var_decl;

      if (TREE_CODE (TREE_TYPE (lhs)) == RECORD_TYPE
	  && TYPE_IS_PADDING_P (TREE_TYPE (lhs)))
	lhs = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (lhs))), lhs);

      expand_expr_stmt (build_binary_op (MODIFY_EXPR, NULL_TREE, lhs,
					 assign_init));
    }

  return var_decl;
}
1385 \f
/* Returns a FIELD_DECL node.  FIELD_NAME the field name, FIELD_TYPE is its
   type, and RECORD_TYPE is the type of the parent.  PACKED is nonzero if
   this field is in a record type with a "pragma pack".  If SIZE is nonzero
   it is the specified size for this field.  If POS is nonzero, it is the bit
   position.  If ADDRESSABLE is nonzero, it means we are allowed to take
   the address of this field for aliasing purposes.  */

tree
create_field_decl (tree field_name,
		   tree field_type,
		   tree record_type,
		   int packed,
		   tree size,
		   tree pos,
		   int addressable)
{
  tree field_decl = build_decl (FIELD_DECL, field_name, field_type);

  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TREE_READONLY (field_type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.  */
  if (packed && TYPE_MODE (field_type) == BLKmode)
    DECL_ALIGN (field_decl) = BITS_PER_UNIT;

  /* If a size is specified, use it.  Otherwise, if the record type is packed
     compute a size to use, which may differ from the object's natural size.
     We always set a size in this case to trigger the checks for bitfield
     creation below, which is typically required when no position has been
     specified.  */
  if (size != 0)
    size = convert (bitsizetype, size);
  else if (packed == 1)
    {
      size = rm_size (field_type);

      /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
	 byte.  */
      if (TREE_CODE (size) == INTEGER_CST
	  && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
	size = round_up (size, BITS_PER_UNIT);
    }

  /* Make a bitfield if a size is specified for two reasons: first if the size
     differs from the natural size.  Second, if the alignment is insufficient.
     There are a number of ways the latter can be true.

     We never make a bitfield if the type of the field has a nonconstant size,
     or if it is claimed to be addressable, because no such entity requiring
     bitfield operations should reach here.

     We do *preventively* make a bitfield when there might be the need for it
     but we don't have all the necessary information to decide, as is the case
     of a field with no specified position in a packed record.

     We also don't look at STRICT_ALIGNMENT here, and rely on later processing
     in layout_decl or finish_record_type to clear the bit_field indication if
     it is in fact not needed.  */
  if (size != 0 && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
      && ! addressable
      && (! operand_equal_p (TYPE_SIZE (field_type), size, 0)
	  || (pos != 0
	      && ! value_zerop (size_binop (TRUNC_MOD_EXPR, pos,
					    bitsize_int (TYPE_ALIGN
							 (field_type)))))
	  || packed
	  || (TYPE_ALIGN (record_type) != 0
	      && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      if (! packed && pos == 0)
	/* Unpacked, unpositioned bitfield: align it on the field type's
	   alignment, capped by the record's alignment if one is set.  */
	DECL_ALIGN (field_decl)
	  = (TYPE_ALIGN (record_type) != 0
	     ? MIN (TYPE_ALIGN (record_type), TYPE_ALIGN (field_type))
	     : TYPE_ALIGN (field_type));
    }

  DECL_PACKED (field_decl) = pos != 0 ? DECL_BIT_FIELD (field_decl) : packed;
  DECL_ALIGN (field_decl)
    = MAX (DECL_ALIGN (field_decl),
	   DECL_BIT_FIELD (field_decl) ? 1
	   : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT
	   : TYPE_ALIGN (field_type));

  if (pos != 0)
    {
      /* We need to pass in the alignment the DECL is known to have.
	 This is the lowest-order bit set in POS, but no more than
	 the alignment of the record, if one is specified.  Note
	 that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      if (host_integerp (pos, 1))
	known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
      else
	known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
	  && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
	known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
			     host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
			     : BITS_PER_UNIT);
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
		    &DECL_FIELD_BIT_OFFSET (field_decl),
		    DECL_OFFSET_ALIGN (field_decl), pos);

      DECL_HAS_REP_P (field_decl) = 1;
    }

  /* If the field type is passed by reference, we will have pointers to the
     field, so it is addressable.  */
  if (must_pass_by_ref (field_type) || default_pass_by_ref (field_type))
    addressable = 1;

  /* ??? For now, we say that any field of aggregate type is addressable
     because the front end may take 'Reference of it.  */
  if (AGGREGATE_TYPE_P (field_type))
    addressable = 1;

  /* Mark the decl as nonaddressable if it is indicated so semantically,
     meaning we won't ever attempt to take the address of the field.

     It may also be "technically" nonaddressable, meaning that even if we
     attempt to take the field's address we will actually get the address of a
     copy.  This is the case for true bitfields, but the DECL_BIT_FIELD value
     we have at this point is not accurate enough, so we don't account for
     this here and let finish_record_type decide.  */
  DECL_NONADDRESSABLE_P (field_decl) = ! addressable;

  return field_decl;
}
1523
1524 /* Subroutine of previous function: return nonzero if EXP, ignoring any side
1525 effects, has the value of zero. */
1526
1527 static int
1528 value_zerop (tree exp)
1529 {
1530 if (TREE_CODE (exp) == COMPOUND_EXPR)
1531 return value_zerop (TREE_OPERAND (exp, 1));
1532
1533 return integer_zerop (exp);
1534 }
1535 \f
/* Returns a PARM_DECL node.  PARAM_NAME is the name of the parameter,
   PARAM_TYPE is its type.  READONLY is nonzero if the parameter is
   readonly (either an IN parameter or an address of a pass-by-ref
   parameter).  */

tree
create_param_decl (tree param_name, tree param_type, int readonly)
{
  tree param_decl = build_decl (PARM_DECL, param_name, param_type);

  /* Honor targetm.calls.promote_prototypes(), as not doing so can
     lead to various ABI violations.  Sub-int integer and enumeration
     parameters are promoted to integer width.  */
  if (targetm.calls.promote_prototypes (param_type)
      && (TREE_CODE (param_type) == INTEGER_TYPE
	  || TREE_CODE (param_type) == ENUMERAL_TYPE)
      && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
    {
      /* We have to be careful about biased types here.  Make a subtype
	 of integer_type_node with the proper biasing.  */
      if (TREE_CODE (param_type) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (param_type))
	{
	  param_type
	    = copy_type (build_range_type (integer_type_node,
					   TYPE_MIN_VALUE (param_type),
					   TYPE_MAX_VALUE (param_type)));

	  TYPE_BIASED_REPRESENTATION_P (param_type) = 1;
	}
      else
	param_type = integer_type_node;
    }

  /* Record the (possibly promoted) type the argument is passed with.  */
  DECL_ARG_TYPE (param_decl) = param_type;
  DECL_ARG_TYPE_AS_WRITTEN (param_decl) = param_type;
  TREE_READONLY (param_decl) = readonly;
  return param_decl;
}
1574 \f
1575 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1576
1577 void
1578 process_attributes (tree decl, struct attrib *attr_list)
1579 {
1580 for (; attr_list; attr_list = attr_list->next)
1581 switch (attr_list->type)
1582 {
1583 case ATTR_MACHINE_ATTRIBUTE:
1584 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->arg,
1585 NULL_TREE),
1586 ATTR_FLAG_TYPE_IN_PLACE);
1587 break;
1588
1589 case ATTR_LINK_ALIAS:
1590 TREE_STATIC (decl) = 1;
1591 assemble_alias (decl, attr_list->name);
1592 break;
1593
1594 case ATTR_WEAK_EXTERNAL:
1595 if (SUPPORTS_WEAK)
1596 declare_weak (decl);
1597 else
1598 post_error ("?weak declarations not supported on this target",
1599 attr_list->error_point);
1600 break;
1601
1602 case ATTR_LINK_SECTION:
1603 if (targetm.have_named_sections)
1604 {
1605 DECL_SECTION_NAME (decl)
1606 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1607 IDENTIFIER_POINTER (attr_list->name));
1608 DECL_COMMON (decl) = 0;
1609 }
1610 else
1611 post_error ("?section attributes are not supported for this target",
1612 attr_list->error_point);
1613 break;
1614 }
1615 }
1616 \f
1617 /* Add some pending elaborations on the list. */
1618
1619 void
1620 add_pending_elaborations (tree var_decl, tree var_init)
1621 {
1622 if (var_init != 0)
1623 Check_Elaboration_Code_Allowed (error_gnat_node);
1624
1625 pending_elaborations
1626 = chainon (pending_elaborations, build_tree_list (var_decl, var_init));
1627 }
1628
1629 /* Obtain any pending elaborations and clear the old list. */
1630
1631 tree
1632 get_pending_elaborations (void)
1633 {
1634 /* Each thing added to the list went on the end; we want it on the
1635 beginning. */
1636 tree result = TREE_CHAIN (pending_elaborations);
1637
1638 TREE_CHAIN (pending_elaborations) = 0;
1639 return result;
1640 }
1641
1642 /* Return true if VALUE is a multiple of FACTOR. FACTOR must be a power
1643 of 2. */
1644
1645 static int
1646 value_factor_p (tree value, int factor)
1647 {
1648 if (host_integerp (value, 1))
1649 return tree_low_cst (value, 1) % factor == 0;
1650
1651 if (TREE_CODE (value) == MULT_EXPR)
1652 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1653 || value_factor_p (TREE_OPERAND (value, 1), factor));
1654
1655 return 0;
1656 }
1657
/* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
   unless we can prove these 2 fields are laid out in such a way that no gap
   exists between the end of PREV_FIELD and the beginning of CURR_FIELD.
   OFFSET is the distance in bits between the end of PREV_FIELD and the
   starting position of CURR_FIELD.  It is ignored if null.  */

static int
potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
{
  /* If this is the first field of the record, there cannot be any gap.  */
  if (!prev_field)
    return 0;

  /* If the previous field is a union type, then return False: The only
     time when such a field is not the last field of the record is when
     there are other components at fixed positions after it (meaning there
     was a rep clause for every field), in which case we don't want the
     alignment constraint to override them.  */
  if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
    return 0;

  /* If the distance between the end of prev_field and the beginning of
     curr_field is constant, then there is a gap if the value of this
     constant is not zero.  */
  if (offset && host_integerp (offset, 1))
    return (!integer_zerop (offset));

  /* If the size and position of the previous field are constant,
     then check the sum of this size and position.  There will be a gap
     iff it is not a multiple of the current field alignment.  */
  if (host_integerp (DECL_SIZE (prev_field), 1)
      && host_integerp (bit_position (prev_field), 1))
    return ((tree_low_cst (bit_position (prev_field), 1)
	     + tree_low_cst (DECL_SIZE (prev_field), 1))
	    % DECL_ALIGN (curr_field) != 0);

  /* If both the position and size of the previous field are multiples
     of the current field alignment, there cannot be any gap.  */
  if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
      && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
    return 0;

  /* Fallback: we cannot prove anything, so report a potential gap.  */
  return 1;
}
1703
1704 /* Return nonzero if there are pending elaborations. */
1705
1706 int
1707 pending_elaborations_p (void)
1708 {
1709 return TREE_CHAIN (pending_elaborations) != 0;
1710 }
1711
1712 /* Save a copy of the current pending elaboration list and make a new
1713 one. */
1714
1715 void
1716 push_pending_elaborations (void)
1717 {
1718 struct e_stack *p = (struct e_stack *) ggc_alloc (sizeof (struct e_stack));
1719
1720 p->next = elist_stack;
1721 p->elab_list = pending_elaborations;
1722 elist_stack = p;
1723 pending_elaborations = build_tree_list (NULL_TREE, NULL_TREE);
1724 }
1725
1726 /* Pop the stack of pending elaborations. */
1727
1728 void
1729 pop_pending_elaborations (void)
1730 {
1731 struct e_stack *p = elist_stack;
1732
1733 pending_elaborations = p->elab_list;
1734 elist_stack = p->next;
1735 }
1736
1737 /* Return the current position in pending_elaborations so we can insert
1738 elaborations after that point. */
1739
1740 tree
1741 get_elaboration_location (void)
1742 {
1743 return tree_last (pending_elaborations);
1744 }
1745
/* Insert the current elaborations after ELAB, which is in some elaboration
   list.  */

void
insert_elaboration_list (tree elab)
{
  tree next = TREE_CHAIN (elab);

  if (TREE_CHAIN (pending_elaborations))
    {
      /* Splice the pending entries (everything after the dummy head) in
	 between ELAB and its old successor NEXT, then reset the pending
	 list to empty.  Order matters: the last pending cell must be
	 located before the head's chain is cleared.  */
      TREE_CHAIN (elab) = TREE_CHAIN (pending_elaborations);
      TREE_CHAIN (tree_last (pending_elaborations)) = next;
      TREE_CHAIN (pending_elaborations) = 0;
    }
}
1761
1762 /* Returns a LABEL_DECL node for LABEL_NAME. */
1763
1764 tree
1765 create_label_decl (tree label_name)
1766 {
1767 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1768
1769 DECL_CONTEXT (label_decl) = current_function_decl;
1770 DECL_MODE (label_decl) = VOIDmode;
1771 DECL_SOURCE_LOCATION (label_decl) = input_location;
1772
1773 return label_decl;
1774 }
1775 \f
/* Returns a FUNCTION_DECL node.  SUBPROG_NAME is the name of the subprogram,
   ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
   node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
   PARM_DECL nodes chained through the TREE_CHAIN field).

   INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
   appropriate fields in the FUNCTION_DECL.  */

tree
create_subprog_decl (tree subprog_name,
		     tree asm_name,
		     tree subprog_type,
		     tree param_decl_list,
		     int inline_flag,
		     int public_flag,
		     int extern_flag,
		     struct attrib *attr_list)
{
  tree return_type = TREE_TYPE (subprog_type);
  tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);

  /* If this is a function nested inside an inlined external function, it
     means we aren't going to compile the outer function unless it is
     actually inlined, so do the same for us.  */
  if (current_function_decl != 0 && DECL_INLINE (current_function_decl)
      && DECL_EXTERNAL (current_function_decl))
    extern_flag = 1;

  DECL_EXTERNAL (subprog_decl) = extern_flag;
  TREE_PUBLIC (subprog_decl) = public_flag;
  DECL_INLINE (subprog_decl) = inline_flag;
  /* Propagate the readonly and volatile qualifiers of the function type
     to the decl itself.  */
  TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
  TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
  TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
  DECL_ARGUMENTS (subprog_decl) = param_decl_list;
  /* The anonymous RESULT_DECL holds the return value.  */
  DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);

  if (asm_name != 0)
    SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);

  process_attributes (subprog_decl, attr_list);

  /* Add this decl to the current binding level.  */
  subprog_decl = pushdecl (subprog_decl);

  /* Output the assembler code and/or RTL for the declaration.  */
  rest_of_decl_compilation (subprog_decl, 0, global_bindings_p (), 0);

  return subprog_decl;
}
1826 \f
/* Count how deep we are into nested functions.  This is because
   we shouldn't call the backend function context routines unless we
   are in a nested function.  */

static int function_nesting_depth;

/* Set up the framework for generating code for SUBPROG_DECL, a subprogram
   body.  This routine needs to be invoked before processing the declarations
   appearing in the subprogram.  */

void
begin_subprog_body (tree subprog_decl)
{
  tree param_decl_list;
  tree param_decl;
  tree next_param;

  /* When entering a nested function, save the enclosing function's
     compilation state; the matching pop is in end_subprog_body.  */
  if (function_nesting_depth++ != 0)
    push_function_context ();

  announce_function (subprog_decl);

  /* Make this field nonzero so further routines know that this is not
     tentative.  error_mark_node is replaced below (in poplevel) with the
     adequate BLOCK.  */
  DECL_INITIAL (subprog_decl) = error_mark_node;

  /* This function exists in static storage.  This does not mean `static' in
     the C sense!  */
  TREE_STATIC (subprog_decl) = 1;

  /* Enter a new binding level.  */
  current_function_decl = subprog_decl;
  pushlevel (0);

  /* Push all the PARM_DECL nodes onto the current scope (i.e. the scope of the
     subprogram body) so that they can be recognized as local variables in the
     subprogram.

     The list of PARM_DECL nodes is stored in the right order in
     DECL_ARGUMENTS.  Since ..._DECL nodes get stored in the reverse order in
     which they are transmitted to `pushdecl' we need to reverse the list of
     PARM_DECLs if we want it to be stored in the right order.  The reason why
     we want to make sure the PARM_DECLs are stored in the correct order is
     that this list will be retrieved in a few lines with a call to `getdecl'
     to store it back into the DECL_ARGUMENTS field.  */
  param_decl_list = nreverse (DECL_ARGUMENTS (subprog_decl));

  for (param_decl = param_decl_list; param_decl; param_decl = next_param)
    {
      /* Unchain each PARM_DECL before pushing it; pushdecl does the
	 relinking into the scope's own list.  */
      next_param = TREE_CHAIN (param_decl);
      TREE_CHAIN (param_decl) = NULL;
      pushdecl (param_decl);
    }

  /* Store back the PARM_DECL nodes. They appear in the right order. */
  DECL_ARGUMENTS (subprog_decl) = getdecls ();

  /* Prepare per-function RTL generation state and emit the prologue-time
     expansion for the function.  */
  init_function_start (subprog_decl);
  expand_function_start (subprog_decl, 0);

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_ASSEMBLER_NAME (subprog_decl) != 0
      && MAIN_NAME_P (DECL_ASSEMBLER_NAME (subprog_decl))
      && DECL_CONTEXT (subprog_decl) == NULL_TREE)
    expand_main_function ();
}
1895
/* Finish the definition of the current subprogram and compile it all the way
   to assembler language output.  */

void
end_subprog_body (void)
{
  tree decl;
  tree cico_list;

  /* Leave the binding level opened by begin_subprog_body; the resulting
     BLOCK becomes DECL_INITIAL of the function.  */
  poplevel (1, 0, 1);
  BLOCK_SUPERCONTEXT (DECL_INITIAL (current_function_decl))
    = current_function_decl;

  /* Mark the RESULT_DECL as being in this subprogram. */
  DECL_CONTEXT (DECL_RESULT (current_function_decl)) = current_function_decl;

  expand_function_end ();

  /* If this is a nested function, push a new GC context.  That will keep
     local variables on the stack from being collected while we're doing
     the compilation of this function.  */
  if (function_nesting_depth > 1)
    ggc_push_context ();

  /* If we're only annotating types, don't actually compile this
     function.  */
  if (!type_annotate_only)
    rest_of_compilation (current_function_decl);

  if (function_nesting_depth > 1)
    ggc_pop_context ();

  /* Throw away any VAR_DECLs we made for OUT parameters; they must
     not be seen when we call this function and will be in
     unallocated memory anyway.  */
  for (cico_list = TYPE_CI_CO_LIST (TREE_TYPE (current_function_decl));
       cico_list != 0; cico_list = TREE_CHAIN (cico_list))
    TREE_VALUE (cico_list) = 0;

  if (DECL_STRUCT_FUNCTION (current_function_decl) == 0)
    {
      /* Throw away DECL_RTL in any PARM_DECLs unless this function
	 was saved for inline, in which case the DECL_RTLs are in
	 preserved memory.  */
      for (decl = DECL_ARGUMENTS (current_function_decl);
	   decl != 0; decl = TREE_CHAIN (decl))
	{
	  SET_DECL_RTL (decl, 0);
	  DECL_INCOMING_RTL (decl) = 0;
	}

      /* Similarly, discard DECL_RTL of the return value.  */
      SET_DECL_RTL (DECL_RESULT (current_function_decl), 0);

      /* But DECL_INITIAL must remain nonzero so we know this
	 was an actual function definition unless toplev.c decided not
	 to inline it.  */
      if (DECL_INITIAL (current_function_decl) != 0)
	DECL_INITIAL (current_function_decl) = error_mark_node;

      DECL_ARGUMENTS (current_function_decl) = 0;
    }

  /* If we are not at the bottom of the function nesting stack, pop up to
     the containing function.  Otherwise show we aren't in any function.  */
  if (--function_nesting_depth != 0)
    pop_function_context ();
  else
    current_function_decl = 0;
}
1966 \f
1967 /* Return a definition for a builtin function named NAME and whose data type
1968 is TYPE. TYPE should be a function type with argument types.
1969 FUNCTION_CODE tells later passes how to compile calls to this function.
1970 See tree.h for its possible values.
1971
1972 If LIBRARY_NAME is nonzero, use that for DECL_ASSEMBLER_NAME,
1973 the name to be called if we can't opencode the function. If
1974 ATTRS is nonzero, use that for the function attribute list. */
1975
1976 tree
1977 builtin_function (const char *name,
1978 tree type,
1979 int function_code,
1980 enum built_in_class class,
1981 const char *library_name,
1982 tree attrs)
1983 {
1984 tree decl = build_decl (FUNCTION_DECL, get_identifier (name), type);
1985
1986 DECL_EXTERNAL (decl) = 1;
1987 TREE_PUBLIC (decl) = 1;
1988 if (library_name)
1989 SET_DECL_ASSEMBLER_NAME (decl, get_identifier (library_name));
1990
1991 pushdecl (decl);
1992 DECL_BUILT_IN_CLASS (decl) = class;
1993 DECL_FUNCTION_CODE (decl) = function_code;
1994 if (attrs)
1995 decl_attributes (&decl, attrs, ATTR_FLAG_BUILT_IN);
1996 return decl;
1997 }
1998
1999 /* Return an integer type with the number of bits of precision given by
2000 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2001 it is a signed type. */
2002
2003 tree
2004 gnat_type_for_size (unsigned precision, int unsignedp)
2005 {
2006 tree t;
2007 char type_name[20];
2008
2009 if (precision <= 2 * MAX_BITS_PER_WORD
2010 && signed_and_unsigned_types[precision][unsignedp] != 0)
2011 return signed_and_unsigned_types[precision][unsignedp];
2012
2013 if (unsignedp)
2014 t = make_unsigned_type (precision);
2015 else
2016 t = make_signed_type (precision);
2017
2018 if (precision <= 2 * MAX_BITS_PER_WORD)
2019 signed_and_unsigned_types[precision][unsignedp] = t;
2020
2021 if (TYPE_NAME (t) == 0)
2022 {
2023 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2024 TYPE_NAME (t) = get_identifier (type_name);
2025 }
2026
2027 return t;
2028 }
2029
2030 /* Likewise for floating-point types. */
2031
2032 static tree
2033 float_type_for_precision (int precision, enum machine_mode mode)
2034 {
2035 tree t;
2036 char type_name[20];
2037
2038 if (float_types[(int) mode] != 0)
2039 return float_types[(int) mode];
2040
2041 float_types[(int) mode] = t = make_node (REAL_TYPE);
2042 TYPE_PRECISION (t) = precision;
2043 layout_type (t);
2044
2045 if (TYPE_MODE (t) != mode)
2046 gigi_abort (414);
2047
2048 if (TYPE_NAME (t) == 0)
2049 {
2050 sprintf (type_name, "FLOAT_%d", precision);
2051 TYPE_NAME (t) = get_identifier (type_name);
2052 }
2053
2054 return t;
2055 }
2056
2057 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2058 an unsigned type; otherwise a signed type is returned. */
2059
2060 tree
2061 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2062 {
2063 if (GET_MODE_CLASS (mode) == MODE_FLOAT)
2064 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2065 else
2066 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2067 }
2068
2069 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2070
2071 tree
2072 gnat_unsigned_type (tree type_node)
2073 {
2074 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2075
2076 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2077 {
2078 type = copy_node (type);
2079 TREE_TYPE (type) = type_node;
2080 }
2081 else if (TREE_TYPE (type_node) != 0
2082 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2083 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2084 {
2085 type = copy_node (type);
2086 TREE_TYPE (type) = TREE_TYPE (type_node);
2087 }
2088
2089 return type;
2090 }
2091
2092 /* Return the signed version of a TYPE_NODE, a scalar type. */
2093
2094 tree
2095 gnat_signed_type (tree type_node)
2096 {
2097 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2098
2099 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2100 {
2101 type = copy_node (type);
2102 TREE_TYPE (type) = type_node;
2103 }
2104 else if (TREE_TYPE (type_node) != 0
2105 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2106 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2107 {
2108 type = copy_node (type);
2109 TREE_TYPE (type) = TREE_TYPE (type_node);
2110 }
2111
2112 return type;
2113 }
2114
2115 /* Return a type the same as TYPE except unsigned or signed according to
2116 UNSIGNEDP. */
2117
2118 tree
2119 gnat_signed_or_unsigned_type (int unsignedp, tree type)
2120 {
2121 if (! INTEGRAL_TYPE_P (type) || TREE_UNSIGNED (type) == unsignedp)
2122 return type;
2123 else
2124 return gnat_type_for_size (TYPE_PRECISION (type), unsignedp);
2125 }
2126 \f
/* EXP is an expression for the size of an object.  If this size contains
   discriminant references, replace them with the maximum (if MAX_P) or
   minimum (if ! MAX_P) possible value of the discriminant.  */

tree
max_size (tree exp, int max_p)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE_CLASS (code))
    {
    case 'd':
    case 'c':
      /* Declarations and constants contain no discriminant reference;
	 return them unchanged.  */
      return exp;

    case 'x':
      /* For a TREE_LIST, process value and chain recursively; any other
	 'x'-class node falls through to the abort below.  */
      if (code == TREE_LIST)
	return tree_cons (TREE_PURPOSE (exp),
			  max_size (TREE_VALUE (exp), max_p),
			  TREE_CHAIN (exp) != 0
			  ? max_size (TREE_CHAIN (exp), max_p) : 0);
      break;

    case 'r':
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
	 modify.  Otherwise, we treat it like a variable.  */
      if (! CONTAINS_PLACEHOLDER_P (exp))
	return exp;

      /* Substitute the extreme value of the referenced discriminant's
	 type.  NOTE(review): this assumes operand 1 is the discriminant
	 FIELD_DECL of a COMPONENT_REF — confirm against callers.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      return
	max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), 1);

    case '<':
      /* A comparison yields 0 or 1, so the extremes are known.  */
      return max_p ? size_one_node : size_zero_node;

    case '1':
    case '2':
    case 'e':
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  if (code == NON_LVALUE_EXPR)
	    return max_size (TREE_OPERAND (exp, 0), max_p);
	  else
	    /* Negation flips which extreme of the operand is wanted.  */
	    return
	      fold (build1 (code, type,
			    max_size (TREE_OPERAND (exp, 0),
				      code == NEGATE_EXPR ? ! max_p : max_p)));

	case 2:
	  if (code == RTL_EXPR)
	    gigi_abort (407);
	  else if (code == COMPOUND_EXPR)
	    return max_size (TREE_OPERAND (exp, 1), max_p);
	  else if (code == WITH_RECORD_EXPR)
	    return exp;

	  {
	    /* For MINUS_EXPR, the subtrahend is processed with the
	       opposite extreme.  */
	    tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
	    tree rhs = max_size (TREE_OPERAND (exp, 1),
				 code == MINUS_EXPR ? ! max_p : max_p);

	    /* Special-case wanting the maximum value of a MIN_EXPR.
	       In that case, if one side overflows, return the other.
	       sizetype is signed, but we know sizes are non-negative.
	       Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
	       overflowing or the maximum possible value and the RHS
	       a variable.  */
	    if (max_p && code == MIN_EXPR && TREE_OVERFLOW (rhs))
	      return lhs;
	    else if (max_p && code == MIN_EXPR && TREE_OVERFLOW (lhs))
	      return rhs;
	    else if ((code == MINUS_EXPR || code == PLUS_EXPR)
		     && ((TREE_CONSTANT (lhs) && TREE_OVERFLOW (lhs))
			 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
		     && ! TREE_CONSTANT (rhs))
	      return lhs;
	    else
	      return fold (build (code, type, lhs, rhs));
	  }

	case 3:
	  if (code == SAVE_EXPR)
	    return exp;
	  else if (code == COND_EXPR)
	    /* Either arm could apply, so take the larger of the two.  */
	    return fold (build (MAX_EXPR, type,
				max_size (TREE_OPERAND (exp, 1), max_p),
				max_size (TREE_OPERAND (exp, 2), max_p)));
	  else if (code == CALL_EXPR && TREE_OPERAND (exp, 1) != 0)
	    return build (CALL_EXPR, type, TREE_OPERAND (exp, 0),
			  max_size (TREE_OPERAND (exp, 1), max_p));
	}
    }

  /* Any expression shape not handled above is unexpected here.  */
  gigi_abort (408);
}
2225 \f
/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
   EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
   Return a constructor for the template.  */

tree
build_template (tree template_type, tree array_type, tree expr)
{
  tree template_elts = NULL_TREE;
  tree bound_list = NULL_TREE;
  tree field;

  /* Strip a padding or left-justified-modular wrapper to get at the
     underlying array type.  */
  if (TREE_CODE (array_type) == RECORD_TYPE
      && (TYPE_IS_PADDING_P (array_type)
	  || TYPE_LEFT_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
	  && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.  Go down the
     field list of the template instead of the type chain because this
     array might be an Ada array of arrays and we can't tell where the
     nested arrays stop being the underlying object.  */

  /* Template fields come in (low bound, high bound) pairs, hence the
     double TREE_CHAIN step; the bound list or the array-type chain is
     advanced in lockstep, one dimension per pair.  */
  for (field = TYPE_FIELDS (template_type); field;
       (bound_list != 0
	? (bound_list = TREE_CHAIN (bound_list))
	: (array_type = TREE_TYPE (array_type))),
       field = TREE_CHAIN (TREE_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
	 for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
	 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
	 This will give us a maximum range.  */
      if (bound_list != 0)
	bounds = TREE_VALUE (bound_list);
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
	bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
      else if (expr != 0 && TREE_CODE (expr) == PARM_DECL
	       && DECL_BY_COMPONENT_PTR_P (expr))
	bounds = TREE_TYPE (field);
      else
	gigi_abort (411);

      /* NOTE(review): MIN converts to the second field's type and MAX to
	 the first's — this matches the pairing used when the constructor
	 elements are built below, but confirm the template layout.  */
      min = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (field), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
	 surround them with a WITH_RECORD_EXPR giving EXPR as the
	 OBJECT.  */
      if (CONTAINS_PLACEHOLDER_P (min))
	min = build (WITH_RECORD_EXPR, TREE_TYPE (min), min, expr);
      if (CONTAINS_PLACEHOLDER_P (max))
	max = build (WITH_RECORD_EXPR, TREE_TYPE (max), max, expr);

      template_elts = tree_cons (TREE_CHAIN (field), max,
				 tree_cons (field, min, template_elts));
    }

  return gnat_build_constructor (template_type, nreverse (template_elts));
}
2291 \f
/* Build a VMS descriptor from a Mechanism_Type, which must specify
   a descriptor type, and the GCC type of an object.  Each FIELD_DECL
   in the type contains in its DECL_INITIAL the expression to use when
   a constructor is made for the type.  GNAT_ENTITY is a gnat node used
   to print out an error message if the mechanism cannot be applied to
   an object of that type and also for the name.  */

tree
build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
{
  tree record_type = make_node (RECORD_TYPE);
  tree field_list = 0;
  int class;
  int dtype = 0;
  tree inner_type;
  int ndim;
  int i;
  tree *idx_arr;
  tree tem;

  /* If TYPE is an unconstrained array, use the underlying array type.  */
  if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));

  /* If this is an array, compute the number of dimensions in the array,
     get the index types, and point to the inner type.  */
  if (TREE_CODE (type) != ARRAY_TYPE)
    ndim = 0;
  else
    for (ndim = 1, inner_type = type;
	 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
	 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
	 ndim++, inner_type = TREE_TYPE (inner_type))
      ;

  idx_arr = (tree *) alloca (ndim * sizeof (tree));

  /* Fortran-convention arrays store dimensions in the reverse order
     (except for NCA descriptors).  */
  if (mech != By_Descriptor_NCA
      && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
    for (i = ndim - 1, inner_type = type;
	 i >= 0;
	 i--, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);
  else
    for (i = 0, inner_type = type;
	 i < ndim;
	 i++, inner_type = TYPE_DOMAIN (inner_type) == 0 ? TREE_TYPE (inner_type) : TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);

  /* Now get the DTYPE value.  */
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
      if (TYPE_VAX_FLOATING_POINT_P (type))
	switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
	  {
	  case 6:
	    dtype = 10;
	    break;
	  case 9:
	    dtype = 11;
	    break;
	  case 15:
	    dtype = 27;
	    break;
	  }
      else
	switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
	  {
	  case 8:
	    dtype = TREE_UNSIGNED (type) ? 2 : 6;
	    break;
	  case 16:
	    dtype = TREE_UNSIGNED (type) ? 3 : 7;
	    break;
	  case 32:
	    dtype = TREE_UNSIGNED (type) ? 4 : 8;
	    break;
	  case 64:
	    dtype = TREE_UNSIGNED (type) ? 5 : 9;
	    break;
	  case 128:
	    dtype = TREE_UNSIGNED (type) ? 25 : 26;
	    break;
	  }
      break;

    case REAL_TYPE:
      dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
      break;

    case COMPLEX_TYPE:
      if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
	  && TYPE_VAX_FLOATING_POINT_P (type))
	switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
	  {
	  case 6:
	    dtype = 12;
	    break;
	  case 9:
	    dtype = 13;
	    break;
	  case 15:
	    dtype = 29;
	  }
      else
	dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
      break;

    case ARRAY_TYPE:
      dtype = 14;
      break;

    default:
      break;
    }

  /* Get the CLASS value.  */
  switch (mech)
    {
    case By_Descriptor_A:
      class = 4;
      break;
    case By_Descriptor_NCA:
      class = 10;
      break;
    case By_Descriptor_SB:
      class = 15;
      break;
    default:
      class = 1;
    }

  /* Make the type for a descriptor for VMS.  The first four fields
     are the same for all types.  */

  field_list
    = chainon (field_list,
	       make_descriptor_field
	       ("LENGTH", gnat_type_for_size (16, 1), record_type,
		size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));

  field_list = chainon (field_list,
			make_descriptor_field ("DTYPE",
					       gnat_type_for_size (8, 1),
					       record_type, size_int (dtype)));
  field_list = chainon (field_list,
			make_descriptor_field ("CLASS",
					       gnat_type_for_size (8, 1),
					       record_type, size_int (class)));

  field_list
    = chainon (field_list,
	       make_descriptor_field ("POINTER",
				      build_pointer_type (type),
				      record_type,
				      build1 (ADDR_EXPR,
					      build_pointer_type (type),
					      build (PLACEHOLDER_EXPR,
						     type))));

  switch (mech)
    {
    case By_Descriptor:
    case By_Descriptor_S:
      break;

    case By_Descriptor_SB:
      /* String-with-bounds descriptors carry the low and high bound of
	 a one-dimensional array.  */
      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("SB_L1", gnat_type_for_size (32, 1), record_type,
		    TREE_CODE (type) == ARRAY_TYPE
		    ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("SB_L2", gnat_type_for_size (32, 1), record_type,
		    TREE_CODE (type) == ARRAY_TYPE
		    ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
      break;

    case By_Descriptor_A:
    case By_Descriptor_NCA:
      field_list = chainon (field_list,
			    make_descriptor_field ("SCALE",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_zero_node));

      field_list = chainon (field_list,
			    make_descriptor_field ("DIGITS",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_zero_node));

      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("AFLAGS", gnat_type_for_size (8, 1), record_type,
		    size_int (mech == By_Descriptor_NCA
			      ? 0
			      /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS.  */
			      : (TREE_CODE (type) == ARRAY_TYPE
				 && TYPE_CONVENTION_FORTRAN_P (type)
				 ? 224 : 192))));

      field_list = chainon (field_list,
			    make_descriptor_field ("DIMCT",
						   gnat_type_for_size (8, 1),
						   record_type,
						   size_int (ndim)));

      field_list = chainon (field_list,
			    make_descriptor_field ("ARSIZE",
						   gnat_type_for_size (32, 1),
						   record_type,
						   size_in_bytes (type)));

      /* Now build a pointer to the 0,0,0... element.  */
      tem = build (PLACEHOLDER_EXPR, type);
      for (i = 0, inner_type = type; i < ndim;
	   i++, inner_type = TREE_TYPE (inner_type))
	tem = build (ARRAY_REF, TREE_TYPE (inner_type), tem,
		     convert (TYPE_DOMAIN (inner_type), size_zero_node));

      field_list
	= chainon (field_list,
		   make_descriptor_field
		   ("A0", build_pointer_type (inner_type), record_type,
		    build1 (ADDR_EXPR, build_pointer_type (inner_type), tem)));

      /* Next come the addressing coefficients.  */
      tem = size_int (1);
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];
	  /* Coefficient = product of the lengths of the dimensions seen
	     so far (length = max - min + 1).  */
	  tree idx_length
	    = size_binop (MULT_EXPR, tem,
			  size_binop (PLUS_EXPR,
				      size_binop (MINUS_EXPR,
						  TYPE_MAX_VALUE (idx_arr[i]),
						  TYPE_MIN_VALUE (idx_arr[i])),
				      size_int (1)));

	  fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
	  fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = chainon (field_list,
		       make_descriptor_field (fname,
					      gnat_type_for_size (32, 1),
					      record_type, idx_length));

	  if (mech == By_Descriptor_NCA)
	    tem = idx_length;
	}

      /* Finally here are the bounds.  */
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];

	  fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = chainon (field_list,
		       make_descriptor_field
		       (fname, gnat_type_for_size (32, 1), record_type,
			TYPE_MIN_VALUE (idx_arr[i])));

	  fname[0] = 'U';
	  field_list
	    = chainon (field_list,
		       make_descriptor_field
		       (fname, gnat_type_for_size (32, 1), record_type,
			TYPE_MAX_VALUE (idx_arr[i])));
	}
      break;

    default:
      post_error ("unsupported descriptor type for &", gnat_entity);
    }

  finish_record_type (record_type, field_list, 0, 1);
  pushdecl (build_decl (TYPE_DECL, create_concat_name (gnat_entity, "DESC"),
			record_type));

  return record_type;
}
2581
2582 /* Utility routine for above code to make a field. */
2583
2584 static tree
2585 make_descriptor_field (const char *name, tree type,
2586 tree rec_type, tree initial)
2587 {
2588 tree field
2589 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
2590
2591 DECL_INITIAL (field) = initial;
2592 return field;
2593 }
2594 \f
2595 /* Build a type to be used to represent an aliased object whose nominal
2596 type is an unconstrained array. This consists of a RECORD_TYPE containing
2597 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
2598 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
2599 is used to represent an arbitrary unconstrained object. Use NAME
2600 as the name of the record. */
2601
2602 tree
2603 build_unc_object_type (tree template_type, tree object_type, tree name)
2604 {
2605 tree type = make_node (RECORD_TYPE);
2606 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
2607 template_type, type, 0, 0, 0, 1);
2608 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
2609 type, 0, 0, 0, 1);
2610
2611 TYPE_NAME (type) = name;
2612 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
2613 finish_record_type (type,
2614 chainon (chainon (NULL_TREE, template_field),
2615 array_field),
2616 0, 0);
2617
2618 return type;
2619 }
2620 \f
/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.  In
   the normal case this is just two adjustments, but we have more to do
   if NEW is an UNCONSTRAINED_ARRAY_TYPE.  */

void
update_pointer_to (tree old_type, tree new_type)
{
  tree ptr = TYPE_POINTER_TO (old_type);
  tree ref = TYPE_REFERENCE_TO (old_type);
  tree type;

  /* If this is the main variant, process all the other variants first.  */
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
    for (type = TYPE_NEXT_VARIANT (old_type); type != 0;
	 type = TYPE_NEXT_VARIANT (type))
      update_pointer_to (type, new_type);

  /* If no pointer or reference, we are done.  */
  if (ptr == 0 && ref == 0)
    return;

  /* Merge the old type qualifiers in the new type.

     Each old variant has qualifiers for specific reasons, and the new
     designated type as well.  Each set of qualifiers represents useful
     information grabbed at some point, and merging the two simply unifies
     these inputs into the final type description.

     Consider for instance a volatile type frozen after an access to constant
     type designating it.  After the designated type freeze, we get here with a
     volatile new_type and a dummy old_type with a readonly variant, created
     when the access type was processed.  We shall make a volatile and readonly
     designated type, because that's what it really is.

     We might also get here for a non-dummy old_type variant with different
     qualifiers than the new_type ones, for instance in some cases of pointers
     to private record type elaboration (see the comments around the call to
     this routine from gnat_to_gnu_entity/E_Access_Type).  We have to merge the
     qualifiers in thoses cases too, to avoid accidentally discarding the
     initial set, and will often end up with old_type == new_type then.  */
  new_type = build_qualified_type (new_type,
				   TYPE_QUALS (old_type)
				   | TYPE_QUALS (new_type));

  /* If the new type and the old one are identical, there is nothing to
     update.  */
  if (old_type == new_type)
    return;

  /* Otherwise, first handle the simple case.  */
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
    {
      /* Repoint the pointer/reference types at NEW_TYPE and re-emit their
	 TYPE_DECLs so debug info reflects the change.  */
      if (ptr != 0)
	TREE_TYPE (ptr) = new_type;
      TYPE_POINTER_TO (new_type) = ptr;

      if (ref != 0)
	TREE_TYPE (ref) = new_type;
      TYPE_REFERENCE_TO (new_type) = ref;

      if (ptr != 0 && TYPE_NAME (ptr) != 0
	  && TREE_CODE (TYPE_NAME (ptr)) == TYPE_DECL
	  && TREE_CODE (new_type) != ENUMERAL_TYPE)
	rest_of_decl_compilation (TYPE_NAME (ptr), NULL,
				  global_bindings_p (), 0);
      if (ref != 0 && TYPE_NAME (ref) != 0
	  && TREE_CODE (TYPE_NAME (ref)) == TYPE_DECL
	  && TREE_CODE (new_type) != ENUMERAL_TYPE)
	rest_of_decl_compilation (TYPE_NAME (ref), NULL,
				  global_bindings_p (), 0);
    }

  /* Now deal with the unconstrained array case. In this case the "pointer"
     is actually a RECORD_TYPE where the types of both fields are
     pointers to void.  In that case, copy the field list from the
     old type to the new one and update the fields' context. */
  else if (TREE_CODE (ptr) != RECORD_TYPE || ! TYPE_IS_FAT_POINTER_P (ptr))
    gigi_abort (412);

  else
    {
      tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
      tree ptr_temp_type;
      tree new_ref;
      tree var;

      /* Adopt the fat pointer's two fields (array pointer, template
	 pointer) from the new type's pointer.  */
      TYPE_FIELDS (ptr) = TYPE_FIELDS (TYPE_POINTER_TO (new_type));
      DECL_CONTEXT (TYPE_FIELDS (ptr)) = ptr;
      DECL_CONTEXT (TREE_CHAIN (TYPE_FIELDS (ptr))) = ptr;

      /* Rework the PLACEHOLDER_EXPR inside the reference to the
	 template bounds.

	 ??? This is now the only use of gnat_substitute_in_type, which
	 is now a very "heavy" routine to do this, so it should be replaced
	 at some point.  */
      ptr_temp_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (ptr)));
      new_ref = build (COMPONENT_REF, ptr_temp_type,
		       build (PLACEHOLDER_EXPR, ptr),
		       TREE_CHAIN (TYPE_FIELDS (ptr)));

      update_pointer_to
	(TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
	 gnat_substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
				  TREE_CHAIN (TYPE_FIELDS (ptr)), new_ref));

      /* Point every variant of the fat pointer back at NEW_TYPE.  */
      for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
	SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);

      TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
	= TREE_TYPE (new_type) = ptr;

      /* Now handle updating the allocation record, what the thin pointer
	 points to.  Update all pointers from the old record into the new
	 one, update the types of the fields, and recompute the size.  */

      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);

      TREE_TYPE (TYPE_FIELDS (new_obj_rec)) = TREE_TYPE (ptr_temp_type);
      TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr)));
      DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TYPE_SIZE (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));
      DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))));

      /* The record size is template size plus array size, in both bit
	 and byte forms.  */
      TYPE_SIZE (new_obj_rec)
	= size_binop (PLUS_EXPR,
		      DECL_SIZE (TYPE_FIELDS (new_obj_rec)),
		      DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
      TYPE_SIZE_UNIT (new_obj_rec)
	= size_binop (PLUS_EXPR,
		      DECL_SIZE_UNIT (TYPE_FIELDS (new_obj_rec)),
		      DECL_SIZE_UNIT (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))));
      rest_of_type_compilation (ptr, global_bindings_p ());
    }
}
2758 \f
/* Convert a pointer to a constrained array into a pointer to a fat
   pointer.  This involves making or finding a template.  */

static tree
convert_to_fat_pointer (tree type, tree expr)
{
  /* TYPE is the fat pointer record: its first field is the pointer to the
     array data, its second field points to the bounds template.  Dig the
     template type out of the second field.  */
  tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
  tree template, template_addr;
  tree etype = TREE_TYPE (expr);

  /* If EXPR is a constant of zero, we make a fat pointer that has a null
     pointer to the template and array.  */
  if (integer_zerop (expr))
    return
      gnat_build_constructor
	(type,
	 tree_cons (TYPE_FIELDS (type),
		    convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
		    tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
			       convert (build_pointer_type (template_type),
					expr),
			       NULL_TREE)));

  /* If EXPR is a thin pointer, make the template and data from the record.  */

  else if (TYPE_THIN_POINTER_P (etype))
    {
      tree fields = TYPE_FIELDS (TREE_TYPE (etype));

      /* Save EXPR: it is referenced twice below (template and data).  */
      expr = save_expr (expr);

      /* If EXPR is the address of a record, use that record directly;
	 otherwise dereference the thin pointer to get at the record.  */
      if (TREE_CODE (expr) == ADDR_EXPR)
	expr = TREE_OPERAND (expr, 0);
      else
	expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);

      /* The first field of the record is the template; the address of the
	 second field (the array data) becomes the fat data pointer.  */
      template = build_component_ref (expr, NULL_TREE, fields, 0);
      expr = build_unary_op (ADDR_EXPR, NULL_TREE,
			     build_component_ref (expr, NULL_TREE,
						  TREE_CHAIN (fields), 0));
    }
  else
    /* Otherwise, build the constructor for the template.  */
    template = build_template (template_type, TREE_TYPE (etype), expr);

  template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);

  /* The result is a CONSTRUCTOR for the fat pointer.

     If expr is an argument of a foreign convention subprogram, the type it
     points to is directly the component type.  In this case, the expression
     type may not match the corresponding FIELD_DECL type at this point, so we
     call "convert" here to fix that up if necessary.  This type consistency is
     required, for instance because it ensures that possible later folding of
     component_refs against this constructor always yields something of the
     same type as the initial reference.

     Note that the call to "build_template" above is still fine, because it
     will only refer to the provided template_type in this case.  */
  return
    gnat_build_constructor
      (type, tree_cons (TYPE_FIELDS (type),
			convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
			tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
				   template_addr, NULL_TREE)));
}
2824 \f
2825 /* Convert to a thin pointer type, TYPE. The only thing we know how to convert
2826 is something that is a fat pointer, so convert to it first if it EXPR
2827 is not already a fat pointer. */
2828
2829 static tree
2830 convert_to_thin_pointer (tree type, tree expr)
2831 {
2832 if (! TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
2833 expr
2834 = convert_to_fat_pointer
2835 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
2836
2837 /* We get the pointer to the data and use a NOP_EXPR to make it the
2838 proper GCC type. */
2839 expr
2840 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)), 0);
2841 expr = build1 (NOP_EXPR, type, expr);
2842
2843 return expr;
2844 }
2845 \f
/* Create an expression whose value is that of EXPR,
   converted to type TYPE.  The TREE_TYPE of the value
   is always TYPE.  This function implements all reasonable
   conversions; callers should filter out those that are
   not permitted by the language being compiled.  */

tree
convert (tree type, tree expr)
{
  enum tree_code code = TREE_CODE (type);
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);
  tree tem;

  /* If EXPR is already the right type, we are done.  */
  if (type == etype)
    return expr;
  /* If we're converting between two aggregate types that have the same main
     variant, just make a NOP_EXPR.  */
  else if (AGGREGATE_TYPE_P (type)
	   && TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
    return build1 (NOP_EXPR, type, expr);
  /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
     new one.  */
  else if (TREE_CODE (expr) == WITH_RECORD_EXPR)
    return build (WITH_RECORD_EXPR, type,
		  convert (type, TREE_OPERAND (expr, 0)),
		  TREE_OPERAND (expr, 1));

  /* If the input type has padding, remove it by doing a component reference
     to the field.  If the output type has padding, make a constructor
     to build the record.  If both input and output have padding and are
     of variable size, do this as an unchecked conversion.  */
  else if (ecode == RECORD_TYPE && code == RECORD_TYPE
	   && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
	   && (! TREE_CONSTANT (TYPE_SIZE (type))
	       || ! TREE_CONSTANT (TYPE_SIZE (etype))))
    ;  /* Deliberately empty: fall through to the common code below, which
	  will end in unchecked_convert for this padded-to-padded case.  */
  else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
    {
      /* If we have just converted to this padded type, just get
	 the inner expression.  */
      if (TREE_CODE (expr) == CONSTRUCTOR
	  && CONSTRUCTOR_ELTS (expr) != 0
	  && TREE_PURPOSE (CONSTRUCTOR_ELTS (expr)) == TYPE_FIELDS (etype))
	return TREE_VALUE (CONSTRUCTOR_ELTS (expr));
      else
	return convert (type, build_component_ref (expr, NULL_TREE,
						   TYPE_FIELDS (etype), 0));
    }
  else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
    {
      /* If we previously converted from another type and our type is
	 of variable size, remove the conversion to avoid the need for
	 variable-size temporaries.  */
      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
	  && ! TREE_CONSTANT (TYPE_SIZE (type)))
	expr = TREE_OPERAND (expr, 0);

      /* If we are just removing the padding from expr, convert the original
	 object if we have variable size.  That will avoid the need
	 for some variable-size temporaries.  */
      if (TREE_CODE (expr) == COMPONENT_REF
	  && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
	  && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
	  && ! TREE_CONSTANT (TYPE_SIZE (type)))
	return convert (type, TREE_OPERAND (expr, 0));

      /* If the result type is a padded type with a self-referentially-sized
	 field and the expression type is a record, do this as an
	 unchecked conversion.  */
      else if (TREE_CODE (etype) == RECORD_TYPE
	       && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
	return unchecked_convert (type, expr, 0);

      else
	/* Wrap EXPR in a one-field constructor of the padded type.  */
	return
	  gnat_build_constructor (type,
				  tree_cons (TYPE_FIELDS (type),
					     convert (TREE_TYPE
						      (TYPE_FIELDS (type)),
						      expr),
					     NULL_TREE));
    }

  /* If the input is a biased type, adjust first by adding the bias
     (the type's minimum value) back to get the actual value.  */
  if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
    return convert (type, fold (build (PLUS_EXPR, TREE_TYPE (etype),
				       fold (build1 (GNAT_NOP_EXPR,
						     TREE_TYPE (etype), expr)),
				       TYPE_MIN_VALUE (etype))));

  /* If the input is a left-justified modular type, we need to extract
     the actual object before converting it to any other type with the
     exception of an unconstrained array.  */
  if (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)
      && code != UNCONSTRAINED_ARRAY_TYPE)
    return convert (type, build_component_ref (expr, NULL_TREE,
					       TYPE_FIELDS (etype), 0));

  /* If converting to a type that contains a template, convert to the data
     type and then build the template.  */
  if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
    {
      tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));

      /* If the source already has a template, get a reference to the
	 associated array only, as we are going to rebuild a template
	 for the target type anyway.  */
      expr = maybe_unconstrained_array (expr);

      return
	gnat_build_constructor
	  (type,
	   tree_cons (TYPE_FIELDS (type),
		      build_template (TREE_TYPE (TYPE_FIELDS (type)),
				      obj_type, NULL_TREE),
		      tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
				 convert (obj_type, expr), NULL_TREE)));
    }

  /* There are some special cases of expressions that we process
     specially.  */
  switch (TREE_CODE (expr))
    {
    case ERROR_MARK:
      return expr;

    case TRANSFORM_EXPR:
    case NULL_EXPR:
      /* Just set its type here.  For TRANSFORM_EXPR, we will do the actual
	 conversion in gnat_expand_expr.  NULL_EXPR does not represent
	 an actual value, so no conversion is needed.  */
      expr = copy_node (expr);
      TREE_TYPE (expr) = type;
      return expr;

    case STRING_CST:
    case CONSTRUCTOR:
      /* If we are converting a STRING_CST to another constrained array type,
	 just make a new one in the proper type.  Likewise for a
	 CONSTRUCTOR.  But do not do this if going from fixed to variable
	 size, where the constructor's form would no longer be valid.  */
      if (code == ecode && AGGREGATE_TYPE_P (etype)
	  && ! (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
		&& TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  return expr;
	}
      break;

    case COMPONENT_REF:
      /* If we are converting between two aggregate types of the same
	 kind, size, mode, and alignment, just make a new COMPONENT_REF.
	 This avoids unneeded conversions which make reference computations
	 more complex.  */
      if (code == ecode && TYPE_MODE (type) == TYPE_MODE (etype)
	  && AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
	  && TYPE_ALIGN (type) == TYPE_ALIGN (etype)
	  && operand_equal_p (TYPE_SIZE (type), TYPE_SIZE (etype), 0))
	return build (COMPONENT_REF, type, TREE_OPERAND (expr, 0),
		      TREE_OPERAND (expr, 1));

      break;

    case UNCONSTRAINED_ARRAY_REF:
      /* Convert this to the type of the inner array by getting the address of
	 the array from the template.  Note this updates ETYPE/ECODE so the
	 code after the switch operates on the dereferenced array.  */
      expr = build_unary_op (INDIRECT_REF, NULL_TREE,
			     build_component_ref (TREE_OPERAND (expr, 0),
						  get_identifier ("P_ARRAY"),
						  NULL_TREE, 0));
      etype = TREE_TYPE (expr);
      ecode = TREE_CODE (etype);
      break;

    case VIEW_CONVERT_EXPR:
      /* Strip a nested view-conversion between (non-fat) aggregates and
	 convert the underlying expression directly.  */
      if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype)
	  && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
	return convert (type, TREE_OPERAND (expr, 0));
      break;

    case INDIRECT_REF:
      /* If both types are record types, just convert the pointer and
	 make a new INDIRECT_REF.

	 ??? Disable this for now since it causes problems with the
	 code in build_binary_op for MODIFY_EXPR which wants to
	 strip off conversions.  But that code really is a mess and
	 we need to do this a much better way some time.  */
      if (0
	  && (TREE_CODE (type) == RECORD_TYPE
	      || TREE_CODE (type) == UNION_TYPE)
	  && (TREE_CODE (etype) == RECORD_TYPE
	      || TREE_CODE (etype) == UNION_TYPE)
	  && ! TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
	return build_unary_op (INDIRECT_REF, NULL_TREE,
			       convert (build_pointer_type (type),
					TREE_OPERAND (expr, 0)));
      break;

    default:
      break;
    }

  /* Check for converting to a pointer to an unconstrained array.  */
  if (TYPE_FAT_POINTER_P (type) && ! TYPE_FAT_POINTER_P (etype))
    return convert_to_fat_pointer (type, expr);

  /* Same main variant, or INTEGER_CST types that are variants of one
     another: a simple NOP_EXPR suffices.  */
  if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
      || (code == INTEGER_CST && ecode == INTEGER_CST
	  && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
    return fold (build1 (NOP_EXPR, type, expr));

  /* Dispatch on the target type code for the general cases.  */
  switch (code)
    {
    case VOID_TYPE:
      return build1 (CONVERT_EXPR, type, expr);

    case INTEGER_TYPE:
      /* A target integer carrying actual array bounds means this is really
	 a packed-array-to-integer reinterpretation: unchecked convert.  */
      if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
	  && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
	      || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
	return unchecked_convert (type, expr, 0);
      /* Converting to a biased type: subtract the bias (TYPE_MIN_VALUE).  */
      else if (TYPE_BIASED_REPRESENTATION_P (type))
	return fold (build1 (CONVERT_EXPR, type,
			     fold (build (MINUS_EXPR, TREE_TYPE (type),
					  convert (TREE_TYPE (type), expr),
					  TYPE_MIN_VALUE (type)))));

      /* ... fall through ... */

    case ENUMERAL_TYPE:
      return fold (convert_to_integer (type, expr));

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* If converting between two pointers to records denoting
	 both a template and type, adjust if needed to account
	 for any differing offsets, since one might be negative.  */
      if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
	{
	  tree bit_diff
	    = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
			   bit_position (TYPE_FIELDS (TREE_TYPE (type))));
	  tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
				       sbitsize_int (BITS_PER_UNIT));

	  expr = build1 (NOP_EXPR, type, expr);
	  TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
	  if (integer_zerop (byte_diff))
	    return expr;

	  return build_binary_op (PLUS_EXPR, type, expr,
				  fold (convert_to_pointer (type, byte_diff)));
	}

      /* If converting to a thin pointer, handle specially.  */
      if (TYPE_THIN_POINTER_P (type)
	  && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != 0)
	return convert_to_thin_pointer (type, expr);

      /* If converting fat pointer to normal pointer, get the pointer to the
	 array and then convert it.  */
      else if (TYPE_FAT_POINTER_P (etype))
	expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
				    NULL_TREE, 0);

      return fold (convert_to_pointer (type, expr));

    case REAL_TYPE:
      return fold (convert_to_real (type, expr));

    case RECORD_TYPE:
      /* A left-justified modular target wraps a scalar source in a
	 one-field constructor.  */
      if (TYPE_LEFT_JUSTIFIED_MODULAR_P (type) && ! AGGREGATE_TYPE_P (etype))
	return
	  gnat_build_constructor
	    (type, tree_cons (TYPE_FIELDS (type),
			      convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
			      NULL_TREE));

      /* ... fall through ... */

    case ARRAY_TYPE:
      /* In these cases, assume the front-end has validated the conversion.
	 If the conversion is valid, it will be a bit-wise conversion, so
	 it can be viewed as an unchecked conversion.  */
      return unchecked_convert (type, expr, 0);

    case UNION_TYPE:
      /* Just validate that the type is indeed that of a field
	 of the type.  Then make the simple conversion.  */
      for (tem = TYPE_FIELDS (type); tem; tem = TREE_CHAIN (tem))
	{
	  if (TREE_TYPE (tem) == etype)
	    return build1 (CONVERT_EXPR, type, expr);
	  /* Also accept a field that is a left-justified modular or padded
	     record wrapping ETYPE, converting through the wrapper.  */
	  else if (TREE_CODE (TREE_TYPE (tem)) == RECORD_TYPE
		   && (TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (tem))
		       || TYPE_IS_PADDING_P (TREE_TYPE (tem)))
		   && TREE_TYPE (TYPE_FIELDS (TREE_TYPE (tem))) == etype)
	    return build1 (CONVERT_EXPR, type,
			   convert (TREE_TYPE (tem), expr));
	}

      gigi_abort (413);

    case UNCONSTRAINED_ARRAY_TYPE:
      /* If EXPR is a constrained array, take its address, convert it to a
	 fat pointer, and then dereference it.  Likewise if EXPR is a
	 record containing both a template and a constrained array.
	 Note that a record representing a left justified modular type
	 always represents a packed constrained array.  */
      if (ecode == ARRAY_TYPE
	  || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
	  || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
	  || (ecode == RECORD_TYPE && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype)))
	return
	  build_unary_op
	    (INDIRECT_REF, NULL_TREE,
	     convert_to_fat_pointer (TREE_TYPE (type),
				     build_unary_op (ADDR_EXPR,
						     NULL_TREE, expr)));

      /* Do something very similar for converting one unconstrained
	 array to another.  */
      else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
	return
	  build_unary_op (INDIRECT_REF, NULL_TREE,
			  convert (TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR,
						   NULL_TREE, expr)));
      else
	gigi_abort (409);

    case COMPLEX_TYPE:
      return fold (convert_to_complex (type, expr));

    default:
      gigi_abort (410);
    }
}
3188 \f
3189 /* Remove all conversions that are done in EXP. This includes converting
3190 from a padded type or to a left-justified modular type. If TRUE_ADDRESS
3191 is nonzero, always return the address of the containing object even if
3192 the address is not bit-aligned. */
3193
3194 tree
3195 remove_conversions (tree exp, int true_address)
3196 {
3197 switch (TREE_CODE (exp))
3198 {
3199 case CONSTRUCTOR:
3200 if (true_address
3201 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
3202 && TYPE_LEFT_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
3203 return remove_conversions (TREE_VALUE (CONSTRUCTOR_ELTS (exp)), 1);
3204 break;
3205
3206 case COMPONENT_REF:
3207 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
3208 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
3209 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3210 break;
3211
3212 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
3213 case NOP_EXPR: case CONVERT_EXPR: case GNAT_NOP_EXPR:
3214 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
3215
3216 default:
3217 break;
3218 }
3219
3220 return exp;
3221 }
3222 \f
/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
   refers to the underlying array.  If its type has TYPE_CONTAINS_TEMPLATE_P,
   likewise return an expression pointing to the underlying array.  */

tree
maybe_unconstrained_array (tree exp)
{
  enum tree_code code = TREE_CODE (exp);
  tree new;

  switch (TREE_CODE (TREE_TYPE (exp)))
    {
    case UNCONSTRAINED_ARRAY_TYPE:
      if (code == UNCONSTRAINED_ARRAY_REF)
	{
	  /* Dereference the P_ARRAY pointer of the fat pointer to obtain
	     the underlying array, preserving readonly-ness.  */
	  new
	    = build_unary_op (INDIRECT_REF, NULL_TREE,
			      build_component_ref (TREE_OPERAND (exp, 0),
						   get_identifier ("P_ARRAY"),
						   NULL_TREE, 0));
	  TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
	  return new;
	}

      else if (code == NULL_EXPR)
	/* Rebuild the NULL_EXPR with the type of the underlying array,
	   dug out of the fat pointer's P_ARRAY field type.  */
	return build1 (NULL_EXPR,
		       TREE_TYPE (TREE_TYPE (TYPE_FIELDS
					     (TREE_TYPE (TREE_TYPE (exp))))),
		       TREE_OPERAND (exp, 0));

      else if (code == WITH_RECORD_EXPR
	       && (TREE_OPERAND (exp, 0)
		   != (new = maybe_unconstrained_array
		       (TREE_OPERAND (exp, 0)))))
	return build (WITH_RECORD_EXPR, TREE_TYPE (new), new,
		      TREE_OPERAND (exp, 1));

      /* NOTE(review): there is no break here, so an expression of
	 UNCONSTRAINED_ARRAY_TYPE matching none of the cases above falls
	 through into the RECORD_TYPE handling below, whose padding/template
	 predicates are then applied to a non-record type.  Presumably those
	 predicates are false for such a type and EXP is returned unchanged,
	 but confirm this fallthrough is intentional.  */

    case RECORD_TYPE:
      /* If this is a padded type, convert to the unpadded type and see if
	 it contains a template.  */
      if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
	{
	  new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
	  if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
	      && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
	    /* The array is the second field, after the template.  */
	    return
	      build_component_ref (new, NULL_TREE,
				   TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
				   0);
	}
      else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
	return
	  build_component_ref (exp, NULL_TREE,
			       TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
      break;

    default:
      break;
    }

  return exp;
}
3285 \f
/* Return an expression that does an unchecked conversion of EXPR to TYPE.
   If NOTRUNC_P is set, truncation operations should be suppressed.  */

tree
unchecked_convert (tree type, tree expr, int notrunc_p)
{
  tree etype = TREE_TYPE (expr);

  /* If the expression is already the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If EXPR is a WITH_RECORD_EXPR, do the conversion inside and then make a
     new one.  */
  if (TREE_CODE (expr) == WITH_RECORD_EXPR)
    return build (WITH_RECORD_EXPR, type,
		  unchecked_convert (type, TREE_OPERAND (expr, 0), notrunc_p),
		  TREE_OPERAND (expr, 1));

  /* If both types are integral just do a normal conversion.
     Likewise for a conversion to an unconstrained array.  */
  if ((((INTEGRAL_TYPE_P (type)
	 && ! (TREE_CODE (type) == INTEGER_TYPE
	       && TYPE_VAX_FLOATING_POINT_P (type)))
	|| (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
	|| (TREE_CODE (type) == RECORD_TYPE
	    && TYPE_LEFT_JUSTIFIED_MODULAR_P (type)))
       && ((INTEGRAL_TYPE_P (etype)
	    && ! (TREE_CODE (etype) == INTEGER_TYPE
		  && TYPE_VAX_FLOATING_POINT_P (etype)))
	   || (POINTER_TYPE_P (etype) && ! TYPE_THIN_POINTER_P (etype))
	   || (TREE_CODE (etype) == RECORD_TYPE
	       && TYPE_LEFT_JUSTIFIED_MODULAR_P (etype))))
      || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    {
      tree rtype = type;

      /* If the source has a biased representation, first reinterpret it
	 as an unbiased copy of its type so the normal conversion below
	 does not add the bias back.  */
      if (TREE_CODE (etype) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (etype))
	{
	  tree ntype = copy_type (etype);

	  TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
	  TYPE_MAIN_VARIANT (ntype) = ntype;
	  expr = build1 (GNAT_NOP_EXPR, ntype, expr);
	}

      /* Likewise, convert to an unbiased copy of a biased target type
	 and reinterpret the result as the biased type afterwards.  */
      if (TREE_CODE (type) == INTEGER_TYPE
	  && TYPE_BIASED_REPRESENTATION_P (type))
	{
	  rtype = copy_type (type);
	  TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
	  TYPE_MAIN_VARIANT (rtype) = rtype;
	}

      expr = convert (rtype, expr);
      if (type != rtype)
	expr = build1 (GNAT_NOP_EXPR, type, expr);
    }

  /* If we are converting TO an integral type whose precision is not the
     same as its size, first unchecked convert to a record that contains
     an object of the output type.  Then extract the field.  */
  else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
	   && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				     GET_MODE_BITSIZE (TYPE_MODE (type))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field = create_field_decl (get_identifier ("OBJ"), type,
				      rec_type, 1, 0, 0, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = unchecked_convert (rec_type, expr, notrunc_p);
      expr = build_component_ref (expr, NULL_TREE, field, 0);
    }

  /* Similarly for integral input type whose precision is not equal to its
     size.  */
  else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype) != 0
	   && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
				     GET_MODE_BITSIZE (TYPE_MODE (etype))))
    {
      tree rec_type = make_node (RECORD_TYPE);
      tree field
	= create_field_decl (get_identifier ("OBJ"), etype, rec_type,
			     1, 0, 0, 0);

      TYPE_FIELDS (rec_type) = field;
      layout_type (rec_type);

      expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
      expr = unchecked_convert (type, expr, notrunc_p);
    }

  /* We have a special case when we are converting between two
     unconstrained array types.  In that case, take the address,
     convert the fat pointer types, and dereference.  */
  else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
	   && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    expr = build_unary_op (INDIRECT_REF, NULL_TREE,
			   build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR, NULL_TREE,
						   expr)));
  else
    {
      /* General case: view-convert, first stripping a template if the
	 source carries one.  */
      expr = maybe_unconstrained_array (expr);
      etype = TREE_TYPE (expr);
      expr = build1 (VIEW_CONVERT_EXPR, type, expr);
    }

  /* If the result is an integral type whose size is not equal to
     the size of the underlying machine type, sign- or zero-extend
     the result.  We need not do this in the case where the input is
     an integral type of the same precision and signedness or if the output
     is a biased type or if both the input and output are unsigned.  */
  if (! notrunc_p
      && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type) != 0
      && ! (TREE_CODE (type) == INTEGER_TYPE
	    && TYPE_BIASED_REPRESENTATION_P (type))
      && 0 != compare_tree_int (TYPE_RM_SIZE (type),
				GET_MODE_BITSIZE (TYPE_MODE (type)))
      && ! (INTEGRAL_TYPE_P (etype)
	    && TREE_UNSIGNED (type) == TREE_UNSIGNED (etype)
	    && operand_equal_p (TYPE_RM_SIZE (type),
				(TYPE_RM_SIZE (etype) != 0
				 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
				0))
      && ! (TREE_UNSIGNED (type) && TREE_UNSIGNED (etype)))
    {
      /* Extend by shifting the value up to the top of the machine type
	 and back down; the right shift then performs the sign- or
	 zero-extension depending on BASE_TYPE's signedness.  */
      tree base_type = gnat_type_for_mode (TYPE_MODE (type),
					   TREE_UNSIGNED (type));
      tree shift_expr
	= convert (base_type,
		   size_binop (MINUS_EXPR,
			       bitsize_int
			       (GET_MODE_BITSIZE (TYPE_MODE (type))),
			       TYPE_RM_SIZE (type)));
      expr
	= convert (type,
		   build_binary_op (RSHIFT_EXPR, base_type,
				    build_binary_op (LSHIFT_EXPR, base_type,
						     convert (base_type, expr),
						     shift_expr),
				    shift_expr));
    }

  /* An unchecked conversion should never raise Constraint_Error.  The code
     below assumes that GCC's conversion routines overflow the same way that
     the underlying hardware does.  This is probably true.  In the rare case
     when it is false, we can rely on the fact that such conversions are
     erroneous anyway.  */
  if (TREE_CODE (expr) == INTEGER_CST)
    TREE_OVERFLOW (expr) = TREE_CONSTANT_OVERFLOW (expr) = 0;

  /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
     show no longer constant.  */
  if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
      && ! operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype), 1))
    TREE_CONSTANT (expr) = 0;

  return expr;
}
3450
3451 #include "gt-ada-utils.h"
3452 #include "gtype-ada.h"