]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/ada/gcc-interface/utils.c
gigi.h (create_index_type): Adjust head comment.
[thirdparty/gcc.git] / gcc / ada / gcc-interface / utils.c
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2009, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
25
26 /* We have attribute handlers using C specific format specifiers in warning
27 messages. Make sure they are properly recognized. */
28 #define GCC_DIAG_STYLE __gcc_cdiag__
29
30 #include "config.h"
31 #include "system.h"
32 #include "coretypes.h"
33 #include "tm.h"
34 #include "tree.h"
35 #include "flags.h"
36 #include "toplev.h"
37 #include "rtl.h"
38 #include "output.h"
39 #include "ggc.h"
40 #include "debug.h"
41 #include "convert.h"
42 #include "target.h"
43 #include "function.h"
44 #include "langhooks.h"
45 #include "pointer-set.h"
46 #include "cgraph.h"
47 #include "tree-dump.h"
48 #include "tree-inline.h"
49 #include "tree-iterator.h"
50 #include "gimple.h"
51
52 #include "ada.h"
53 #include "types.h"
54 #include "atree.h"
55 #include "elists.h"
56 #include "namet.h"
57 #include "nlists.h"
58 #include "stringt.h"
59 #include "uintp.h"
60 #include "fe.h"
61 #include "sinfo.h"
62 #include "einfo.h"
63 #include "ada-tree.h"
64 #include "gigi.h"
65
66 #ifndef MAX_FIXED_MODE_SIZE
67 #define MAX_FIXED_MODE_SIZE GET_MODE_BITSIZE (DImode)
68 #endif
69
70 #ifndef MAX_BITS_PER_WORD
71 #define MAX_BITS_PER_WORD BITS_PER_WORD
72 #endif
73
74 /* If nonzero, pretend we are allocating at global level. */
75 int force_global;
76
77 /* Tree nodes for the various types and decls we create. */
78 tree gnat_std_decls[(int) ADT_LAST];
79
80 /* Functions to call for each of the possible raise reasons. */
81 tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];
82
83 /* Forward declarations for handlers of attributes. */
84 static tree handle_const_attribute (tree *, tree, tree, int, bool *);
85 static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
86 static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
87 static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
88 static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
89 static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
90 static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
91 static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
92 static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
93
94 /* Fake handler for attributes we don't properly support, typically because
95 they'd require dragging a lot of the common-c front-end circuitry. */
96 static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
97
/* Table of machine-independent internal attributes for Ada.  We support
   this minimal set of attributes to accommodate the needs of builtins.
   Each entry is { name, min_len, max_len, decl_req, type_req, fn_type_req,
   handler }; the table is terminated by an all-NULL sentinel entry.  */
const struct attribute_spec gnat_internal_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler } */
  { "const",        0, 0,  true,  false, false, handle_const_attribute },
  { "nothrow",      0, 0,  true,  false, false, handle_nothrow_attribute },
  { "pure",         0, 0,  true,  false, false, handle_pure_attribute },
  /* Names containing a space ("no vops", "type generic") cannot be written
     in user source code; they are reserved for internal use by builtins.  */
  { "no vops",      0, 0,  true,  false, false, handle_novops_attribute },
  { "nonnull",      0, -1, false, true,  true,  handle_nonnull_attribute },
  { "sentinel",     0, 1,  false, true,  true,  handle_sentinel_attribute },
  { "noreturn",     0, 0,  true,  false, false, handle_noreturn_attribute },
  { "malloc",       0, 0,  true,  false, false, handle_malloc_attribute },
  { "type generic", 0, 0,  false, true,  true,  handle_type_generic_attribute },

  /* ??? format and format_arg are heavy and not supported, which actually
     prevents support for stdio builtins, which we however declare as part
     of the common builtins.def contents.  */
  { "format",     3, 3,  false, true,  true,  fake_attribute_handler },
  { "format_arg", 1, 1,  false, true,  true,  fake_attribute_handler },

  { NULL,         0, 0, false, false, false, NULL }
};
121
122 /* Associates a GNAT tree node to a GCC tree node. It is used in
123 `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'. See documentation
124 of `save_gnu_tree' for more info. */
125 static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;
126
127 #define GET_GNU_TREE(GNAT_ENTITY) \
128 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]
129
130 #define SET_GNU_TREE(GNAT_ENTITY,VAL) \
131 associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)
132
133 #define PRESENT_GNU_TREE(GNAT_ENTITY) \
134 (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
135
136 /* Associates a GNAT entity to a GCC tree node used as a dummy, if any. */
137 static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;
138
139 #define GET_DUMMY_NODE(GNAT_ENTITY) \
140 dummy_node_table[(GNAT_ENTITY) - First_Node_Id]
141
142 #define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
143 dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)
144
145 #define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
146 (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
147
148 /* This variable keeps a table for types for each precision so that we only
149 allocate each of them once. Signed and unsigned types are kept separate.
150
151 Note that these types are only used when fold-const requests something
152 special. Perhaps we should NOT share these types; we'll see how it
153 goes later. */
154 static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];
155
156 /* Likewise for float types, but record these by mode. */
157 static GTY(()) tree float_types[NUM_MACHINE_MODES];
158
159 /* For each binding contour we allocate a binding_level structure to indicate
160 the binding depth. */
161
162 struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
163 /* The binding level containing this one (the enclosing binding level). */
164 struct gnat_binding_level *chain;
165 /* The BLOCK node for this level. */
166 tree block;
167 /* If nonzero, the setjmp buffer that needs to be updated for any
168 variable-sized definition within this context. */
169 tree jmpbuf_decl;
170 };
171
172 /* The binding level currently in effect. */
173 static GTY(()) struct gnat_binding_level *current_binding_level;
174
175 /* A chain of gnat_binding_level structures awaiting reuse. */
176 static GTY((deletable)) struct gnat_binding_level *free_binding_level;
177
178 /* An array of global declarations. */
179 static GTY(()) VEC(tree,gc) *global_decls;
180
181 /* An array of builtin function declarations. */
182 static GTY(()) VEC(tree,gc) *builtin_decls;
183
184 /* An array of global renaming pointers. */
185 static GTY(()) VEC(tree,gc) *global_renaming_pointers;
186
187 /* A chain of unused BLOCK nodes. */
188 static GTY((deletable)) tree free_block_chain;
189
190 static tree merge_sizes (tree, tree, tree, bool, bool);
191 static tree compute_related_constant (tree, tree);
192 static tree split_plus (tree, tree *);
193 static void gnat_gimplify_function (tree);
194 static tree float_type_for_precision (int, enum machine_mode);
195 static tree convert_to_fat_pointer (tree, tree);
196 static tree convert_to_thin_pointer (tree, tree);
197 static tree make_descriptor_field (const char *,tree, tree, tree);
198 static bool potential_alignment_gap (tree, tree, tree);
199 \f
200 /* Initialize the association of GNAT nodes to GCC trees. */
201
202 void
203 init_gnat_to_gnu (void)
204 {
205 associate_gnat_to_gnu
206 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
207 }
208
209 /* GNAT_ENTITY is a GNAT tree node for an entity. GNU_DECL is the GCC tree
210 which is to be associated with GNAT_ENTITY. Such GCC tree node is always
211 a ..._DECL node. If NO_CHECK is true, the latter check is suppressed.
212
213 If GNU_DECL is zero, a previous association is to be reset. */
214
215 void
216 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
217 {
218 /* Check that GNAT_ENTITY is not already defined and that it is being set
219 to something which is a decl. Raise gigi 401 if not. Usually, this
220 means GNAT_ENTITY is defined twice, but occasionally is due to some
221 Gigi problem. */
222 gcc_assert (!(gnu_decl
223 && (PRESENT_GNU_TREE (gnat_entity)
224 || (!no_check && !DECL_P (gnu_decl)))));
225
226 SET_GNU_TREE (gnat_entity, gnu_decl);
227 }
228
229 /* GNAT_ENTITY is a GNAT tree node for a defining identifier.
230 Return the ..._DECL node that was associated with it. If there is no tree
231 node associated with GNAT_ENTITY, abort.
232
233 In some cases, such as delayed elaboration or expressions that need to
234 be elaborated only once, GNAT_ENTITY is really not an entity. */
235
236 tree
237 get_gnu_tree (Entity_Id gnat_entity)
238 {
239 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
240 return GET_GNU_TREE (gnat_entity);
241 }
242
243 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
244
245 bool
246 present_gnu_tree (Entity_Id gnat_entity)
247 {
248 return PRESENT_GNU_TREE (gnat_entity);
249 }
250 \f
251 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
252
253 void
254 init_dummy_type (void)
255 {
256 dummy_node_table
257 = (tree *) ggc_alloc_cleared (max_gnat_nodes * sizeof (tree));
258 }
259
/* Make a dummy type corresponding to GNAT_TYPE.  A dummy type is a
   placeholder tree used while the real type is not yet elaborated; it is
   cached per underlying entity in dummy_node_table so at most one dummy
   exists per entity.  */

tree
make_dummy_type (Entity_Id gnat_type)
{
  Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
  tree gnu_type;

  /* If there is an equivalent type, get its underlying type.  */
  if (Present (gnat_underlying))
    gnat_underlying = Underlying_Type (gnat_underlying);

  /* If there was no equivalent type (can only happen when just annotating
     types) or underlying type, go back to the original type.  */
  if (No (gnat_underlying))
    gnat_underlying = gnat_type;

  /* If there is already a dummy type, use that one.  Else make one.  */
  if (PRESENT_DUMMY_NODE (gnat_underlying))
    return GET_DUMMY_NODE (gnat_underlying);

  /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
     an ENUMERAL_TYPE.  */
  gnu_type = make_node (Is_Record_Type (gnat_underlying)
			? tree_code_for_record_type (gnat_underlying)
			: ENUMERAL_TYPE);
  /* Name the dummy after the original entity, not the underlying one, so
     debug output refers to the source-level name.  */
  TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
  TYPE_DUMMY_P (gnu_type) = 1;
  TYPE_STUB_DECL (gnu_type)
    = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
  if (AGGREGATE_TYPE_P (gnu_type))
    TYPE_BY_REFERENCE_P (gnu_type) = Is_By_Reference_Type (gnat_type);

  /* Cache under the underlying entity so later requests reuse this node.  */
  SET_DUMMY_NODE (gnat_underlying, gnu_type);

  return gnu_type;
}
297 \f
298 /* Return nonzero if we are currently in the global binding level. */
299
300 int
301 global_bindings_p (void)
302 {
303 return ((force_global || !current_function_decl) ? -1 : 0);
304 }
305
306 /* Enter a new binding level. */
307
308 void
309 gnat_pushlevel ()
310 {
311 struct gnat_binding_level *newlevel = NULL;
312
313 /* Reuse a struct for this binding level, if there is one. */
314 if (free_binding_level)
315 {
316 newlevel = free_binding_level;
317 free_binding_level = free_binding_level->chain;
318 }
319 else
320 newlevel
321 = (struct gnat_binding_level *)
322 ggc_alloc (sizeof (struct gnat_binding_level));
323
324 /* Use a free BLOCK, if any; otherwise, allocate one. */
325 if (free_block_chain)
326 {
327 newlevel->block = free_block_chain;
328 free_block_chain = BLOCK_CHAIN (free_block_chain);
329 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
330 }
331 else
332 newlevel->block = make_node (BLOCK);
333
334 /* Point the BLOCK we just made to its parent. */
335 if (current_binding_level)
336 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
337
338 BLOCK_VARS (newlevel->block) = BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
339 TREE_USED (newlevel->block) = 1;
340
341 /* Add this level to the front of the chain (stack) of levels that are
342 active. */
343 newlevel->chain = current_binding_level;
344 newlevel->jmpbuf_decl = NULL_TREE;
345 current_binding_level = newlevel;
346 }
347
348 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
349 and point FNDECL to this BLOCK. */
350
351 void
352 set_current_block_context (tree fndecl)
353 {
354 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
355 DECL_INITIAL (fndecl) = current_binding_level->block;
356 }
357
358 /* Set the jmpbuf_decl for the current binding level to DECL. */
359
360 void
361 set_block_jmpbuf_decl (tree decl)
362 {
363 current_binding_level->jmpbuf_decl = decl;
364 }
365
366 /* Get the jmpbuf_decl, if any, for the current binding level. */
367
368 tree
369 get_block_jmpbuf_decl ()
370 {
371 return current_binding_level->jmpbuf_decl;
372 }
373
374 /* Exit a binding level. Set any BLOCK into the current code group. */
375
376 void
377 gnat_poplevel ()
378 {
379 struct gnat_binding_level *level = current_binding_level;
380 tree block = level->block;
381
382 BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
383 BLOCK_SUBBLOCKS (block) = nreverse (BLOCK_SUBBLOCKS (block));
384
385 /* If this is a function-level BLOCK don't do anything. Otherwise, if there
386 are no variables free the block and merge its subblocks into those of its
387 parent block. Otherwise, add it to the list of its parent. */
388 if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
389 ;
390 else if (BLOCK_VARS (block) == NULL_TREE)
391 {
392 BLOCK_SUBBLOCKS (level->chain->block)
393 = chainon (BLOCK_SUBBLOCKS (block),
394 BLOCK_SUBBLOCKS (level->chain->block));
395 BLOCK_CHAIN (block) = free_block_chain;
396 free_block_chain = block;
397 }
398 else
399 {
400 BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
401 BLOCK_SUBBLOCKS (level->chain->block) = block;
402 TREE_USED (block) = 1;
403 set_block_for_group (block);
404 }
405
406 /* Free this binding structure. */
407 current_binding_level = level->chain;
408 level->chain = free_binding_level;
409 free_binding_level = level;
410 }
411
412 \f
/* Records a ..._DECL node DECL as belonging to the current lexical scope
   and uses GNAT_NODE for location information and propagating flags.
   Globals go on the global_decls vector (builtins additionally on
   builtin_decls); locals are front-chained onto the current BLOCK.  */

void
gnat_pushdecl (tree decl, Node_Id gnat_node)
{
  /* If this decl is public external or at toplevel, there is no context.
     But PARM_DECLs always go in the level of its function.  */
  if (TREE_CODE (decl) != PARM_DECL
      && ((DECL_EXTERNAL (decl) && TREE_PUBLIC (decl))
	  || global_bindings_p ()))
    DECL_CONTEXT (decl) = 0;
  else
    {
      DECL_CONTEXT (decl) = current_function_decl;

      /* Functions imported in another function are not really nested.  */
      if (TREE_CODE (decl) == FUNCTION_DECL && TREE_PUBLIC (decl))
	DECL_NO_STATIC_CHAIN (decl) = 1;
    }

  /* Suppress warnings when no GNAT node is available or warnings are
     explicitly turned off for it.  */
  TREE_NO_WARNING (decl) = (gnat_node == Empty || Warnings_Off (gnat_node));

  /* Set the location of DECL and emit a declaration for it.  */
  if (Present (gnat_node))
    Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));
  add_decl_expr (decl, gnat_node);

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later.  Put global variables in the
     globals list and builtin functions in a dedicated list to speed up
     further lookups.  Don't put TYPE_DECLs for UNCONSTRAINED_ARRAY_TYPE into
     the list, as they will cause trouble with the debugger and aren't needed
     anyway.  */
  if (TREE_CODE (decl) != TYPE_DECL
      || TREE_CODE (TREE_TYPE (decl)) != UNCONSTRAINED_ARRAY_TYPE)
    {
      if (global_bindings_p ())
	{
	  VEC_safe_push (tree, gc, global_decls, decl);

	  if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
	    VEC_safe_push (tree, gc, builtin_decls, decl);
	}
      else
	{
	  TREE_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
	  BLOCK_VARS (current_binding_level->block) = decl;
	}
    }

  /* For the declaration of a type, set its name if it either is not already
     set or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same POINTER_TYPE node.  Code in the
     equivalent function of c-decl.c makes a copy of the type node here, but
     that may cause us trouble with incomplete types.  We make an exception
     for fat pointer types because the compiler automatically builds them
     for unconstrained array types and the debugger uses them to represent
     both these and pointers to these.  */
  if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
    {
      tree t = TREE_TYPE (decl);

      /* Type not named yet by a TYPE_DECL: fall through and name it.  */
      if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
	;
      else if (TYPE_FAT_POINTER_P (t))
	{
	  /* Fat pointer: work on a variant copy so the original keeps its
	     automatically-built name for the debugger.  */
	  tree tt = build_variant_type_copy (t);
	  TYPE_NAME (tt) = decl;
	  TREE_USED (tt) = TREE_USED (t);
	  TREE_TYPE (decl) = tt;
	  DECL_ORIGINAL_TYPE (decl) = t;
	  t = NULL_TREE;
	}
      else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
	;
      else
	/* Already named from a source name: leave it alone.  */
	t = NULL_TREE;

      /* Propagate the name to all the variants.  This is needed for
	 the type qualifiers machinery to work properly.  */
      if (t)
	for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
	  TYPE_NAME (t) = decl;
    }
}
500 \f
/* Do little here.  Set up the standard declarations later after the
   front end has been run.  */

void
gnat_init_decl_processing (void)
{
  /* Make the binding_level structure for global names.  */
  current_function_decl = 0;
  current_binding_level = 0;
  free_binding_level = 0;
  gnat_pushlevel ();

  build_common_tree_nodes (true, true);

  /* In Ada, we use a signed type for SIZETYPE.  Use the signed type
     corresponding to the width of Pmode.  In most cases when ptr_mode
     and Pmode differ, C will use the width of ptr_mode for SIZETYPE.
     But we get far better code using the width of Pmode.  */
  size_type_node = gnat_type_for_mode (Pmode, 0);
  set_sizetype (size_type_node);

  /* Build the default boolean type as an unsigned type of precision 1
     whose RM size is 1 bit.  (NOTE(review): the historical comment here
     claimed an 8-bit type, which does not match the precision set below.)  */
  boolean_type_node = make_node (BOOLEAN_TYPE);
  TYPE_PRECISION (boolean_type_node) = 1;
  fixup_unsigned_type (boolean_type_node);
  TYPE_RM_SIZE (boolean_type_node) = bitsize_int (1);

  build_common_tree_nodes_2 (0);

  ptr_void_type_node = build_pointer_type (void_type_node);
}
532 \f
533 /* Record TYPE as a builtin type for Ada. NAME is the name of the type. */
534
535 void
536 record_builtin_type (const char *name, tree type)
537 {
538 tree type_decl = build_decl (TYPE_DECL, get_identifier (name), type);
539
540 gnat_pushdecl (type_decl, Empty);
541
542 if (debug_hooks->type_decl)
543 debug_hooks->type_decl (type_decl, false);
544 }
545 \f
/* Given a record type RECORD_TYPE and a chain of FIELD_DECL nodes FIELDLIST,
   finish constructing the record or union type.  If REP_LEVEL is zero, this
   record has no representation clause and so will be entirely laid out here.
   If REP_LEVEL is one, this record has a representation clause and has been
   laid out already; only set the sizes and alignment.  If REP_LEVEL is two,
   this record is derived from a parent record and thus inherits its layout;
   only make a pass on the fields to finalize them.  If DO_NOT_FINALIZE is
   true, the record type is expected to be modified afterwards so it will
   not be sent to the back-end for finalization.  */

void
finish_record_type (tree record_type, tree fieldlist, int rep_level,
		    bool do_not_finalize)
{
  enum tree_code code = TREE_CODE (record_type);
  tree name = TYPE_NAME (record_type);
  tree ada_size = bitsize_zero_node;
  tree size = bitsize_zero_node;
  /* Remember what was already set on entry so that rep-clause values are
     never overwritten below.  */
  bool had_size = TYPE_SIZE (record_type) != 0;
  bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
  bool had_align = TYPE_ALIGN (record_type) != 0;
  tree field;

  TYPE_FIELDS (record_type) = fieldlist;

  /* Always attach the TYPE_STUB_DECL for a record type.  It is required to
     generate debug info and have a parallel type.  */
  if (name && TREE_CODE (name) == TYPE_DECL)
    name = DECL_NAME (name);
  TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);

  /* Globally initialize the record first.  If this is a rep'ed record,
     that just means some initializations; otherwise, layout the record.  */
  if (rep_level > 0)
    {
      TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
      SET_TYPE_MODE (record_type, BLKmode);

      if (!had_size_unit)
	TYPE_SIZE_UNIT (record_type) = size_zero_node;
      if (!had_size)
	TYPE_SIZE (record_type) = bitsize_zero_node;

      /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
	 out just like a UNION_TYPE, since the size will be fixed.  */
      else if (code == QUAL_UNION_TYPE)
	code = UNION_TYPE;
    }
  else
    {
      /* Ensure there isn't a size already set.  There can be in an error
	 case where there is a rep clause but all fields have errors and
	 no longer have a position.  */
      TYPE_SIZE (record_type) = 0;
      layout_type (record_type);
    }

  /* At this point, the position and size of each field is known.  It was
     either set before entry by a rep clause, or by laying out the type above.

     We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
     to compute the Ada size; the GCC size and alignment (for rep'ed records
     that are not padding types); and the mode (for rep'ed records).  We also
     clear the DECL_BIT_FIELD indication for the cases we know have not been
     handled yet, and adjust DECL_NONADDRESSABLE_P accordingly.  */

  if (code == QUAL_UNION_TYPE)
    fieldlist = nreverse (fieldlist);

  for (field = fieldlist; field; field = TREE_CHAIN (field))
    {
      tree type = TREE_TYPE (field);
      tree pos = bit_position (field);
      tree this_size = DECL_SIZE (field);
      tree this_ada_size;

      /* The Ada size of a nested record is its TYPE_ADA_SIZE when one was
	 computed (i.e. it is a plain record, not a fat pointer and not a
	 template-carrying type); otherwise fall back to the GCC size.  */
      if ((TREE_CODE (type) == RECORD_TYPE
	   || TREE_CODE (type) == UNION_TYPE
	   || TREE_CODE (type) == QUAL_UNION_TYPE)
	  && !TYPE_IS_FAT_POINTER_P (type)
	  && !TYPE_CONTAINS_TEMPLATE_P (type)
	  && TYPE_ADA_SIZE (type))
	this_ada_size = TYPE_ADA_SIZE (type);
      else
	this_ada_size = this_size;

      /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle.  */
      if (DECL_BIT_FIELD (field)
	  && operand_equal_p (this_size, TYPE_SIZE (type), 0))
	{
	  unsigned int align = TYPE_ALIGN (type);

	  /* In the general case, type alignment is required.  */
	  if (value_factor_p (pos, align))
	    {
	      /* The enclosing record type must be sufficiently aligned.
		 Otherwise, if no alignment was specified for it and it
		 has been laid out already, bump its alignment to the
		 desired one if this is compatible with its size.  */
	      if (TYPE_ALIGN (record_type) >= align)
		{
		  DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
		  DECL_BIT_FIELD (field) = 0;
		}
	      else if (!had_align
		       && rep_level == 0
		       && value_factor_p (TYPE_SIZE (record_type), align))
		{
		  TYPE_ALIGN (record_type) = align;
		  DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
		  DECL_BIT_FIELD (field) = 0;
		}
	    }

	  /* In the non-strict alignment case, only byte alignment is
	     required.  */
	  if (!STRICT_ALIGNMENT
	      && DECL_BIT_FIELD (field)
	      && value_factor_p (pos, BITS_PER_UNIT))
	    DECL_BIT_FIELD (field) = 0;
	}

      /* If we still have DECL_BIT_FIELD set at this point, we know that the
	 field is technically not addressable.  Except that it can actually
	 be addressed if it is BLKmode and happens to be properly aligned.  */
      if (DECL_BIT_FIELD (field)
	  && !(DECL_MODE (field) == BLKmode
	       && value_factor_p (pos, BITS_PER_UNIT)))
	DECL_NONADDRESSABLE_P (field) = 1;

      /* A type must be as aligned as its most aligned field that is not
	 a bit-field.  But this is already enforced by layout_type.  */
      if (rep_level > 0 && !DECL_BIT_FIELD (field))
	TYPE_ALIGN (record_type)
	  = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));

      switch (code)
	{
	case UNION_TYPE:
	  ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
	  size = size_binop (MAX_EXPR, size, this_size);
	  break;

	case QUAL_UNION_TYPE:
	  ada_size
	    = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			   this_ada_size, ada_size);
	  size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			      this_size, size);
	  break;

	case RECORD_TYPE:
	  /* Since we know here that all fields are sorted in order of
	     increasing bit position, the size of the record is one
	     higher than the ending bit of the last field processed
	     unless we have a rep clause, since in that case we might
	     have a field outside a QUAL_UNION_TYPE that has a higher ending
	     position.  So use a MAX in that case.  Also, if this field is a
	     QUAL_UNION_TYPE, we need to take into account the previous size in
	     the case of empty variants.  */
	  ada_size
	    = merge_sizes (ada_size, pos, this_ada_size,
			   TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
	  size
	    = merge_sizes (size, pos, this_size,
			   TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Restore the original field order that was reversed above.  */
  if (code == QUAL_UNION_TYPE)
    nreverse (fieldlist);

  /* If the type is discriminated, it can be used to access all its
     constrained subtypes, so force structural equality checks.  */
  if (CONTAINS_PLACEHOLDER_P (size))
    SET_TYPE_STRUCTURAL_EQUALITY (record_type);

  if (rep_level < 2)
    {
      /* If this is a padding record, we never want to make the size smaller
	 than what was specified in it, if any.  */
      if (TREE_CODE (record_type) == RECORD_TYPE
	  && TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
	size = TYPE_SIZE (record_type);

      /* Now set any of the values we've just computed that apply.  */
      if (!TYPE_IS_FAT_POINTER_P (record_type)
	  && !TYPE_CONTAINS_TEMPLATE_P (record_type))
	SET_TYPE_ADA_SIZE (record_type, ada_size);

      if (rep_level > 0)
	{
	  tree size_unit = had_size_unit
			   ? TYPE_SIZE_UNIT (record_type)
			   : convert (sizetype,
				      size_binop (CEIL_DIV_EXPR, size,
						  bitsize_unit_node));
	  unsigned int align = TYPE_ALIGN (record_type);

	  TYPE_SIZE (record_type) = variable_size (round_up (size, align));
	  TYPE_SIZE_UNIT (record_type)
	    = variable_size (round_up (size_unit, align / BITS_PER_UNIT));

	  compute_record_mode (record_type);
	}
    }

  if (!do_not_finalize)
    rest_of_record_type_compilation (record_type);
}
759
760 /* Wrap up compilation of RECORD_TYPE, i.e. most notably output all
761 the debug information associated with it. It need not be invoked
762 directly in most cases since finish_record_type takes care of doing
763 so, unless explicitly requested not to through DO_NOT_FINALIZE. */
764
765 void
766 rest_of_record_type_compilation (tree record_type)
767 {
768 tree fieldlist = TYPE_FIELDS (record_type);
769 tree field;
770 enum tree_code code = TREE_CODE (record_type);
771 bool var_size = false;
772
773 for (field = fieldlist; field; field = TREE_CHAIN (field))
774 {
775 /* We need to make an XVE/XVU record if any field has variable size,
776 whether or not the record does. For example, if we have a union,
777 it may be that all fields, rounded up to the alignment, have the
778 same size, in which case we'll use that size. But the debug
779 output routines (except Dwarf2) won't be able to output the fields,
780 so we need to make the special record. */
781 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
782 /* If a field has a non-constant qualifier, the record will have
783 variable size too. */
784 || (code == QUAL_UNION_TYPE
785 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
786 {
787 var_size = true;
788 break;
789 }
790 }
791
792 /* If this record is of variable size, rename it so that the
793 debugger knows it is and make a new, parallel, record
794 that tells the debugger how the record is laid out. See
795 exp_dbug.ads. But don't do this for records that are padding
796 since they confuse GDB. */
797 if (var_size
798 && !(TREE_CODE (record_type) == RECORD_TYPE
799 && TYPE_IS_PADDING_P (record_type)))
800 {
801 tree new_record_type
802 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
803 ? UNION_TYPE : TREE_CODE (record_type));
804 tree orig_name = TYPE_NAME (record_type), new_name;
805 tree last_pos = bitsize_zero_node;
806 tree old_field, prev_old_field = NULL_TREE;
807
808 if (TREE_CODE (orig_name) == TYPE_DECL)
809 orig_name = DECL_NAME (orig_name);
810
811 new_name
812 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
813 ? "XVU" : "XVE");
814 TYPE_NAME (new_record_type) = new_name;
815 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
816 TYPE_STUB_DECL (new_record_type)
817 = create_type_stub_decl (new_name, new_record_type);
818 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
819 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
820 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
821 TYPE_SIZE_UNIT (new_record_type)
822 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
823
824 add_parallel_type (TYPE_STUB_DECL (record_type), new_record_type);
825
826 /* Now scan all the fields, replacing each field with a new
827 field corresponding to the new encoding. */
828 for (old_field = TYPE_FIELDS (record_type); old_field;
829 old_field = TREE_CHAIN (old_field))
830 {
831 tree field_type = TREE_TYPE (old_field);
832 tree field_name = DECL_NAME (old_field);
833 tree new_field;
834 tree curpos = bit_position (old_field);
835 bool var = false;
836 unsigned int align = 0;
837 tree pos;
838
839 /* See how the position was modified from the last position.
840
841 There are two basic cases we support: a value was added
842 to the last position or the last position was rounded to
843 a boundary and they something was added. Check for the
844 first case first. If not, see if there is any evidence
845 of rounding. If so, round the last position and try
846 again.
847
848 If this is a union, the position can be taken as zero. */
849
850 /* Some computations depend on the shape of the position expression,
851 so strip conversions to make sure it's exposed. */
852 curpos = remove_conversions (curpos, true);
853
854 if (TREE_CODE (new_record_type) == UNION_TYPE)
855 pos = bitsize_zero_node, align = 0;
856 else
857 pos = compute_related_constant (curpos, last_pos);
858
859 if (!pos && TREE_CODE (curpos) == MULT_EXPR
860 && host_integerp (TREE_OPERAND (curpos, 1), 1))
861 {
862 tree offset = TREE_OPERAND (curpos, 0);
863 align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
864
865 /* An offset which is a bitwise AND with a negative power of 2
866 means an alignment corresponding to this power of 2. */
867 offset = remove_conversions (offset, true);
868 if (TREE_CODE (offset) == BIT_AND_EXPR
869 && host_integerp (TREE_OPERAND (offset, 1), 0)
870 && tree_int_cst_sgn (TREE_OPERAND (offset, 1)) < 0)
871 {
872 unsigned int pow
873 = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
874 if (exact_log2 (pow) > 0)
875 align *= pow;
876 }
877
878 pos = compute_related_constant (curpos,
879 round_up (last_pos, align));
880 }
881 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
882 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
883 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
884 && host_integerp (TREE_OPERAND
885 (TREE_OPERAND (curpos, 0), 1),
886 1))
887 {
888 align
889 = tree_low_cst
890 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
891 pos = compute_related_constant (curpos,
892 round_up (last_pos, align));
893 }
894 else if (potential_alignment_gap (prev_old_field, old_field,
895 pos))
896 {
897 align = TYPE_ALIGN (field_type);
898 pos = compute_related_constant (curpos,
899 round_up (last_pos, align));
900 }
901
902 /* If we can't compute a position, set it to zero.
903
904 ??? We really should abort here, but it's too much work
905 to get this correct for all cases. */
906
907 if (!pos)
908 pos = bitsize_zero_node;
909
910 /* See if this type is variable-sized and make a pointer type
911 and indicate the indirection if so. Beware that the debug
912 back-end may adjust the position computed above according
913 to the alignment of the field type, i.e. the pointer type
914 in this case, if we don't preventively counter that. */
915 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
916 {
917 field_type = build_pointer_type (field_type);
918 if (align != 0 && TYPE_ALIGN (field_type) > align)
919 {
920 field_type = copy_node (field_type);
921 TYPE_ALIGN (field_type) = align;
922 }
923 var = true;
924 }
925
926 /* Make a new field name, if necessary. */
927 if (var || align != 0)
928 {
929 char suffix[16];
930
931 if (align != 0)
932 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
933 align / BITS_PER_UNIT);
934 else
935 strcpy (suffix, "XVL");
936
937 field_name = concat_name (field_name, suffix);
938 }
939
940 new_field = create_field_decl (field_name, field_type,
941 new_record_type, 0,
942 DECL_SIZE (old_field), pos, 0);
943 TREE_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
944 TYPE_FIELDS (new_record_type) = new_field;
945
946 /* If old_field is a QUAL_UNION_TYPE, take its size as being
947 zero. The only time it's not the last field of the record
948 is when there are other components at fixed positions after
949 it (meaning there was a rep clause for every field) and we
950 want to be able to encode them. */
951 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
952 (TREE_CODE (TREE_TYPE (old_field))
953 == QUAL_UNION_TYPE)
954 ? bitsize_zero_node
955 : DECL_SIZE (old_field));
956 prev_old_field = old_field;
957 }
958
959 TYPE_FIELDS (new_record_type)
960 = nreverse (TYPE_FIELDS (new_record_type));
961
962 rest_of_type_decl_compilation (TYPE_STUB_DECL (new_record_type));
963 }
964
965 rest_of_type_decl_compilation (TYPE_STUB_DECL (record_type));
966 }
967
968 /* Append PARALLEL_TYPE on the chain of parallel types for decl. */
969
970 void
971 add_parallel_type (tree decl, tree parallel_type)
972 {
973 tree d = decl;
974
975 while (DECL_PARALLEL_TYPE (d))
976 d = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (d));
977
978 SET_DECL_PARALLEL_TYPE (d, parallel_type);
979 }
980
981 /* Return the parallel type associated to a type, if any. */
982
983 tree
984 get_parallel_type (tree type)
985 {
986 if (TYPE_STUB_DECL (type))
987 return DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type));
988 else
989 return NULL_TREE;
990 }
991
992 /* Utility function of above to merge LAST_SIZE, the previous size of a record
993 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
994 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
995 replace a value of zero with the old size. If HAS_REP is true, we take the
996 MAX of the end position of this field with LAST_SIZE. In all other cases,
997 we use FIRST_BIT plus SIZE. Return an expression for the size. */
998
999 static tree
1000 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
1001 bool has_rep)
1002 {
1003 tree type = TREE_TYPE (last_size);
1004 tree new;
1005
1006 if (!special || TREE_CODE (size) != COND_EXPR)
1007 {
1008 new = size_binop (PLUS_EXPR, first_bit, size);
1009 if (has_rep)
1010 new = size_binop (MAX_EXPR, last_size, new);
1011 }
1012
1013 else
1014 new = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
1015 integer_zerop (TREE_OPERAND (size, 1))
1016 ? last_size : merge_sizes (last_size, first_bit,
1017 TREE_OPERAND (size, 1),
1018 1, has_rep),
1019 integer_zerop (TREE_OPERAND (size, 2))
1020 ? last_size : merge_sizes (last_size, first_bit,
1021 TREE_OPERAND (size, 2),
1022 1, has_rep));
1023
1024 /* We don't need any NON_VALUE_EXPRs and they can confuse us (especially
1025 when fed through substitute_in_expr) into thinking that a constant
1026 size is not constant. */
1027 while (TREE_CODE (new) == NON_LVALUE_EXPR)
1028 new = TREE_OPERAND (new, 0);
1029
1030 return new;
1031 }
1032
1033 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1034 related by the addition of a constant. Return that constant if so. */
1035
1036 static tree
1037 compute_related_constant (tree op0, tree op1)
1038 {
1039 tree op0_var, op1_var;
1040 tree op0_con = split_plus (op0, &op0_var);
1041 tree op1_con = split_plus (op1, &op1_var);
1042 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1043
1044 if (operand_equal_p (op0_var, op1_var, 0))
1045 return result;
1046 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1047 return result;
1048 else
1049 return 0;
1050 }
1051
/* Utility function of above to split a tree OP which may be a sum, into a
   constant part, which is returned, and a variable part, which is stored
   in *PVAR.  *PVAR may be bitsize_zero_node.  All operations must be of
   bitsizetype.  */

static tree
split_plus (tree in, tree *pvar)
{
  /* Strip NOPS in order to ease the tree traversal and maximize the
     potential for constant or plus/minus discovery.  We need to be careful
     to always return and set *pvar to bitsizetype trees, but it's worth
     the effort.  */
  STRIP_NOPS (in);

  /* Default: the whole expression is the variable part, the constant
     part is zero.  The cases below override this as appropriate.  */
  *pvar = convert (bitsizetype, in);

  if (TREE_CODE (in) == INTEGER_CST)
    {
      /* A literal constant: no variable part at all.  */
      *pvar = bitsize_zero_node;
      return convert (bitsizetype, in);
    }
  else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
    {
      /* Recurse on both operands and recombine the pieces with the
         original operation (PLUS or MINUS).  */
      tree lhs_var, rhs_var;
      tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
      tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);

      /* If neither side yielded a constant part, keep the default *pvar
         set above (the whole expression) and report a zero constant.  */
      if (lhs_var == TREE_OPERAND (in, 0)
          && rhs_var == TREE_OPERAND (in, 1))
        return bitsize_zero_node;

      *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
      return size_binop (TREE_CODE (in), lhs_con, rhs_con);
    }
  else
    return bitsize_zero_node;
}
1089 \f
1090 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1091 subprogram. If it is void_type_node, then we are dealing with a procedure,
1092 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1093 PARM_DECL nodes that are the subprogram arguments. CICO_LIST is the
1094 copy-in/copy-out list to be stored into TYPE_CICO_LIST.
1095 RETURNS_UNCONSTRAINED is true if the function returns an unconstrained
1096 object. RETURNS_BY_REF is true if the function returns by reference.
1097 RETURNS_BY_TARGET_PTR is true if the function is to be passed (as its
1098 first parameter) the address of the place to copy its result. */
1099
1100 tree
1101 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
1102 bool returns_unconstrained, bool returns_by_ref,
1103 bool returns_by_target_ptr)
1104 {
1105 /* A chain of TREE_LIST nodes whose TREE_VALUEs are the data type nodes of
1106 the subprogram formal parameters. This list is generated by traversing the
1107 input list of PARM_DECL nodes. */
1108 tree param_type_list = NULL;
1109 tree param_decl;
1110 tree type;
1111
1112 for (param_decl = param_decl_list; param_decl;
1113 param_decl = TREE_CHAIN (param_decl))
1114 param_type_list = tree_cons (NULL_TREE, TREE_TYPE (param_decl),
1115 param_type_list);
1116
1117 /* The list of the function parameter types has to be terminated by the void
1118 type to signal to the back-end that we are not dealing with a variable
1119 parameter subprogram, but that the subprogram has a fixed number of
1120 parameters. */
1121 param_type_list = tree_cons (NULL_TREE, void_type_node, param_type_list);
1122
1123 /* The list of argument types has been created in reverse
1124 so nreverse it. */
1125 param_type_list = nreverse (param_type_list);
1126
1127 type = build_function_type (return_type, param_type_list);
1128
1129 /* TYPE may have been shared since GCC hashes types. If it has a CICO_LIST
1130 or the new type should, make a copy of TYPE. Likewise for
1131 RETURNS_UNCONSTRAINED and RETURNS_BY_REF. */
1132 if (TYPE_CI_CO_LIST (type) || cico_list
1133 || TYPE_RETURNS_UNCONSTRAINED_P (type) != returns_unconstrained
1134 || TYPE_RETURNS_BY_REF_P (type) != returns_by_ref
1135 || TYPE_RETURNS_BY_TARGET_PTR_P (type) != returns_by_target_ptr)
1136 type = copy_type (type);
1137
1138 TYPE_CI_CO_LIST (type) = cico_list;
1139 TYPE_RETURNS_UNCONSTRAINED_P (type) = returns_unconstrained;
1140 TYPE_RETURNS_BY_REF_P (type) = returns_by_ref;
1141 TYPE_RETURNS_BY_TARGET_PTR_P (type) = returns_by_target_ptr;
1142 return type;
1143 }
1144 \f
1145 /* Return a copy of TYPE but safe to modify in any way. */
1146
1147 tree
1148 copy_type (tree type)
1149 {
1150 tree new = copy_node (type);
1151
1152 /* copy_node clears this field instead of copying it, because it is
1153 aliased with TREE_CHAIN. */
1154 TYPE_STUB_DECL (new) = TYPE_STUB_DECL (type);
1155
1156 TYPE_POINTER_TO (new) = 0;
1157 TYPE_REFERENCE_TO (new) = 0;
1158 TYPE_MAIN_VARIANT (new) = new;
1159 TYPE_NEXT_VARIANT (new) = 0;
1160
1161 return new;
1162 }
1163 \f
1164 /* Return a subtype of sizetype with range MIN to MAX and whose
1165 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
1166 of the associated TYPE_DECL. */
1167
1168 tree
1169 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
1170 {
1171 /* First build a type for the desired range. */
1172 tree type = build_index_2_type (min, max);
1173
1174 /* If this type has the TYPE_INDEX_TYPE we want, return it. */
1175 if (TYPE_INDEX_TYPE (type) == index)
1176 return type;
1177
1178 /* Otherwise, if TYPE_INDEX_TYPE is set, make a copy. Note that we have
1179 no way of sharing these types, but that's only a small hole. */
1180 if (TYPE_INDEX_TYPE (type))
1181 type = copy_type (type);
1182
1183 SET_TYPE_INDEX_TYPE (type, index);
1184 create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);
1185
1186 return type;
1187 }
1188 \f
1189 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
1190 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
1191 its data type. */
1192
1193 tree
1194 create_type_stub_decl (tree type_name, tree type)
1195 {
1196 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
1197 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
1198 emitted in DWARF. */
1199 tree type_decl = build_decl (TYPE_DECL, type_name, type);
1200 DECL_ARTIFICIAL (type_decl) = 1;
1201 return type_decl;
1202 }
1203
/* Return a TYPE_DECL node.  TYPE_NAME gives the name of the type and TYPE
   is a ..._TYPE node giving its data type.  ARTIFICIAL_P is true if this
   is a declaration that was generated by the compiler.  DEBUG_INFO_P is
   true if we need to write debug information about this type.  GNAT_NODE
   is used for the position of the decl.  */

tree
create_type_decl (tree type_name, tree type, struct attrib *attr_list,
                  bool artificial_p, bool debug_info_p, Node_Id gnat_node)
{
  enum tree_code code = TREE_CODE (type);
  /* Whether TYPE has already been given a proper name via a TYPE_DECL.  */
  bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
  tree type_decl;

  /* Only the builtin TYPE_STUB_DECL should be used for dummy types.  */
  gcc_assert (!TYPE_IS_DUMMY_P (type));

  /* If the type hasn't been named yet, we're naming it; preserve an existing
     TYPE_STUB_DECL that has been attached to it for some purpose.  */
  if (!named && TYPE_STUB_DECL (type))
    {
      type_decl = TYPE_STUB_DECL (type);
      DECL_NAME (type_decl) = type_name;
    }
  else
    type_decl = build_decl (TYPE_DECL, type_name, type);

  DECL_ARTIFICIAL (type_decl) = artificial_p;
  gnat_pushdecl (type_decl, gnat_node);
  process_attributes (type_decl, attr_list);

  /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
     This causes the name to be also viewed as a "tag" by the debug
     back-end, with the advantage that no DW_TAG_typedef is emitted
     for artificial "tagged" types in DWARF.  */
  if (!named)
    TYPE_STUB_DECL (type) = type_decl;

  /* Pass the type declaration to the debug back-end unless this is an
     UNCONSTRAINED_ARRAY_TYPE that the back-end does not support, or a
     type for which debugging information was not requested, or else an
     ENUMERAL_TYPE or RECORD_TYPE (except for fat pointers) which are
     handled separately.  And do not pass dummy types either.  */
  if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
    DECL_IGNORED_P (type_decl) = 1;
  else if (code != ENUMERAL_TYPE
           && (code != RECORD_TYPE || TYPE_IS_FAT_POINTER_P (type))
           && !((code == POINTER_TYPE || code == REFERENCE_TYPE)
                && TYPE_IS_DUMMY_P (TREE_TYPE (type)))
           && !(code == RECORD_TYPE
                && TYPE_IS_DUMMY_P
                   (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))))))
    rest_of_type_decl_compilation (type_decl);

  return type_decl;
}
1260 \f
/* Return a VAR_DECL or CONST_DECL node.

   VAR_NAME gives the name of the variable.  ASM_NAME is its assembler name
   (if provided).  TYPE is its data type (a GCC ..._TYPE node).  VAR_INIT is
   the GCC tree for an optional initial expression; NULL_TREE if none.

   CONST_FLAG is true if this variable is constant, in which case we might
   return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.

   PUBLIC_FLAG is true if this is for a reference to a public entity or for a
   definition to be made visible outside of the current compilation unit, for
   instance variable definitions in a package specification.

   EXTERN_FLAG is true when processing an external variable declaration (as
   opposed to a definition: no storage is to be allocated for the variable).

   STATIC_FLAG is only relevant when not at top level.  In that case
   it indicates whether to always allocate storage to the variable.

   GNAT_NODE is used for the position of the decl.  */

tree
create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
                   bool const_flag, bool public_flag, bool extern_flag,
                   bool static_flag, bool const_decl_allowed_p,
                   struct attrib *attr_list, Node_Id gnat_node)
{
  /* Whether the initializer qualifies as a constant initializer for the
     storage at stake: a valid static initializer for global or static
     variables, merely a TREE_CONSTANT expression otherwise.  */
  bool init_const
    = (var_init != 0
       && gnat_types_compatible_p (type, TREE_TYPE (var_init))
       && (global_bindings_p () || static_flag
           ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
           : TREE_CONSTANT (var_init)));

  /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
     case the initializer may be used in-lieu of the DECL node (as done in
     Identifier_to_gnu).  This is useful to prevent the need of elaboration
     code when an identifier for which such a decl is made is in turn used as
     an initializer.  We used to rely on CONST vs VAR_DECL for this purpose,
     but extra constraints apply to this choice (see below) and are not
     relevant to the distinction we wish to make. */
  bool constant_p = const_flag && init_const;

  /* The actual DECL node.  CONST_DECL was initially intended for enumerals
     and may be used for scalars in general but not for aggregates.  */
  tree var_decl
    = build_decl ((constant_p && const_decl_allowed_p
                   && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
                  var_name, type);

  /* If this is external, throw away any initializations (they will be done
     elsewhere) unless this is a constant for which we would like to remain
     able to get the initializer.  If we are defining a global here, leave a
     constant initialization and save any variable elaborations for the
     elaboration routine.  If we are just annotating types, throw away the
     initialization if it isn't a constant.  */
  if ((extern_flag && !constant_p)
      || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
    var_init = NULL_TREE;

  /* At the global level, an initializer requiring code to be generated
     produces elaboration statements.  Check that such statements are allowed,
     that is, not violating a No_Elaboration_Code restriction.  */
  if (global_bindings_p () && var_init != 0 && ! init_const)
    Check_Elaboration_Code_Allowed (gnat_node);

  /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
     try to fiddle with DECL_COMMON.  However, on platforms that don't
     support global BSS sections, uninitialized global variables would
     go in DATA instead, thus increasing the size of the executable.  */
  if (!flag_no_common
      && TREE_CODE (var_decl) == VAR_DECL
      && !have_global_bss_p ())
    DECL_COMMON (var_decl) = 1;
  DECL_INITIAL (var_decl) = var_init;
  TREE_READONLY (var_decl) = const_flag;
  DECL_EXTERNAL (var_decl) = extern_flag;
  TREE_PUBLIC (var_decl) = public_flag || extern_flag;
  TREE_CONSTANT (var_decl) = constant_p;
  TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
    = TYPE_VOLATILE (type);

  /* If it's public and not external, always allocate storage for it.
     At the global binding level we need to allocate static storage for the
     variable if and only if it's not external.  If we are not at the top level
     we allocate automatic storage unless requested not to.  */
  TREE_STATIC (var_decl)
    = !extern_flag && (public_flag || static_flag || global_bindings_p ());

  /* For an external constant whose initializer is not absolute, do not emit
     debug info.  In DWARF this would mean a global relocation in a read-only
     section which runs afoul of the PE-COFF runtime relocation mechanism.  */
  if (extern_flag
      && constant_p
      && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
           != null_pointer_node)
    DECL_IGNORED_P (var_decl) = 1;

  if (asm_name && VAR_OR_FUNCTION_DECL_P (var_decl))
    SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);

  process_attributes (var_decl, attr_list);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (var_decl, gnat_node);

  /* A volatile decl has side effects (set above); force it addressable so
     accesses to it are not optimized away.  */
  if (TREE_SIDE_EFFECTS (var_decl))
    TREE_ADDRESSABLE (var_decl) = 1;

  if (TREE_CODE (var_decl) != CONST_DECL)
    {
      if (global_bindings_p ())
        rest_of_decl_compilation (var_decl, true, 0);
    }
  else
    expand_decl (var_decl);

  return var_decl;
}
1380 \f
1381 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
1382
1383 static bool
1384 aggregate_type_contains_array_p (tree type)
1385 {
1386 switch (TREE_CODE (type))
1387 {
1388 case RECORD_TYPE:
1389 case UNION_TYPE:
1390 case QUAL_UNION_TYPE:
1391 {
1392 tree field;
1393 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
1394 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
1395 && aggregate_type_contains_array_p (TREE_TYPE (field)))
1396 return true;
1397 return false;
1398 }
1399
1400 case ARRAY_TYPE:
1401 return true;
1402
1403 default:
1404 gcc_unreachable ();
1405 }
1406 }
1407
/* Return a FIELD_DECL node.  FIELD_NAME is the field name, FIELD_TYPE is its
   type, and RECORD_TYPE is the type of the parent.  PACKED is nonzero if
   this field is in a record type with a "pragma pack".  If SIZE is nonzero
   it is the specified size for this field.  If POS is nonzero, it is the bit
   position.  If ADDRESSABLE is nonzero, it means we are allowed to take
   the address of this field for aliasing purposes.  If it is negative, we
   should not make a bitfield, which is used by make_aligning_type.  */

tree
create_field_decl (tree field_name, tree field_type, tree record_type,
                   int packed, tree size, tree pos, int addressable)
{
  tree field_decl = build_decl (FIELD_DECL, field_name, field_type);

  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TYPE_READONLY (field_type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
     Likewise for an aggregate without specified position that contains an
     array, because in this case slices of variable length of this array
     must be handled by GCC and variable-sized objects need to be aligned
     to at least a byte boundary.  */
  if (packed && (TYPE_MODE (field_type) == BLKmode
                 || (!pos
                     && AGGREGATE_TYPE_P (field_type)
                     && aggregate_type_contains_array_p (field_type))))
    DECL_ALIGN (field_decl) = BITS_PER_UNIT;

  /* If a size is specified, use it.  Otherwise, if the record type is packed
     compute a size to use, which may differ from the object's natural size.
     We always set a size in this case to trigger the checks for bitfield
     creation below, which is typically required when no position has been
     specified.  */
  if (size)
    size = convert (bitsizetype, size);
  else if (packed == 1)
    {
      size = rm_size (field_type);

      /* For a constant size larger than MAX_FIXED_MODE_SIZE, round up to
         byte.  */
      if (TREE_CODE (size) == INTEGER_CST
          && compare_tree_int (size, MAX_FIXED_MODE_SIZE) > 0)
        size = round_up (size, BITS_PER_UNIT);
    }

  /* If we may, according to ADDRESSABLE, make a bitfield if a size is
     specified for two reasons: first if the size differs from the natural
     size.  Second, if the alignment is insufficient.  There are a number of
     ways the latter can be true.

     We never make a bitfield if the type of the field has a nonconstant size,
     because no such entity requiring bitfield operations should reach here.

     We do *preventively* make a bitfield when there might be the need for it
     but we don't have all the necessary information to decide, as is the case
     of a field with no specified position in a packed record.

     We also don't look at STRICT_ALIGNMENT here, and rely on later processing
     in layout_decl or finish_record_type to clear the bit_field indication if
     it is in fact not needed.  */
  if (addressable >= 0
      && size
      && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
      && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
          || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
          || packed
          || (TYPE_ALIGN (record_type) != 0
              && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      if (!packed && !pos)
        {
          /* Cap the field alignment by the record alignment, if any.  */
          if (TYPE_ALIGN (record_type) != 0
              && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
            DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
          else
            DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
        }
    }

  /* With an explicit position, packedness follows the bitfield status;
     otherwise inherit the PACKED argument.  */
  DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;

  /* Bump the alignment if need be, either for bitfield/packing purposes or
     to satisfy the type requirements if no such consideration applies.  When
     we get the alignment from the type, indicate if this is from an explicit
     user request, which prevents stor-layout from lowering it later on.  */
  {
    unsigned int bit_align
      = (DECL_BIT_FIELD (field_decl) ? 1
         : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);

    if (bit_align > DECL_ALIGN (field_decl))
      DECL_ALIGN (field_decl) = bit_align;
    else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
      {
        DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
        DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
      }
  }

  if (pos)
    {
      /* We need to pass in the alignment the DECL is known to have.
         This is the lowest-order bit set in POS, but no more than
         the alignment of the record, if one is specified.  Note
         that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      if (host_integerp (pos, 1))
        known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
      else
        known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
          && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
        known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
                             host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
                             : BITS_PER_UNIT);
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
                    &DECL_FIELD_BIT_OFFSET (field_decl),
                    DECL_OFFSET_ALIGN (field_decl), pos);
    }

  /* In addition to what our caller says, claim the field is addressable if we
     know that its type is not suitable.

     The field may also be "technically" nonaddressable, meaning that even if
     we attempt to take the field's address we will actually get the address
     of a copy.  This is the case for true bitfields, but the DECL_BIT_FIELD
     value we have at this point is not accurate enough, so we don't account
     for this here and let finish_record_type decide.  */
  if (!addressable && !type_for_nonaliased_component_p (field_type))
    addressable = 1;

  DECL_NONADDRESSABLE_P (field_decl) = !addressable;

  return field_decl;
}
1553 \f
/* Return a PARM_DECL node.  PARAM_NAME is the name of the parameter and
   PARAM_TYPE is its type.  READONLY is true if the parameter is readonly
   (either an In parameter or an address of a pass-by-ref parameter).  */

tree
create_param_decl (tree param_name, tree param_type, bool readonly)
{
  tree param_decl = build_decl (PARM_DECL, param_name, param_type);

  /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
     can lead to various ABI violations.  */
  if (targetm.calls.promote_prototypes (NULL_TREE)
      && INTEGRAL_TYPE_P (param_type)
      && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
    {
      /* We have to be careful about biased types here.  Make a subtype
         of integer_type_node with the proper biasing.  */
      if (TREE_CODE (param_type) == INTEGER_TYPE
          && TYPE_BIASED_REPRESENTATION_P (param_type))
        {
          /* Build a full-precision unsigned subtype that keeps the
             biased representation and original bounds.  */
          tree subtype = make_node (INTEGER_TYPE);
          TREE_TYPE (subtype) = integer_type_node;
          TYPE_BIASED_REPRESENTATION_P (subtype) = 1;

          TYPE_UNSIGNED (subtype) = 1;
          TYPE_PRECISION (subtype) = TYPE_PRECISION (integer_type_node);
          TYPE_MIN_VALUE (subtype) = TYPE_MIN_VALUE (param_type);
          TYPE_MAX_VALUE (subtype) = TYPE_MAX_VALUE (param_type);
          layout_type (subtype);

          param_type = subtype;
        }
      else
        param_type = integer_type_node;
    }

  /* Record the (possibly promoted) type in DECL_ARG_TYPE; the decl's own
     type keeps the original PARAM_TYPE passed in.  */
  DECL_ARG_TYPE (param_decl) = param_type;
  TREE_READONLY (param_decl) = readonly;
  return param_decl;
}
1594 \f
1595 /* Given a DECL and ATTR_LIST, process the listed attributes. */
1596
1597 void
1598 process_attributes (tree decl, struct attrib *attr_list)
1599 {
1600 for (; attr_list; attr_list = attr_list->next)
1601 switch (attr_list->type)
1602 {
1603 case ATTR_MACHINE_ATTRIBUTE:
1604 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
1605 NULL_TREE),
1606 ATTR_FLAG_TYPE_IN_PLACE);
1607 break;
1608
1609 case ATTR_LINK_ALIAS:
1610 if (! DECL_EXTERNAL (decl))
1611 {
1612 TREE_STATIC (decl) = 1;
1613 assemble_alias (decl, attr_list->name);
1614 }
1615 break;
1616
1617 case ATTR_WEAK_EXTERNAL:
1618 if (SUPPORTS_WEAK)
1619 declare_weak (decl);
1620 else
1621 post_error ("?weak declarations not supported on this target",
1622 attr_list->error_point);
1623 break;
1624
1625 case ATTR_LINK_SECTION:
1626 if (targetm.have_named_sections)
1627 {
1628 DECL_SECTION_NAME (decl)
1629 = build_string (IDENTIFIER_LENGTH (attr_list->name),
1630 IDENTIFIER_POINTER (attr_list->name));
1631 DECL_COMMON (decl) = 0;
1632 }
1633 else
1634 post_error ("?section attributes are not supported for this target",
1635 attr_list->error_point);
1636 break;
1637
1638 case ATTR_LINK_CONSTRUCTOR:
1639 DECL_STATIC_CONSTRUCTOR (decl) = 1;
1640 TREE_USED (decl) = 1;
1641 break;
1642
1643 case ATTR_LINK_DESTRUCTOR:
1644 DECL_STATIC_DESTRUCTOR (decl) = 1;
1645 TREE_USED (decl) = 1;
1646 break;
1647
1648 case ATTR_THREAD_LOCAL_STORAGE:
1649 DECL_TLS_MODEL (decl) = decl_default_tls_model (decl);
1650 DECL_COMMON (decl) = 0;
1651 break;
1652 }
1653 }
1654 \f
1655 /* Record DECL as a global renaming pointer. */
1656
1657 void
1658 record_global_renaming_pointer (tree decl)
1659 {
1660 gcc_assert (DECL_RENAMED_OBJECT (decl));
1661 VEC_safe_push (tree, gc, global_renaming_pointers, decl);
1662 }
1663
1664 /* Invalidate the global renaming pointers. */
1665
1666 void
1667 invalidate_global_renaming_pointers (void)
1668 {
1669 unsigned int i;
1670 tree iter;
1671
1672 for (i = 0; VEC_iterate(tree, global_renaming_pointers, i, iter); i++)
1673 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
1674
1675 VEC_free (tree, gc, global_renaming_pointers);
1676 }
1677
1678 /* Return true if VALUE is a known to be a multiple of FACTOR, which must be
1679 a power of 2. */
1680
1681 bool
1682 value_factor_p (tree value, HOST_WIDE_INT factor)
1683 {
1684 if (host_integerp (value, 1))
1685 return tree_low_cst (value, 1) % factor == 0;
1686
1687 if (TREE_CODE (value) == MULT_EXPR)
1688 return (value_factor_p (TREE_OPERAND (value, 0), factor)
1689 || value_factor_p (TREE_OPERAND (value, 1), factor));
1690
1691 return false;
1692 }
1693
/* Given 2 consecutive field decls PREV_FIELD and CURR_FIELD, return true
   unless we can prove these 2 fields are laid out in such a way that no gap
   exists between the end of PREV_FIELD and the beginning of CURR_FIELD.
   OFFSET is the distance in bits between the end of PREV_FIELD and the
   starting position of CURR_FIELD.  It is ignored if null.  */

static bool
potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
{
  /* If this is the first field of the record, there cannot be any gap.  */
  if (!prev_field)
    return false;

  /* If the previous field is a union type, then return false: the only
     time when such a field is not the last field of the record is when
     there are other components at fixed positions after it (meaning there
     was a rep clause for every field), in which case we don't want the
     alignment constraint to override them.  */
  if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
    return false;

  /* If the distance between the end of prev_field and the beginning of
     curr_field is constant, then there is a gap if the value of this
     constant is not null.  */
  if (offset && host_integerp (offset, 1))
    return !integer_zerop (offset);

  /* If the size and position of the previous field are constant,
     then check the sum of this size and position.  There will be a gap
     iff it is not a multiple of the current field alignment.  */
  if (host_integerp (DECL_SIZE (prev_field), 1)
      && host_integerp (bit_position (prev_field), 1))
    return ((tree_low_cst (bit_position (prev_field), 1)
             + tree_low_cst (DECL_SIZE (prev_field), 1))
            % DECL_ALIGN (curr_field) != 0);

  /* If both the position and size of the previous field are multiples
     of the current field alignment, there cannot be any gap.  */
  if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
      && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
    return false;

  /* Fallback: conservatively assume there may be a gap.  */
  return true;
}
1739
1740 /* Returns a LABEL_DECL node for LABEL_NAME. */
1741
1742 tree
1743 create_label_decl (tree label_name)
1744 {
1745 tree label_decl = build_decl (LABEL_DECL, label_name, void_type_node);
1746
1747 DECL_CONTEXT (label_decl) = current_function_decl;
1748 DECL_MODE (label_decl) = VOIDmode;
1749 DECL_SOURCE_LOCATION (label_decl) = input_location;
1750
1751 return label_decl;
1752 }
1753 \f
/* Returns a FUNCTION_DECL node.  SUBPROG_NAME is the name of the subprogram,
   ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
   node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
   PARM_DECL nodes chained through the TREE_CHAIN field).

   INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, and ATTR_LIST are used to set the
   appropriate fields in the FUNCTION_DECL.  GNAT_NODE gives the location.  */

tree
create_subprog_decl (tree subprog_name, tree asm_name,
                     tree subprog_type, tree param_decl_list, bool inline_flag,
                     bool public_flag, bool extern_flag,
                     struct attrib *attr_list, Node_Id gnat_node)
{
  tree return_type = TREE_TYPE (subprog_type);
  tree subprog_decl = build_decl (FUNCTION_DECL, subprog_name, subprog_type);

  /* If this is a non-inline function nested inside an inlined external
     function, we cannot honor both requests without cloning the nested
     function in the current unit since it is private to the other unit.
     We could inline the nested function as well but it's probably better
     to err on the side of too little inlining.  */
  if (!inline_flag
      && current_function_decl
      && DECL_DECLARED_INLINE_P (current_function_decl)
      && DECL_EXTERNAL (current_function_decl))
    DECL_DECLARED_INLINE_P (current_function_decl) = 0;

  /* Transfer the caller-provided flags onto the new decl, and mirror the
     const/volatile qualifiers of the function type so the middle-end can
     exploit them.  */
  DECL_EXTERNAL (subprog_decl) = extern_flag;
  TREE_PUBLIC (subprog_decl) = public_flag;
  TREE_STATIC (subprog_decl) = 1;
  TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
  TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
  TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
  DECL_DECLARED_INLINE_P (subprog_decl) = inline_flag;
  DECL_ARGUMENTS (subprog_decl) = param_decl_list;

  /* Build an artificial RESULT_DECL to hold the return value.  */
  DECL_RESULT (subprog_decl) = build_decl (RESULT_DECL, 0, return_type);
  DECL_ARTIFICIAL (DECL_RESULT (subprog_decl)) = 1;
  DECL_IGNORED_P (DECL_RESULT (subprog_decl)) = 1;

  /* TREE_ADDRESSABLE is set on the result type to request the use of the
     target by-reference return mechanism.  This is not supported all the
     way down to RTL expansion with GCC 4, which ICEs on temporary creation
     attempts with such a type and expects DECL_BY_REFERENCE to be set on
     the RESULT_DECL instead - see gnat_genericize for more details.  */
  if (TREE_ADDRESSABLE (TREE_TYPE (DECL_RESULT (subprog_decl))))
    {
      tree result_decl = DECL_RESULT (subprog_decl);

      TREE_ADDRESSABLE (TREE_TYPE (result_decl)) = 0;
      DECL_BY_REFERENCE (result_decl) = 1;
    }

  if (asm_name)
    {
      SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);

      /* The expand_main_function circuitry expects "main_identifier_node" to
	 designate the DECL_NAME of the 'main' entry point, in turn expected
	 to be declared as the "main" function literally by default.  Ada
	 program entry points are typically declared with a different name
	 within the binder generated file, exported as 'main' to satisfy the
	 system expectations.  Redirect main_identifier_node in this case.  */
      if (asm_name == main_identifier_node)
	main_identifier_node = DECL_NAME (subprog_decl);
    }

  /* Process the machine attributes requested for this subprogram.  */
  process_attributes (subprog_decl, attr_list);

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (subprog_decl, gnat_node);

  /* Output the assembler code and/or RTL for the declaration.  */
  rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);

  return subprog_decl;
}
1831 \f
1832 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
1833 body. This routine needs to be invoked before processing the declarations
1834 appearing in the subprogram. */
1835
1836 void
1837 begin_subprog_body (tree subprog_decl)
1838 {
1839 tree param_decl;
1840
1841 current_function_decl = subprog_decl;
1842 announce_function (subprog_decl);
1843
1844 /* Enter a new binding level and show that all the parameters belong to
1845 this function. */
1846 gnat_pushlevel ();
1847 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
1848 param_decl = TREE_CHAIN (param_decl))
1849 DECL_CONTEXT (param_decl) = subprog_decl;
1850
1851 make_decl_rtl (subprog_decl);
1852
1853 /* We handle pending sizes via the elaboration of types, so we don't need to
1854 save them. This causes them to be marked as part of the outer function
1855 and then discarded. */
1856 get_pending_sizes ();
1857 }
1858
1859
1860 /* Helper for the genericization callback. Return a dereference of VAL
1861 if it is of a reference type. */
1862
1863 static tree
1864 convert_from_reference (tree val)
1865 {
1866 tree value_type, ref;
1867
1868 if (TREE_CODE (TREE_TYPE (val)) != REFERENCE_TYPE)
1869 return val;
1870
1871 value_type = TREE_TYPE (TREE_TYPE (val));
1872 ref = build1 (INDIRECT_REF, value_type, val);
1873
1874 /* See if what we reference is CONST or VOLATILE, which requires
1875 looking into array types to get to the component type. */
1876
1877 while (TREE_CODE (value_type) == ARRAY_TYPE)
1878 value_type = TREE_TYPE (value_type);
1879
1880 TREE_READONLY (ref)
1881 = (TYPE_QUALS (value_type) & TYPE_QUAL_CONST);
1882 TREE_THIS_VOLATILE (ref)
1883 = (TYPE_QUALS (value_type) & TYPE_QUAL_VOLATILE);
1884
1885 TREE_SIDE_EFFECTS (ref)
1886 = (TREE_THIS_VOLATILE (ref) || TREE_SIDE_EFFECTS (val));
1887
1888 return ref;
1889 }
1890
1891 /* Helper for the genericization callback. Returns true if T denotes
1892 a RESULT_DECL with DECL_BY_REFERENCE set. */
1893
1894 static inline bool
1895 is_byref_result (tree t)
1896 {
1897 return (TREE_CODE (t) == RESULT_DECL && DECL_BY_REFERENCE (t));
1898 }
1899
1900
/* Tree walking callback for gnat_genericize.  Currently ...

   o Adjust references to the function's DECL_RESULT if it is marked
     DECL_BY_REFERENCE and so has had its type turned into a reference
     type at the end of the function compilation.  */

static tree
gnat_genericize_r (tree *stmt_p, int *walk_subtrees, void *data)
{
  /* This implementation is modeled after what the C++ front-end is
     doing, basis of the downstream passes behavior.  */

  tree stmt = *stmt_p;
  /* DATA is the pointer set used to remember already-visited trees.  */
  struct pointer_set_t *p_set = (struct pointer_set_t*) data;

  /* If we have a direct mention of the result decl, dereference.  */
  if (is_byref_result (stmt))
    {
      *stmt_p = convert_from_reference (stmt);
      *walk_subtrees = 0;
      return NULL;
    }

  /* Otherwise, no need to walk the same tree twice.  */
  if (pointer_set_contains (p_set, stmt))
    {
      *walk_subtrees = 0;
      return NULL_TREE;
    }

  /* If we are taking the address of what now is a reference, just get the
     reference value.  */
  if (TREE_CODE (stmt) == ADDR_EXPR
      && is_byref_result (TREE_OPERAND (stmt, 0)))
    {
      *stmt_p = convert (TREE_TYPE (stmt), TREE_OPERAND (stmt, 0));
      *walk_subtrees = 0;
    }

  /* Don't dereference a by-reference RESULT_DECL inside a RETURN_EXPR.  */
  else if (TREE_CODE (stmt) == RETURN_EXPR
           && TREE_OPERAND (stmt, 0)
	   && is_byref_result (TREE_OPERAND (stmt, 0)))
    *walk_subtrees = 0;

  /* Don't look inside trees that cannot embed references of interest.  */
  else if (IS_TYPE_OR_DECL_P (stmt))
    *walk_subtrees = 0;

  /* Record the (possibly rewritten) node as visited so that later walks
     over the same tree are skipped by the check above.  */
  pointer_set_insert (p_set, *stmt_p);

  return NULL;
}
1954
/* Perform lowering of Ada trees to GENERIC.  In particular:

   o Turn a DECL_BY_REFERENCE RESULT_DECL into a real by-reference decl
     and adjust all the references to this decl accordingly.  */

static void
gnat_genericize (tree fndecl)
{
  /* Prior to GCC 4, an explicit By_Reference result mechanism for a function
     was handled by simply setting TREE_ADDRESSABLE on the result type.
     Everything required to actually pass by invisible ref using the target
     mechanism (e.g. extra parameter) was handled at RTL expansion time.

     This doesn't work with GCC 4 any more for several reasons.  First, the
     gimplification process might need the creation of temporaries of this
     type, and the gimplifier ICEs on such attempts.  Second, the middle-end
     now relies on a different attribute for such cases (DECL_BY_REFERENCE on
     RESULT/PARM_DECLs), and expects the user invisible by-reference-ness to
     be explicitly accounted for by the front-end in the function body.

     We achieve the complete transformation in two steps:

     1/ create_subprog_decl performs early attribute tweaks: it clears
	TREE_ADDRESSABLE from the result type and sets DECL_BY_REFERENCE on
	the result decl.  The former ensures that the bit isn't set in the GCC
	tree saved for the function, so prevents ICEs on temporary creation.
	The latter we use here to trigger the rest of the processing.

     2/ This function performs the type transformation on the result decl
	and adjusts all the references to this decl from the function body
	accordingly.

     Clearing TREE_ADDRESSABLE from the type differs from the C++ front-end
     strategy, which escapes the gimplifier temporary creation issues by
     creating its own temporaries using TARGET_EXPR nodes.  Our way relies
     on simple specific support code in aggregate_value_p to look at the
     target function result decl explicitly.  */

  struct pointer_set_t *p_set;
  tree decl_result = DECL_RESULT (fndecl);

  /* Nothing to do unless step 1/ above flagged the result decl.  */
  if (!DECL_BY_REFERENCE (decl_result))
    return;

  /* Make the DECL_RESULT explicitly by-reference and adjust all the
     occurrences in the function body using the common tree-walking facility.
     We want to see every occurrence of the result decl to adjust the
     referencing tree, so need to use our own pointer set to control which
     trees should be visited again or not.  */

  p_set = pointer_set_create ();

  /* Turn the result type into a reference type and recompute the decl's
     layout accordingly; TREE_ADDRESSABLE is no longer needed on the decl.  */
  TREE_TYPE (decl_result) = build_reference_type (TREE_TYPE (decl_result));
  TREE_ADDRESSABLE (decl_result) = 0;
  relayout_decl (decl_result);

  walk_tree (&DECL_SAVED_TREE (fndecl), gnat_genericize_r, p_set, NULL);

  pointer_set_destroy (p_set);
}
2015
/* Finish the definition of the current subprogram BODY and compile it all the
   way to assembler language output.  ELAB_P tells if this is called for an
   elaboration routine, to be entirely discarded if empty.  */

void
end_subprog_body (tree body, bool elab_p)
{
  tree fndecl = current_function_decl;

  /* Mark the BLOCK for this level as being for this function and pop the
     level.  Since the vars in it are the parameters, clear them.  */
  BLOCK_VARS (current_binding_level->block) = 0;
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  gnat_poplevel ();

  /* We handle pending sizes via the elaboration of types, so we don't
     need to save them.  */
  get_pending_sizes ();

  /* Mark the RESULT_DECL as being in this subprogram.  */
  DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;

  DECL_SAVED_TREE (fndecl) = body;

  /* Pop back to the enclosing function, if any, and reset cfun.  */
  current_function_decl = DECL_CONTEXT (fndecl);
  set_cfun (NULL);

  /* We cannot track the location of errors past this point.  */
  error_gnat_node = Empty;

  /* If we're only annotating types, don't actually compile this function.  */
  if (type_annotate_only)
    return;

  /* Perform the required pre-gimplification transformations on the tree.  */
  gnat_genericize (fndecl);

  /* We do different things for nested and non-nested functions.
     ??? This should be in cgraph.  */
  if (!DECL_CONTEXT (fndecl))
    {
      gnat_gimplify_function (fndecl);

      /* If this is an empty elaboration proc, just discard the node.
	 Otherwise, compile further.  */
      if (elab_p && empty_body_p (gimple_body (fndecl)))
	cgraph_remove_node (cgraph_node (fndecl));
      else
	cgraph_finalize_function (fndecl, false);
    }
  else
    /* Register this function with cgraph just far enough to get it
       added to our parent's nested function list.  */
    (void) cgraph_node (fndecl);
}
2072
2073 /* Convert FNDECL's code to GIMPLE and handle any nested functions. */
2074
2075 static void
2076 gnat_gimplify_function (tree fndecl)
2077 {
2078 struct cgraph_node *cgn;
2079
2080 dump_function (TDI_original, fndecl);
2081 gimplify_function_tree (fndecl);
2082 dump_function (TDI_generic, fndecl);
2083
2084 /* Convert all nested functions to GIMPLE now. We do things in this order
2085 so that items like VLA sizes are expanded properly in the context of the
2086 correct function. */
2087 cgn = cgraph_node (fndecl);
2088 for (cgn = cgn->nested; cgn; cgn = cgn->next_nested)
2089 gnat_gimplify_function (cgn->decl);
2090 }
2091
/* Record DECL, a built-in function decl, in the current binding level
   (with no associated GNAT node) and return it.  */

tree
gnat_builtin_function (tree decl)
{
  gnat_pushdecl (decl, Empty);
  return decl;
}
2098
2099 /* Return an integer type with the number of bits of precision given by
2100 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2101 it is a signed type. */
2102
2103 tree
2104 gnat_type_for_size (unsigned precision, int unsignedp)
2105 {
2106 tree t;
2107 char type_name[20];
2108
2109 if (precision <= 2 * MAX_BITS_PER_WORD
2110 && signed_and_unsigned_types[precision][unsignedp])
2111 return signed_and_unsigned_types[precision][unsignedp];
2112
2113 if (unsignedp)
2114 t = make_unsigned_type (precision);
2115 else
2116 t = make_signed_type (precision);
2117
2118 if (precision <= 2 * MAX_BITS_PER_WORD)
2119 signed_and_unsigned_types[precision][unsignedp] = t;
2120
2121 if (!TYPE_NAME (t))
2122 {
2123 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2124 TYPE_NAME (t) = get_identifier (type_name);
2125 }
2126
2127 return t;
2128 }
2129
2130 /* Likewise for floating-point types. */
2131
2132 static tree
2133 float_type_for_precision (int precision, enum machine_mode mode)
2134 {
2135 tree t;
2136 char type_name[20];
2137
2138 if (float_types[(int) mode])
2139 return float_types[(int) mode];
2140
2141 float_types[(int) mode] = t = make_node (REAL_TYPE);
2142 TYPE_PRECISION (t) = precision;
2143 layout_type (t);
2144
2145 gcc_assert (TYPE_MODE (t) == mode);
2146 if (!TYPE_NAME (t))
2147 {
2148 sprintf (type_name, "FLOAT_%d", precision);
2149 TYPE_NAME (t) = get_identifier (type_name);
2150 }
2151
2152 return t;
2153 }
2154
2155 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2156 an unsigned type; otherwise a signed type is returned. */
2157
2158 tree
2159 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2160 {
2161 if (mode == BLKmode)
2162 return NULL_TREE;
2163 else if (mode == VOIDmode)
2164 return void_type_node;
2165 else if (COMPLEX_MODE_P (mode))
2166 return NULL_TREE;
2167 else if (SCALAR_FLOAT_MODE_P (mode))
2168 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2169 else if (SCALAR_INT_MODE_P (mode))
2170 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2171 else
2172 return NULL_TREE;
2173 }
2174
2175 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2176
2177 tree
2178 gnat_unsigned_type (tree type_node)
2179 {
2180 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2181
2182 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2183 {
2184 type = copy_node (type);
2185 TREE_TYPE (type) = type_node;
2186 }
2187 else if (TREE_TYPE (type_node)
2188 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2189 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2190 {
2191 type = copy_node (type);
2192 TREE_TYPE (type) = TREE_TYPE (type_node);
2193 }
2194
2195 return type;
2196 }
2197
2198 /* Return the signed version of a TYPE_NODE, a scalar type. */
2199
2200 tree
2201 gnat_signed_type (tree type_node)
2202 {
2203 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2204
2205 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2206 {
2207 type = copy_node (type);
2208 TREE_TYPE (type) = type_node;
2209 }
2210 else if (TREE_TYPE (type_node)
2211 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2212 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2213 {
2214 type = copy_node (type);
2215 TREE_TYPE (type) = TREE_TYPE (type_node);
2216 }
2217
2218 return type;
2219 }
2220
2221 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2222 transparently converted to each other. */
2223
2224 int
2225 gnat_types_compatible_p (tree t1, tree t2)
2226 {
2227 enum tree_code code;
2228
2229 /* This is the default criterion. */
2230 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
2231 return 1;
2232
2233 /* We only check structural equivalence here. */
2234 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
2235 return 0;
2236
2237 /* Array types are also compatible if they are constrained and have
2238 the same component type and the same domain. */
2239 if (code == ARRAY_TYPE
2240 && TREE_TYPE (t1) == TREE_TYPE (t2)
2241 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
2242 || (TYPE_DOMAIN (t1)
2243 && TYPE_DOMAIN (t2)
2244 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
2245 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
2246 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
2247 TYPE_MAX_VALUE (TYPE_DOMAIN (t2))))))
2248 return 1;
2249
2250 /* Padding record types are also compatible if they pad the same
2251 type and have the same constant size. */
2252 if (code == RECORD_TYPE
2253 && TYPE_IS_PADDING_P (t1) && TYPE_IS_PADDING_P (t2)
2254 && TREE_TYPE (TYPE_FIELDS (t1)) == TREE_TYPE (TYPE_FIELDS (t2))
2255 && tree_int_cst_equal (TYPE_SIZE (t1), TYPE_SIZE (t2)))
2256 return 1;
2257
2258 return 0;
2259 }
2260 \f
/* EXP is an expression for the size of an object.  If this size contains
   discriminant references, replace them with the maximum (if MAX_P) or
   minimum (if !MAX_P) possible value of the discriminant.  */

tree
max_size (tree exp, bool max_p)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
    case tcc_constant:
      /* Declarations and constants cannot contain discriminant
	 references, so return them unchanged.  */
      return exp;

    case tcc_vl_exp:
      if (code == CALL_EXPR)
	{
	  tree *argarray;
	  int i, n = call_expr_nargs (exp);
	  gcc_assert (n > 0);

	  /* Rebuild the call with every argument processed recursively.  */
	  argarray = (tree *) alloca (n * sizeof (tree));
	  for (i = 0; i < n; i++)
	    argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
	  return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
	}
      break;

    case tcc_reference:
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
	 modify.  Otherwise, we treat it like a variable.  */
      if (!CONTAINS_PLACEHOLDER_P (exp))
	return exp;

      /* The placeholder stands for a discriminant; substitute the extreme
	 value of the discriminant's type, always maximizing from here on.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      return
	max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);

    case tcc_comparison:
      /* A comparison can only yield 0 or 1, which are its two bounds.  */
      return max_p ? size_one_node : size_zero_node;

    case tcc_unary:
    case tcc_binary:
    case tcc_expression:
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  if (code == NON_LVALUE_EXPR)
	    return max_size (TREE_OPERAND (exp, 0), max_p);
	  else
	    /* Negation flips which bound of the operand is wanted.  */
	    return
	      fold_build1 (code, type,
			   max_size (TREE_OPERAND (exp, 0),
				     code == NEGATE_EXPR ? !max_p : max_p));

	case 2:
	  if (code == COMPOUND_EXPR)
	    return max_size (TREE_OPERAND (exp, 1), max_p);

	  /* Calculate "(A ? B : C) - D" as "A ? B - D : C - D" which
	     may provide a tighter bound on max_size.  */
	  if (code == MINUS_EXPR
	      && TREE_CODE (TREE_OPERAND (exp, 0)) == COND_EXPR)
	    {
	      tree lhs = fold_build2 (MINUS_EXPR, type,
				      TREE_OPERAND (TREE_OPERAND (exp, 0), 1),
				      TREE_OPERAND (exp, 1));
	      tree rhs = fold_build2 (MINUS_EXPR, type,
				      TREE_OPERAND (TREE_OPERAND (exp, 0), 2),
				      TREE_OPERAND (exp, 1));
	      return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
				  max_size (lhs, max_p),
				  max_size (rhs, max_p));
	    }

	  {
	    /* For MINUS_EXPR, maximizing the difference means minimizing
	       the subtrahend, hence the flipped MAX_P on the RHS.  */
	    tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
	    tree rhs = max_size (TREE_OPERAND (exp, 1),
				 code == MINUS_EXPR ? !max_p : max_p);

	    /* Special-case wanting the maximum value of a MIN_EXPR.
	       In that case, if one side overflows, return the other.
	       sizetype is signed, but we know sizes are non-negative.
	       Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
	       overflowing or the maximum possible value and the RHS
	       a variable.  */
	    if (max_p
		&& code == MIN_EXPR
		&& TREE_CODE (rhs) == INTEGER_CST
		&& TREE_OVERFLOW (rhs))
	      return lhs;
	    else if (max_p
		     && code == MIN_EXPR
		     && TREE_CODE (lhs) == INTEGER_CST
		     && TREE_OVERFLOW (lhs))
	      return rhs;
	    else if ((code == MINUS_EXPR || code == PLUS_EXPR)
		     && ((TREE_CODE (lhs) == INTEGER_CST
			  && TREE_OVERFLOW (lhs))
			 || operand_equal_p (lhs, TYPE_MAX_VALUE (type), 0))
		     && !TREE_CONSTANT (rhs))
	      return lhs;
	    else
	      return fold_build2 (code, type, lhs, rhs);
	  }

	case 3:
	  if (code == SAVE_EXPR)
	    return exp;
	  else if (code == COND_EXPR)
	    /* The extreme of a conditional is the extreme of the extrema
	       of its two arms.  */
	    return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
				max_size (TREE_OPERAND (exp, 1), max_p),
				max_size (TREE_OPERAND (exp, 2), max_p));
	}

      /* Other tree classes cannot happen.  */
    default:
      break;
    }

  gcc_unreachable ();
}
2385 \f
/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
   EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
   Return a constructor for the template.  */

tree
build_template (tree template_type, tree array_type, tree expr)
{
  tree template_elts = NULL_TREE;
  tree bound_list = NULL_TREE;
  tree field;

  /* Look through padding and justified modular wrappers to reach the
     underlying array type.  */
  while (TREE_CODE (array_type) == RECORD_TYPE
	 && (TYPE_IS_PADDING_P (array_type)
	     || TYPE_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
	  && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.  Go down the
     field list of the template instead of the type chain because this
     array might be an Ada array of arrays and we can't tell where the
     nested arrays stop being the underlying object.

     Each iteration advances FIELD by two (a min/max field pair) and
     advances either BOUND_LIST or the array type by one dimension.  */

  for (field = TYPE_FIELDS (template_type); field;
       (bound_list
	? (bound_list = TREE_CHAIN (bound_list))
	: (array_type = TREE_TYPE (array_type))),
       field = TREE_CHAIN (TREE_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
	 for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
	 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
	 This will give us a maximum range.  */
      if (bound_list)
	bounds = TREE_VALUE (bound_list);
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
	bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
      else if (expr && TREE_CODE (expr) == PARM_DECL
	       && DECL_BY_COMPONENT_PTR_P (expr))
	bounds = TREE_TYPE (field);
      else
	gcc_unreachable ();

      min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (TREE_CHAIN (field)), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
	 substitute it from OBJECT.  */
      min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
      max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);

      /* Elements are accumulated in reverse; nreverse below restores the
	 template's field order.  */
      template_elts = tree_cons (TREE_CHAIN (field), max,
				 tree_cons (field, min, template_elts));
    }

  return gnat_build_constructor (template_type, nreverse (template_elts));
}
2448 \f
2449 /* Build a 32bit VMS descriptor from a Mechanism_Type, which must specify
2450 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2451 in the type contains in its DECL_INITIAL the expression to use when
2452 a constructor is made for the type. GNAT_ENTITY is an entity used
2453 to print out an error message if the mechanism cannot be applied to
2454 an object of that type and also for the name. */
2455
2456 tree
2457 build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
2458 {
2459 tree record_type = make_node (RECORD_TYPE);
2460 tree pointer32_type;
2461 tree field_list = 0;
2462 int class;
2463 int dtype = 0;
2464 tree inner_type;
2465 int ndim;
2466 int i;
2467 tree *idx_arr;
2468 tree tem;
2469
2470 /* If TYPE is an unconstrained array, use the underlying array type. */
2471 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
2472 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
2473
2474 /* If this is an array, compute the number of dimensions in the array,
2475 get the index types, and point to the inner type. */
2476 if (TREE_CODE (type) != ARRAY_TYPE)
2477 ndim = 0;
2478 else
2479 for (ndim = 1, inner_type = type;
2480 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
2481 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
2482 ndim++, inner_type = TREE_TYPE (inner_type))
2483 ;
2484
2485 idx_arr = (tree *) alloca (ndim * sizeof (tree));
2486
2487 if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
2488 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
2489 for (i = ndim - 1, inner_type = type;
2490 i >= 0;
2491 i--, inner_type = TREE_TYPE (inner_type))
2492 idx_arr[i] = TYPE_DOMAIN (inner_type);
2493 else
2494 for (i = 0, inner_type = type;
2495 i < ndim;
2496 i++, inner_type = TREE_TYPE (inner_type))
2497 idx_arr[i] = TYPE_DOMAIN (inner_type);
2498
2499 /* Now get the DTYPE value. */
2500 switch (TREE_CODE (type))
2501 {
2502 case INTEGER_TYPE:
2503 case ENUMERAL_TYPE:
2504 case BOOLEAN_TYPE:
2505 if (TYPE_VAX_FLOATING_POINT_P (type))
2506 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2507 {
2508 case 6:
2509 dtype = 10;
2510 break;
2511 case 9:
2512 dtype = 11;
2513 break;
2514 case 15:
2515 dtype = 27;
2516 break;
2517 }
2518 else
2519 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
2520 {
2521 case 8:
2522 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
2523 break;
2524 case 16:
2525 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
2526 break;
2527 case 32:
2528 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
2529 break;
2530 case 64:
2531 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
2532 break;
2533 case 128:
2534 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
2535 break;
2536 }
2537 break;
2538
2539 case REAL_TYPE:
2540 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
2541 break;
2542
2543 case COMPLEX_TYPE:
2544 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
2545 && TYPE_VAX_FLOATING_POINT_P (type))
2546 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
2547 {
2548 case 6:
2549 dtype = 12;
2550 break;
2551 case 9:
2552 dtype = 13;
2553 break;
2554 case 15:
2555 dtype = 29;
2556 }
2557 else
2558 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
2559 break;
2560
2561 case ARRAY_TYPE:
2562 dtype = 14;
2563 break;
2564
2565 default:
2566 break;
2567 }
2568
2569 /* Get the CLASS value. */
2570 switch (mech)
2571 {
2572 case By_Descriptor_A:
2573 case By_Short_Descriptor_A:
2574 class = 4;
2575 break;
2576 case By_Descriptor_NCA:
2577 case By_Short_Descriptor_NCA:
2578 class = 10;
2579 break;
2580 case By_Descriptor_SB:
2581 case By_Short_Descriptor_SB:
2582 class = 15;
2583 break;
2584 case By_Descriptor:
2585 case By_Short_Descriptor:
2586 case By_Descriptor_S:
2587 case By_Short_Descriptor_S:
2588 default:
2589 class = 1;
2590 break;
2591 }
2592
2593 /* Make the type for a descriptor for VMS. The first four fields
2594 are the same for all types. */
2595
2596 field_list
2597 = chainon (field_list,
2598 make_descriptor_field
2599 ("LENGTH", gnat_type_for_size (16, 1), record_type,
2600 size_in_bytes ((mech == By_Descriptor_A ||
2601 mech == By_Short_Descriptor_A)
2602 ? inner_type : type)));
2603
2604 field_list = chainon (field_list,
2605 make_descriptor_field ("DTYPE",
2606 gnat_type_for_size (8, 1),
2607 record_type, size_int (dtype)));
2608 field_list = chainon (field_list,
2609 make_descriptor_field ("CLASS",
2610 gnat_type_for_size (8, 1),
2611 record_type, size_int (class)));
2612
2613 /* Of course this will crash at run-time if the address space is not
2614 within the low 32 bits, but there is nothing else we can do. */
2615 pointer32_type = build_pointer_type_for_mode (type, SImode, false);
2616
2617 field_list
2618 = chainon (field_list,
2619 make_descriptor_field
2620 ("POINTER", pointer32_type, record_type,
2621 build_unary_op (ADDR_EXPR,
2622 pointer32_type,
2623 build0 (PLACEHOLDER_EXPR, type))));
2624
2625 switch (mech)
2626 {
2627 case By_Descriptor:
2628 case By_Short_Descriptor:
2629 case By_Descriptor_S:
2630 case By_Short_Descriptor_S:
2631 break;
2632
2633 case By_Descriptor_SB:
2634 case By_Short_Descriptor_SB:
2635 field_list
2636 = chainon (field_list,
2637 make_descriptor_field
2638 ("SB_L1", gnat_type_for_size (32, 1), record_type,
2639 TREE_CODE (type) == ARRAY_TYPE
2640 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2641 field_list
2642 = chainon (field_list,
2643 make_descriptor_field
2644 ("SB_U1", gnat_type_for_size (32, 1), record_type,
2645 TREE_CODE (type) == ARRAY_TYPE
2646 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
2647 break;
2648
2649 case By_Descriptor_A:
2650 case By_Short_Descriptor_A:
2651 case By_Descriptor_NCA:
2652 case By_Short_Descriptor_NCA:
2653 field_list = chainon (field_list,
2654 make_descriptor_field ("SCALE",
2655 gnat_type_for_size (8, 1),
2656 record_type,
2657 size_zero_node));
2658
2659 field_list = chainon (field_list,
2660 make_descriptor_field ("DIGITS",
2661 gnat_type_for_size (8, 1),
2662 record_type,
2663 size_zero_node));
2664
2665 field_list
2666 = chainon (field_list,
2667 make_descriptor_field
2668 ("AFLAGS", gnat_type_for_size (8, 1), record_type,
2669 size_int ((mech == By_Descriptor_NCA ||
2670 mech == By_Short_Descriptor_NCA)
2671 ? 0
2672 /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS. */
2673 : (TREE_CODE (type) == ARRAY_TYPE
2674 && TYPE_CONVENTION_FORTRAN_P (type)
2675 ? 224 : 192))));
2676
2677 field_list = chainon (field_list,
2678 make_descriptor_field ("DIMCT",
2679 gnat_type_for_size (8, 1),
2680 record_type,
2681 size_int (ndim)));
2682
2683 field_list = chainon (field_list,
2684 make_descriptor_field ("ARSIZE",
2685 gnat_type_for_size (32, 1),
2686 record_type,
2687 size_in_bytes (type)));
2688
2689 /* Now build a pointer to the 0,0,0... element. */
2690 tem = build0 (PLACEHOLDER_EXPR, type);
2691 for (i = 0, inner_type = type; i < ndim;
2692 i++, inner_type = TREE_TYPE (inner_type))
2693 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
2694 convert (TYPE_DOMAIN (inner_type), size_zero_node),
2695 NULL_TREE, NULL_TREE);
2696
2697 field_list
2698 = chainon (field_list,
2699 make_descriptor_field
2700 ("A0",
2701 build_pointer_type_for_mode (inner_type, SImode, false),
2702 record_type,
2703 build1 (ADDR_EXPR,
2704 build_pointer_type_for_mode (inner_type, SImode,
2705 false),
2706 tem)));
2707
2708 /* Next come the addressing coefficients. */
2709 tem = size_one_node;
2710 for (i = 0; i < ndim; i++)
2711 {
2712 char fname[3];
2713 tree idx_length
2714 = size_binop (MULT_EXPR, tem,
2715 size_binop (PLUS_EXPR,
2716 size_binop (MINUS_EXPR,
2717 TYPE_MAX_VALUE (idx_arr[i]),
2718 TYPE_MIN_VALUE (idx_arr[i])),
2719 size_int (1)));
2720
2721 fname[0] = ((mech == By_Descriptor_NCA ||
2722 mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
2723 fname[1] = '0' + i, fname[2] = 0;
2724 field_list
2725 = chainon (field_list,
2726 make_descriptor_field (fname,
2727 gnat_type_for_size (32, 1),
2728 record_type, idx_length));
2729
2730 if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
2731 tem = idx_length;
2732 }
2733
2734 /* Finally here are the bounds. */
2735 for (i = 0; i < ndim; i++)
2736 {
2737 char fname[3];
2738
2739 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
2740 field_list
2741 = chainon (field_list,
2742 make_descriptor_field
2743 (fname, gnat_type_for_size (32, 1), record_type,
2744 TYPE_MIN_VALUE (idx_arr[i])));
2745
2746 fname[0] = 'U';
2747 field_list
2748 = chainon (field_list,
2749 make_descriptor_field
2750 (fname, gnat_type_for_size (32, 1), record_type,
2751 TYPE_MAX_VALUE (idx_arr[i])));
2752 }
2753 break;
2754
2755 default:
2756 post_error ("unsupported descriptor type for &", gnat_entity);
2757 }
2758
2759 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC");
2760 finish_record_type (record_type, field_list, 0, true);
2761 return record_type;
2762 }
2763
2764 /* Build a 64bit VMS descriptor from a Mechanism_Type, which must specify
2765 a descriptor type, and the GCC type of an object. Each FIELD_DECL
2766 in the type contains in its DECL_INITIAL the expression to use when
2767 a constructor is made for the type. GNAT_ENTITY is an entity used
2768 to print out an error message if the mechanism cannot be applied to
2769 an object of that type and also for the name. */
2770
tree
build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
{
  tree record64_type = make_node (RECORD_TYPE);
  tree pointer64_type;
  /* Chain of FIELD_DECLs making up the descriptor record, built in order.  */
  tree field_list64 = 0;
  /* VMS descriptor CLASS code, selected from MECH below.  */
  int class;
  /* VMS descriptor DTYPE code; stays 0 if TYPE has no mapping.  */
  int dtype = 0;
  tree inner_type;
  /* Number of array dimensions (0 if TYPE is not an array).  */
  int ndim;
  int i;
  /* Index types of each dimension, in the order mandated by MECH.  */
  tree *idx_arr;
  tree tem;

  /* If TYPE is an unconstrained array, use the underlying array type.  */
  if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));

  /* If this is an array, compute the number of dimensions in the array,
     get the index types, and point to the inner type.  */
  if (TREE_CODE (type) != ARRAY_TYPE)
    ndim = 0;
  else
    for (ndim = 1, inner_type = type;
	 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
	 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
	 ndim++, inner_type = TREE_TYPE (inner_type))
      ;

  idx_arr = (tree *) alloca (ndim * sizeof (tree));

  /* Fortran arrays are laid out column-major, so (except for NCA, which
     encodes the order explicitly) collect the index types in reverse.  */
  if (mech != By_Descriptor_NCA
      && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
    for (i = ndim - 1, inner_type = type;
	 i >= 0;
	 i--, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);
  else
    for (i = 0, inner_type = type;
	 i < ndim;
	 i++, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);

  /* Now get the DTYPE value.  NOTE(review): the numeric codes below follow
     the OpenVMS descriptor encoding and are taken as-is, not re-verified.  */
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      if (TYPE_VAX_FLOATING_POINT_P (type))
	/* VAX floating point: select by decimal digits of precision.  */
	switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
	  {
	  case 6:
	    dtype = 10;
	    break;
	  case 9:
	    dtype = 11;
	    break;
	  case 15:
	    dtype = 27;
	    break;
	  }
      else
	/* Plain integer: select by bit size and signedness.  */
	switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
	  {
	  case 8:
	    dtype = TYPE_UNSIGNED (type) ? 2 : 6;
	    break;
	  case 16:
	    dtype = TYPE_UNSIGNED (type) ? 3 : 7;
	    break;
	  case 32:
	    dtype = TYPE_UNSIGNED (type) ? 4 : 8;
	    break;
	  case 64:
	    dtype = TYPE_UNSIGNED (type) ? 5 : 9;
	    break;
	  case 128:
	    dtype = TYPE_UNSIGNED (type) ? 25 : 26;
	    break;
	  }
      break;

    case REAL_TYPE:
      dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
      break;

    case COMPLEX_TYPE:
      if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
	  && TYPE_VAX_FLOATING_POINT_P (type))
	switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
	  {
	  case 6:
	    dtype = 12;
	    break;
	  case 9:
	    dtype = 13;
	    break;
	  case 15:
	    dtype = 29;
	  }
      else
	dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
      break;

    case ARRAY_TYPE:
      dtype = 14;
      break;

    default:
      break;
    }

  /* Get the CLASS value.  */
  switch (mech)
    {
    case By_Descriptor_A:
      class = 4;
      break;
    case By_Descriptor_NCA:
      class = 10;
      break;
    case By_Descriptor_SB:
      class = 15;
      break;
    case By_Descriptor:
    case By_Descriptor_S:
    default:
      class = 1;
      break;
    }

  /* Make the type for a 64bit descriptor for VMS.  The first six fields
     are the same for all types.  */

  /* MBO ("must be one") marker distinguishing 64-bit descriptors.  */
  field_list64 = chainon (field_list64,
                          make_descriptor_field ("MBO",
                                                 gnat_type_for_size (16, 1),
                                                 record64_type, size_int (1)));

  field_list64 = chainon (field_list64,
                          make_descriptor_field ("DTYPE",
                                                 gnat_type_for_size (8, 1),
                                                 record64_type, size_int (dtype)));
  field_list64 = chainon (field_list64,
                          make_descriptor_field ("CLASS",
                                                 gnat_type_for_size (8, 1),
                                                 record64_type, size_int (class)));

  /* MBMO ("must be minus one") marker, pairing with MBO above.  */
  field_list64 = chainon (field_list64,
                          make_descriptor_field ("MBMO",
                                                 gnat_type_for_size (32, 1),
                                                 record64_type, ssize_int (-1)));

  /* For class A descriptors the LENGTH is the element size, not the
     whole object size.  */
  field_list64
    = chainon (field_list64,
               make_descriptor_field
               ("LENGTH", gnat_type_for_size (64, 1), record64_type,
                size_in_bytes (mech == By_Descriptor_A ? inner_type : type)));

  pointer64_type = build_pointer_type_for_mode (type, DImode, false);

  field_list64
    = chainon (field_list64,
               make_descriptor_field
               ("POINTER", pointer64_type, record64_type,
                build_unary_op (ADDR_EXPR,
                                pointer64_type,
                                build0 (PLACEHOLDER_EXPR, type))));

  /* The remaining fields depend on the descriptor class.  */
  switch (mech)
    {
    case By_Descriptor:
    case By_Descriptor_S:
      break;

    case By_Descriptor_SB:
      /* SB descriptors carry the bounds of a one-dimensional array.  */
      field_list64
	= chainon (field_list64,
		   make_descriptor_field
		   ("SB_L1", gnat_type_for_size (64, 1), record64_type,
		    TREE_CODE (type) == ARRAY_TYPE
		    ? TYPE_MIN_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
      field_list64
	= chainon (field_list64,
		   make_descriptor_field
		   ("SB_U1", gnat_type_for_size (64, 1), record64_type,
		    TREE_CODE (type) == ARRAY_TYPE
		    ? TYPE_MAX_VALUE (TYPE_DOMAIN (type)) : size_zero_node));
      break;

    case By_Descriptor_A:
    case By_Descriptor_NCA:
      field_list64 = chainon (field_list64,
			      make_descriptor_field ("SCALE",
						     gnat_type_for_size (8, 1),
						     record64_type,
						     size_zero_node));

      field_list64 = chainon (field_list64,
			      make_descriptor_field ("DIGITS",
						     gnat_type_for_size (8, 1),
						     record64_type,
						     size_zero_node));

      field_list64
	= chainon (field_list64,
		   make_descriptor_field
		   ("AFLAGS", gnat_type_for_size (8, 1), record64_type,
		    size_int (mech == By_Descriptor_NCA
			      ? 0
			      /* Set FL_COLUMN, FL_COEFF, and FL_BOUNDS.  */
			      : (TREE_CODE (type) == ARRAY_TYPE
				 && TYPE_CONVENTION_FORTRAN_P (type)
				 ? 224 : 192))));

      field_list64 = chainon (field_list64,
			      make_descriptor_field ("DIMCT",
						     gnat_type_for_size (8, 1),
						     record64_type,
						     size_int (ndim)));

      /* Padding field, must be zero.  */
      field_list64 = chainon (field_list64,
			      make_descriptor_field ("MBZ",
						     gnat_type_for_size (32, 1),
						     record64_type,
						     size_int (0)));
      field_list64 = chainon (field_list64,
			      make_descriptor_field ("ARSIZE",
						     gnat_type_for_size (64, 1),
						     record64_type,
						     size_in_bytes (type)));

      /* Now build a pointer to the 0,0,0... element.  */
      tem = build0 (PLACEHOLDER_EXPR, type);
      for (i = 0, inner_type = type; i < ndim;
	   i++, inner_type = TREE_TYPE (inner_type))
	tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
		      convert (TYPE_DOMAIN (inner_type), size_zero_node),
		      NULL_TREE, NULL_TREE);

      field_list64
	= chainon (field_list64,
		   make_descriptor_field
		   ("A0",
		    build_pointer_type_for_mode (inner_type, DImode, false),
		    record64_type,
		    build1 (ADDR_EXPR,
			    build_pointer_type_for_mode (inner_type, DImode,
							 false),
			    tem)));

      /* Next come the addressing coefficients.  */
      tem = size_one_node;
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];
	  /* Stride of dimension I: product of the extents of the
	     dimensions processed so far.  */
	  tree idx_length
	    = size_binop (MULT_EXPR, tem,
			  size_binop (PLUS_EXPR,
				      size_binop (MINUS_EXPR,
						  TYPE_MAX_VALUE (idx_arr[i]),
						  TYPE_MIN_VALUE (idx_arr[i])),
				      size_int (1)));

	  /* Field names are S0..Sn for NCA, M0..Mn otherwise.  */
	  fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
	  fname[1] = '0' + i, fname[2] = 0;
	  field_list64
	    = chainon (field_list64,
		       make_descriptor_field (fname,
					      gnat_type_for_size (64, 1),
					      record64_type, idx_length));

	  if (mech == By_Descriptor_NCA)
	    tem = idx_length;
	}

      /* Finally here are the bounds.  */
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];

	  fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
	  field_list64
	    = chainon (field_list64,
		       make_descriptor_field
		       (fname, gnat_type_for_size (64, 1), record64_type,
			TYPE_MIN_VALUE (idx_arr[i])));

	  fname[0] = 'U';
	  field_list64
	    = chainon (field_list64,
		       make_descriptor_field
		       (fname, gnat_type_for_size (64, 1), record64_type,
			TYPE_MAX_VALUE (idx_arr[i])));
	}
      break;

    default:
      post_error ("unsupported descriptor type for &", gnat_entity);
    }

  TYPE_NAME (record64_type) = create_concat_name (gnat_entity, "DESC64");
  finish_record_type (record64_type, field_list64, 0, true);
  return record64_type;
}
3077
3078 /* Utility routine for above code to make a field. */
3079
3080 static tree
3081 make_descriptor_field (const char *name, tree type,
3082 tree rec_type, tree initial)
3083 {
3084 tree field
3085 = create_field_decl (get_identifier (name), type, rec_type, 0, 0, 0, 0);
3086
3087 DECL_INITIAL (field) = initial;
3088 return field;
3089 }
3090
3091 /* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
3092 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3093 which the VMS descriptor is passed. */
3094
static tree
convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
{
  tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
  /* Dereference the pointer to get at the descriptor record itself.  */
  tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
  /* The CLASS field is the 3rd field in the descriptor.  */
  tree class = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
  /* The POINTER field is the 6th field in the descriptor.  */
  tree pointer64 = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (class)));

  /* Retrieve the value of the POINTER field.  */
  tree gnu_expr64
    = build3 (COMPONENT_REF, TREE_TYPE (pointer64), desc, pointer64, NULL_TREE);

  /* For a regular pointer target, the data pointer is all we need.  */
  if (POINTER_TYPE_P (gnu_type))
    return convert (gnu_type, gnu_expr64);

  /* For a fat pointer target, we must also materialize a bounds template
     from (or next to) the descriptor.  */
  else if (TYPE_FAT_POINTER_P (gnu_type))
    {
      tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
      tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
      tree template_type = TREE_TYPE (p_bounds_type);
      tree min_field = TYPE_FIELDS (template_type);
      tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
      tree template, template_addr, aflags, dimct, t, u;
      /* See the head comment of build_vms_descriptor.  The descriptor was
	 built by us, so its CLASS field's DECL_INITIAL is a known constant.  */
      int iclass = TREE_INT_CST_LOW (DECL_INITIAL (class));
      tree lfield, ufield;

      /* Convert POINTER to the type of the P_ARRAY field.  */
      gnu_expr64 = convert (p_array_type, gnu_expr64);

      switch (iclass)
	{
	case 1:  /* Class S  */
	case 15: /* Class SB */
	  /* Build {1, LENGTH} template; LENGTH64 is the 5th field.  */
	  t = TREE_CHAIN (TREE_CHAIN (class));
	  t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  t = tree_cons (min_field,
			 convert (TREE_TYPE (min_field), integer_one_node),
			 tree_cons (max_field,
				    convert (TREE_TYPE (max_field), t),
				    NULL_TREE));
	  template = gnat_build_constructor (template_type, t);
	  template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);

	  /* For class S, we are done.  */
	  if (iclass == 1)
	    break;

	  /* Test that we really have a SB descriptor, like DEC Ada.  */
	  t = build3 (COMPONENT_REF, TREE_TYPE (class), desc, class, NULL);
	  u = convert (TREE_TYPE (class), DECL_INITIAL (class));
	  u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
	  /* If so, there is already a template in the descriptor and
	     it is located right after the POINTER field.  The fields are
	     64bits so they must be repacked.  */
	  t = TREE_CHAIN (pointer64);
	  lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);

	  t = TREE_CHAIN (t);
	  ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  ufield = convert
	    (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);

	  /* Build the template in the form of a constructor.  */
	  t = tree_cons (TYPE_FIELDS (template_type), lfield,
			 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
				    ufield, NULL_TREE));
	  template = gnat_build_constructor (template_type, t);

	  /* Otherwise use the {1, LENGTH} template we build above.  */
	  template_addr = build3 (COND_EXPR, p_bounds_type, u,
				  build_unary_op (ADDR_EXPR, p_bounds_type,
						  template),
				  template_addr);
	  break;

	case 4:  /* Class A */
	  /* The AFLAGS field is the 3rd field after the pointer in the
	     descriptor.  */
	  t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer64)));
	  aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  /* The DIMCT field is the next field in the descriptor after
	     aflags.  */
	  t = TREE_CHAIN (t);
	  dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  /* Raise CONSTRAINT_ERROR if either more than 1 dimension
	     or FL_COEFF or FL_BOUNDS not set.  192 = FL_COEFF | FL_BOUNDS
	     per the AFLAGS encoding used in build_vms_descriptor.  */
	  u = build_int_cst (TREE_TYPE (aflags), 192);
	  u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
			       build_binary_op (NE_EXPR, integer_type_node,
						dimct,
						convert (TREE_TYPE (dimct),
							 size_one_node)),
			       build_binary_op (NE_EXPR, integer_type_node,
						build2 (BIT_AND_EXPR,
							TREE_TYPE (aflags),
							aflags, u),
						u));
	  /* There is already a template in the descriptor and it is located
	     in block 3.  The fields are 64bits so they must be repacked.  */
	  t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN
              (t)))));
	  lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);

	  t = TREE_CHAIN (t);
	  ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  ufield = convert
	    (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (template_type))), ufield);

	  /* Build the template in the form of a constructor.  */
	  t = tree_cons (TYPE_FIELDS (template_type), lfield,
			 tree_cons (TREE_CHAIN (TYPE_FIELDS (template_type)),
				    ufield, NULL_TREE));
	  template = gnat_build_constructor (template_type, t);
	  /* Fold the run-time check above into the template expression.  */
	  template = build3 (COND_EXPR, p_bounds_type, u,
			    build_call_raise (CE_Length_Check_Failed, Empty,
					      N_Raise_Constraint_Error),
			    template);
	  template_addr = build_unary_op (ADDR_EXPR, p_bounds_type, template);
	  break;

	case 10: /* Class NCA */
	default:
	  post_error ("unsupported descriptor type for &", gnat_subprog);
	  template_addr = integer_zero_node;
	  break;
	}

      /* Build the fat pointer in the form of a constructor.  */
      t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr64,
		     tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
				template_addr, NULL_TREE));
      return gnat_build_constructor (gnu_type, t);
    }

  else
    gcc_unreachable ();
}
3238
3239 /* Convert GNU_EXPR, a pointer to a 32bit VMS descriptor, to GNU_TYPE, a
3240 regular pointer or fat pointer type. GNAT_SUBPROG is the subprogram to
3241 which the VMS descriptor is passed. */
3242
static tree
convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
{
  tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
  /* Dereference the pointer to get at the descriptor record itself.  */
  tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
  /* The CLASS field is the 3rd field in the descriptor.  */
  tree class = TREE_CHAIN (TREE_CHAIN (TYPE_FIELDS (desc_type)));
  /* The POINTER field is the 4th field in the descriptor.  */
  tree pointer = TREE_CHAIN (class);

  /* Retrieve the value of the POINTER field.  */
  tree gnu_expr32
    = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);

  /* For a regular pointer target, the data pointer is all we need.  */
  if (POINTER_TYPE_P (gnu_type))
    return convert (gnu_type, gnu_expr32);

  /* For a fat pointer target, we must also materialize a bounds template
     from (or next to) the descriptor.  */
  else if (TYPE_FAT_POINTER_P (gnu_type))
    {
      tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
      tree p_bounds_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (gnu_type)));
      tree template_type = TREE_TYPE (p_bounds_type);
      tree min_field = TYPE_FIELDS (template_type);
      tree max_field = TREE_CHAIN (TYPE_FIELDS (template_type));
      tree template, template_addr, aflags, dimct, t, u;
      /* See the head comment of build_vms_descriptor.  The descriptor was
	 built by us, so its CLASS field's DECL_INITIAL is a known constant.  */
      int iclass = TREE_INT_CST_LOW (DECL_INITIAL (class));

      /* Convert POINTER to the type of the P_ARRAY field.  */
      gnu_expr32 = convert (p_array_type, gnu_expr32);

      switch (iclass)
	{
	case 1:  /* Class S  */
	case 15: /* Class SB */
	  /* Build {1, LENGTH} template; LENGTH is the 1st field.  */
	  t = TYPE_FIELDS (desc_type);
	  t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  t = tree_cons (min_field,
			 convert (TREE_TYPE (min_field), integer_one_node),
			 tree_cons (max_field,
				    convert (TREE_TYPE (max_field), t),
				    NULL_TREE));
	  template = gnat_build_constructor (template_type, t);
	  template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template);

	  /* For class S, we are done.  */
	  if (iclass == 1)
	    break;

	  /* Test that we really have a SB descriptor, like DEC Ada.  */
	  t = build3 (COMPONENT_REF, TREE_TYPE (class), desc, class, NULL);
	  u = convert (TREE_TYPE (class), DECL_INITIAL (class));
	  u = build_binary_op (EQ_EXPR, integer_type_node, t, u);
	  /* If so, there is already a template in the descriptor and
	     it is located right after the POINTER field.  */
	  t = TREE_CHAIN (pointer);
	  template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  /* Otherwise use the {1, LENGTH} template we build above.  */
	  template_addr = build3 (COND_EXPR, p_bounds_type, u,
				  build_unary_op (ADDR_EXPR, p_bounds_type,
						  template),
				  template_addr);
	  break;

	case 4:  /* Class A */
	  /* The AFLAGS field is the 7th field in the descriptor.  */
	  t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (pointer)));
	  aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  /* The DIMCT field is the 8th field in the descriptor.  */
	  t = TREE_CHAIN (t);
	  dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  /* Raise CONSTRAINT_ERROR if either more than 1 dimension
	     or FL_COEFF or FL_BOUNDS not set.  192 = FL_COEFF | FL_BOUNDS
	     per the AFLAGS encoding used by the descriptor builder.  */
	  u = build_int_cst (TREE_TYPE (aflags), 192);
	  u = build_binary_op (TRUTH_OR_EXPR, integer_type_node,
			       build_binary_op (NE_EXPR, integer_type_node,
						dimct,
						convert (TREE_TYPE (dimct),
							 size_one_node)),
			       build_binary_op (NE_EXPR, integer_type_node,
						build2 (BIT_AND_EXPR,
							TREE_TYPE (aflags),
							aflags, u),
						u));
	  /* There is already a template in the descriptor and it is
	     located at the start of block 3 (12th field).  */
	  t = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (t))));
	  template = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  /* Fold the run-time check above into the template expression.  */
	  template = build3 (COND_EXPR, p_bounds_type, u,
			    build_call_raise (CE_Length_Check_Failed, Empty,
					      N_Raise_Constraint_Error),
			    template);
	  template_addr = build_unary_op (ADDR_EXPR, p_bounds_type, template);
	  break;

	case 10: /* Class NCA */
	default:
	  post_error ("unsupported descriptor type for &", gnat_subprog);
	  template_addr = integer_zero_node;
	  break;
	}

      /* Build the fat pointer in the form of a constructor.  */
      t = tree_cons (TYPE_FIELDS (gnu_type), gnu_expr32,
		     tree_cons (TREE_CHAIN (TYPE_FIELDS (gnu_type)),
				template_addr, NULL_TREE));

      return gnat_build_constructor (gnu_type, t);
    }

  else
    gcc_unreachable ();
}
3357
3358 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
3359 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
3360 pointer type of GNU_EXPR. GNAT_SUBPROG is the subprogram to which the
3361 VMS descriptor is passed. */
3362
3363 static tree
3364 convert_vms_descriptor (tree gnu_type, tree gnu_expr, tree gnu_expr_alt_type,
3365 Entity_Id gnat_subprog)
3366 {
3367 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
3368 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
3369 tree mbo = TYPE_FIELDS (desc_type);
3370 const char *mbostr = IDENTIFIER_POINTER (DECL_NAME (mbo));
3371 tree mbmo = TREE_CHAIN (TREE_CHAIN (TREE_CHAIN (mbo)));
3372 tree is64bit, gnu_expr32, gnu_expr64;
3373
3374 /* If the field name is not MBO, it must be 32-bit and no alternate.
3375 Otherwise primary must be 64-bit and alternate 32-bit. */
3376 if (strcmp (mbostr, "MBO") != 0)
3377 return convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
3378
3379 /* Build the test for 64-bit descriptor. */
3380 mbo = build3 (COMPONENT_REF, TREE_TYPE (mbo), desc, mbo, NULL_TREE);
3381 mbmo = build3 (COMPONENT_REF, TREE_TYPE (mbmo), desc, mbmo, NULL_TREE);
3382 is64bit
3383 = build_binary_op (TRUTH_ANDIF_EXPR, integer_type_node,
3384 build_binary_op (EQ_EXPR, integer_type_node,
3385 convert (integer_type_node, mbo),
3386 integer_one_node),
3387 build_binary_op (EQ_EXPR, integer_type_node,
3388 convert (integer_type_node, mbmo),
3389 integer_minus_one_node));
3390
3391 /* Build the 2 possible end results. */
3392 gnu_expr64 = convert_vms_descriptor64 (gnu_type, gnu_expr, gnat_subprog);
3393 gnu_expr = fold_convert (gnu_expr_alt_type, gnu_expr);
3394 gnu_expr32 = convert_vms_descriptor32 (gnu_type, gnu_expr, gnat_subprog);
3395
3396 return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
3397 }
3398
3399 /* Build a stub for the subprogram specified by the GCC tree GNU_SUBPROG
3400 and the GNAT node GNAT_SUBPROG. */
3401
void
build_function_stub (tree gnu_subprog, Entity_Id gnat_subprog)
{
  tree gnu_subprog_type, gnu_subprog_addr, gnu_subprog_call;
  tree gnu_stub_param, gnu_param_list, gnu_arg_types, gnu_param;
  tree gnu_stub_decl = DECL_FUNCTION_STUB (gnu_subprog);
  tree gnu_body;

  gnu_subprog_type = TREE_TYPE (gnu_subprog);
  gnu_param_list = NULL_TREE;

  /* Open the stub's body; all statements below are emitted into it.  */
  begin_subprog_body (gnu_stub_decl);
  gnat_pushlevel ();

  start_stmt_group ();

  /* Loop over the parameters of the stub and translate any of them
     passed by descriptor into a by reference one.  Walk the stub's
     DECL_ARGUMENTS and the target's TYPE_ARG_TYPES in lockstep.  */
  for (gnu_stub_param = DECL_ARGUMENTS (gnu_stub_decl),
       gnu_arg_types = TYPE_ARG_TYPES (gnu_subprog_type);
       gnu_stub_param;
       gnu_stub_param = TREE_CHAIN (gnu_stub_param),
       gnu_arg_types = TREE_CHAIN (gnu_arg_types))
    {
      if (DECL_BY_DESCRIPTOR_P (gnu_stub_param))
	gnu_param
	  = convert_vms_descriptor (TREE_VALUE (gnu_arg_types),
				    gnu_stub_param,
				    DECL_PARM_ALT_TYPE (gnu_stub_param),
				    gnat_subprog);
      else
	gnu_param = gnu_stub_param;

      /* Accumulate in reverse; nreverse below restores the order.  */
      gnu_param_list = tree_cons (NULL_TREE, gnu_param, gnu_param_list);
    }

  gnu_body = end_stmt_group ();

  /* Invoke the internal subprogram.  */
  gnu_subprog_addr = build1 (ADDR_EXPR, build_pointer_type (gnu_subprog_type),
			     gnu_subprog);
  gnu_subprog_call = build_call_list (TREE_TYPE (gnu_subprog_type),
				      gnu_subprog_addr,
				      nreverse (gnu_param_list));

  /* Propagate the return value, if any.  */
  if (VOID_TYPE_P (TREE_TYPE (gnu_subprog_type)))
    append_to_statement_list (gnu_subprog_call, &gnu_body);
  else
    append_to_statement_list (build_return_expr (DECL_RESULT (gnu_stub_decl),
						 gnu_subprog_call),
			      &gnu_body);

  gnat_poplevel ();

  /* Close out the stub's compilation.  */
  allocate_struct_function (gnu_stub_decl, false);
  end_subprog_body (gnu_body, false);
}
3460 \f
3461 /* Build a type to be used to represent an aliased object whose nominal
3462 type is an unconstrained array. This consists of a RECORD_TYPE containing
3463 a field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an
3464 ARRAY_TYPE. If ARRAY_TYPE is that of the unconstrained array, this
3465 is used to represent an arbitrary unconstrained object. Use NAME
3466 as the name of the record. */
3467
3468 tree
3469 build_unc_object_type (tree template_type, tree object_type, tree name)
3470 {
3471 tree type = make_node (RECORD_TYPE);
3472 tree template_field = create_field_decl (get_identifier ("BOUNDS"),
3473 template_type, type, 0, 0, 0, 1);
3474 tree array_field = create_field_decl (get_identifier ("ARRAY"), object_type,
3475 type, 0, 0, 0, 1);
3476
3477 TYPE_NAME (type) = name;
3478 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3479 finish_record_type (type,
3480 chainon (chainon (NULL_TREE, template_field),
3481 array_field),
3482 0, false);
3483
3484 return type;
3485 }
3486
3487 /* Same, taking a thin or fat pointer type instead of a template type. */
3488
3489 tree
3490 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3491 tree name)
3492 {
3493 tree template_type;
3494
3495 gcc_assert (TYPE_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3496
3497 template_type
3498 = (TYPE_FAT_POINTER_P (thin_fat_ptr_type)
3499 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3500 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3501 return build_unc_object_type (template_type, object_type, name);
3502 }
3503
3504 /* Shift the component offsets within an unconstrained object TYPE to make it
3505 suitable for use as a designated type for thin pointers. */
3506
3507 void
3508 shift_unc_components_for_thin_pointers (tree type)
3509 {
3510 /* Thin pointer values designate the ARRAY data of an unconstrained object,
3511 allocated past the BOUNDS template. The designated type is adjusted to
3512 have ARRAY at position zero and the template at a negative offset, so
3513 that COMPONENT_REFs on (*thin_ptr) designate the proper location. */
3514
3515 tree bounds_field = TYPE_FIELDS (type);
3516 tree array_field = TREE_CHAIN (TYPE_FIELDS (type));
3517
3518 DECL_FIELD_OFFSET (bounds_field)
3519 = size_binop (MINUS_EXPR, size_zero_node, byte_position (array_field));
3520
3521 DECL_FIELD_OFFSET (array_field) = size_zero_node;
3522 DECL_FIELD_BIT_OFFSET (array_field) = bitsize_zero_node;
3523 }
3524 \f
3525 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
3526 In the normal case this is just two adjustments, but we have more to
3527 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
3528
void
update_pointer_to (tree old_type, tree new_type)
{
  tree ptr = TYPE_POINTER_TO (old_type);
  tree ref = TYPE_REFERENCE_TO (old_type);
  tree ptr1, ref1;
  tree type;

  /* If this is the main variant, process all the other variants first.  */
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
    for (type = TYPE_NEXT_VARIANT (old_type); type;
	 type = TYPE_NEXT_VARIANT (type))
      update_pointer_to (type, new_type);

  /* If no pointers and no references, we are done.  */
  if (!ptr && !ref)
    return;

  /* Merge the old type qualifiers in the new type.

     Each old variant has qualifiers for specific reasons, and the new
     designated type as well.  Each set of qualifiers represents useful
     information grabbed at some point, and merging the two simply unifies
     these inputs into the final type description.

     Consider for instance a volatile type frozen after an access to constant
     type designating it; after the designated type's freeze, we get here with
     a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
     when the access type was processed.  We will make a volatile and readonly
     designated type, because that's what it really is.

     We might also get here for a non-dummy OLD_TYPE variant with different
     qualifiers than those of NEW_TYPE, for instance in some cases of pointers
     to private record type elaboration (see the comments around the call to
     this routine in gnat_to_gnu_entity <E_Access_Type>).  We have to merge
     the qualifiers in those cases too, to avoid accidentally discarding the
     initial set, and will often end up with OLD_TYPE == NEW_TYPE then.  */
  new_type
    = build_qualified_type (new_type,
			    TYPE_QUALS (old_type) | TYPE_QUALS (new_type));

  /* If old type and new type are identical, there is nothing to do.  */
  if (old_type == new_type)
    return;

  /* Otherwise, first handle the simple case: retarget every pointer and
     reference (and all their variants) at NEW_TYPE.  */
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
    {
      TYPE_POINTER_TO (new_type) = ptr;
      TYPE_REFERENCE_TO (new_type) = ref;

      for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
	for (ptr1 = TYPE_MAIN_VARIANT (ptr); ptr1;
	     ptr1 = TYPE_NEXT_VARIANT (ptr1))
	  TREE_TYPE (ptr1) = new_type;

      for (; ref; ref = TYPE_NEXT_REF_TO (ref))
	for (ref1 = TYPE_MAIN_VARIANT (ref); ref1;
	     ref1 = TYPE_NEXT_VARIANT (ref1))
	  TREE_TYPE (ref1) = new_type;
    }

  /* Now deal with the unconstrained array case.  In this case the "pointer"
     is actually a RECORD_TYPE where both fields are pointers to dummy nodes.
     Turn them into pointers to the correct types using update_pointer_to.  */
  else if (!TYPE_FAT_POINTER_P (ptr))
    gcc_unreachable ();

  else
    {
      tree new_obj_rec = TYPE_OBJECT_RECORD_TYPE (new_type);
      /* Fat pointer layout: first field points to the array data, second
	 field points to the bounds template.  */
      tree array_field = TYPE_FIELDS (ptr);
      tree bounds_field = TREE_CHAIN (TYPE_FIELDS (ptr));
      tree new_ptr = TYPE_POINTER_TO (new_type);
      tree new_ref;
      tree var;

      /* Make pointers to the dummy template point to the real template.  */
      update_pointer_to
	(TREE_TYPE (TREE_TYPE (bounds_field)),
	 TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_ptr)))));

      /* The references to the template bounds present in the array type
	 are made through a PLACEHOLDER_EXPR of type NEW_PTR.  Since we
	 are updating PTR to make it a full replacement for NEW_PTR as
	 pointer to NEW_TYPE, we must rework the PLACEHOLDER_EXPR so as
	 to make it of type PTR.  */
      new_ref = build3 (COMPONENT_REF, TREE_TYPE (bounds_field),
			build0 (PLACEHOLDER_EXPR, ptr),
			bounds_field, NULL_TREE);

      /* Create the new array for the new PLACEHOLDER_EXPR and make pointers
	 to the dummy array point to it.  */
      update_pointer_to
	(TREE_TYPE (TREE_TYPE (array_field)),
	 substitute_in_type (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))),
			     TREE_CHAIN (TYPE_FIELDS (new_ptr)), new_ref));

      /* Make PTR the pointer to NEW_TYPE.  */
      TYPE_POINTER_TO (new_type) = TYPE_REFERENCE_TO (new_type)
	= TREE_TYPE (new_type) = ptr;

      for (var = TYPE_MAIN_VARIANT (ptr); var; var = TYPE_NEXT_VARIANT (var))
	SET_TYPE_UNCONSTRAINED_ARRAY (var, new_type);

      /* Now handle updating the allocation record, what the thin pointer
	 points to.  Update all pointers from the old record into the new
	 one, update the type of the array field, and recompute the size.  */
      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type), new_obj_rec);

      TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec)))
	= TREE_TYPE (TREE_TYPE (array_field));

      /* The size recomputation needs to account for alignment constraints, so
	 we let layout_type work it out.  This will reset the field offsets to
	 what they would be in a regular record, so we shift them back to what
	 we want them to be for a thin pointer designated type afterwards.  */
      DECL_SIZE (TYPE_FIELDS (new_obj_rec)) = 0;
      DECL_SIZE (TREE_CHAIN (TYPE_FIELDS (new_obj_rec))) = 0;
      TYPE_SIZE (new_obj_rec) = 0;
      layout_type (new_obj_rec);

      shift_unc_components_for_thin_pointers (new_obj_rec);

      /* We are done, at last.  */
      rest_of_record_type_compilation (ptr);
    }
}
3657 \f
3658 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
3659 unconstrained one. This involves making or finding a template. */
3660
static tree
convert_to_fat_pointer (tree type, tree expr)
{
  /* TYPE is a fat pointer record: its first field is the pointer to the
     array data and its second field the pointer to the template holding
     the bounds, as evidenced by the two constructors built below.  */
  tree template_type = TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type))));
  tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
  tree etype = TREE_TYPE (expr);
  tree template;

  /* If EXPR is null, make a fat pointer that contains null pointers to the
     template and array.  */
  if (integer_zerop (expr))
    return
      gnat_build_constructor
	(type,
	 tree_cons (TYPE_FIELDS (type),
		    convert (p_array_type, expr),
		    tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
			       convert (build_pointer_type (template_type),
					expr),
			       NULL_TREE)));

  /* If EXPR is a thin pointer, make template and data from the record.  */
  else if (TYPE_THIN_POINTER_P (etype))
    {
      tree fields = TYPE_FIELDS (TREE_TYPE (etype));

      /* EXPR may be referenced twice below (template and data), so
	 protect it against multiple evaluation first.  */
      expr = save_expr (expr);
      /* Strip a pointless &X indirection if present, otherwise
	 dereference the thin pointer to get at the record.  */
      if (TREE_CODE (expr) == ADDR_EXPR)
	expr = TREE_OPERAND (expr, 0);
      else
	expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);

      /* First field of the designated record is the template, second
	 field is the array data, whose address becomes EXPR.  */
      template = build_component_ref (expr, NULL_TREE, fields, false);
      expr = build_unary_op (ADDR_EXPR, NULL_TREE,
			     build_component_ref (expr, NULL_TREE,
						  TREE_CHAIN (fields), false));
    }

  /* Otherwise, build the constructor for the template.  */
  else
    template = build_template (template_type, TREE_TYPE (etype), expr);

  /* The final result is a constructor for the fat pointer.

     If EXPR is an argument of a foreign convention subprogram, the type it
     points to is directly the component type.  In this case, the expression
     type may not match the corresponding FIELD_DECL type at this point, so we
     call "convert" here to fix that up if necessary.  This type consistency is
     required, for instance because it ensures that possible later folding of
     COMPONENT_REFs against this constructor always yields something of the
     same type as the initial reference.

     Note that the call to "build_template" above is still fine because it
     will only refer to the provided TEMPLATE_TYPE in this case.  */
  return
    gnat_build_constructor
      (type,
       tree_cons (TYPE_FIELDS (type),
		  convert (p_array_type, expr),
		  tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
			     build_unary_op (ADDR_EXPR, NULL_TREE, template),
			     NULL_TREE)));
}
3724 \f
/* Convert to a thin pointer type, TYPE.  The only thing we know how to
   convert is something that is a fat pointer, so convert EXPR to a fat
   pointer first if it is not one already.  */
3728
3729 static tree
3730 convert_to_thin_pointer (tree type, tree expr)
3731 {
3732 if (!TYPE_FAT_POINTER_P (TREE_TYPE (expr)))
3733 expr
3734 = convert_to_fat_pointer
3735 (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))), expr);
3736
3737 /* We get the pointer to the data and use a NOP_EXPR to make it the
3738 proper GCC type. */
3739 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (TREE_TYPE (expr)),
3740 false);
3741 expr = build1 (NOP_EXPR, type, expr);
3742
3743 return expr;
3744 }
3745 \f
3746 /* Create an expression whose value is that of EXPR,
3747 converted to type TYPE. The TREE_TYPE of the value
3748 is always TYPE. This function implements all reasonable
3749 conversions; callers should filter out those that are
3750 not permitted by the language being compiled. */
3751
3752 tree
3753 convert (tree type, tree expr)
3754 {
3755 enum tree_code code = TREE_CODE (type);
3756 tree etype = TREE_TYPE (expr);
3757 enum tree_code ecode = TREE_CODE (etype);
3758
3759 /* If EXPR is already the right type, we are done. */
3760 if (type == etype)
3761 return expr;
3762
3763 /* If both input and output have padding and are of variable size, do this
3764 as an unchecked conversion. Likewise if one is a mere variant of the
3765 other, so we avoid a pointless unpad/repad sequence. */
3766 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
3767 && TYPE_IS_PADDING_P (type) && TYPE_IS_PADDING_P (etype)
3768 && (!TREE_CONSTANT (TYPE_SIZE (type))
3769 || !TREE_CONSTANT (TYPE_SIZE (etype))
3770 || gnat_types_compatible_p (type, etype)
3771 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
3772 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
3773 ;
3774
3775 /* If the output type has padding, convert to the inner type and
3776 make a constructor to build the record. */
3777 else if (code == RECORD_TYPE && TYPE_IS_PADDING_P (type))
3778 {
3779 /* If we previously converted from another type and our type is
3780 of variable size, remove the conversion to avoid the need for
3781 variable-size temporaries. Likewise for a conversion between
3782 original and packable version. */
3783 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3784 && (!TREE_CONSTANT (TYPE_SIZE (type))
3785 || (ecode == RECORD_TYPE
3786 && TYPE_NAME (etype)
3787 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
3788 expr = TREE_OPERAND (expr, 0);
3789
3790 /* If we are just removing the padding from expr, convert the original
3791 object if we have variable size in order to avoid the need for some
3792 variable-size temporaries. Likewise if the padding is a mere variant
3793 of the other, so we avoid a pointless unpad/repad sequence. */
3794 if (TREE_CODE (expr) == COMPONENT_REF
3795 && TREE_CODE (TREE_TYPE (TREE_OPERAND (expr, 0))) == RECORD_TYPE
3796 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
3797 && (!TREE_CONSTANT (TYPE_SIZE (type))
3798 || gnat_types_compatible_p (type,
3799 TREE_TYPE (TREE_OPERAND (expr, 0)))
3800 || (ecode == RECORD_TYPE
3801 && TYPE_NAME (etype)
3802 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
3803 return convert (type, TREE_OPERAND (expr, 0));
3804
3805 /* If the result type is a padded type with a self-referentially-sized
3806 field and the expression type is a record, do this as an
3807 unchecked conversion. */
3808 else if (TREE_CODE (etype) == RECORD_TYPE
3809 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
3810 return unchecked_convert (type, expr, false);
3811
3812 else
3813 return
3814 gnat_build_constructor (type,
3815 tree_cons (TYPE_FIELDS (type),
3816 convert (TREE_TYPE
3817 (TYPE_FIELDS (type)),
3818 expr),
3819 NULL_TREE));
3820 }
3821
3822 /* If the input type has padding, remove it and convert to the output type.
3823 The conditions ordering is arranged to ensure that the output type is not
3824 a padding type here, as it is not clear whether the conversion would
3825 always be correct if this was to happen. */
3826 else if (ecode == RECORD_TYPE && TYPE_IS_PADDING_P (etype))
3827 {
3828 tree unpadded;
3829
3830 /* If we have just converted to this padded type, just get the
3831 inner expression. */
3832 if (TREE_CODE (expr) == CONSTRUCTOR
3833 && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
3834 && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->index
3835 == TYPE_FIELDS (etype))
3836 unpadded
3837 = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0)->value;
3838
3839 /* Otherwise, build an explicit component reference. */
3840 else
3841 unpadded
3842 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
3843
3844 return convert (type, unpadded);
3845 }
3846
3847 /* If the input is a biased type, adjust first. */
3848 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
3849 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
3850 fold_convert (TREE_TYPE (etype),
3851 expr),
3852 TYPE_MIN_VALUE (etype)));
3853
3854 /* If the input is a justified modular type, we need to extract the actual
3855 object before converting it to any other type with the exceptions of an
3856 unconstrained array or of a mere type variant. It is useful to avoid the
3857 extraction and conversion in the type variant case because it could end
3858 up replacing a VAR_DECL expr by a constructor and we might be about the
3859 take the address of the result. */
3860 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
3861 && code != UNCONSTRAINED_ARRAY_TYPE
3862 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
3863 return convert (type, build_component_ref (expr, NULL_TREE,
3864 TYPE_FIELDS (etype), false));
3865
3866 /* If converting to a type that contains a template, convert to the data
3867 type and then build the template. */
3868 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
3869 {
3870 tree obj_type = TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (type)));
3871
3872 /* If the source already has a template, get a reference to the
3873 associated array only, as we are going to rebuild a template
3874 for the target type anyway. */
3875 expr = maybe_unconstrained_array (expr);
3876
3877 return
3878 gnat_build_constructor
3879 (type,
3880 tree_cons (TYPE_FIELDS (type),
3881 build_template (TREE_TYPE (TYPE_FIELDS (type)),
3882 obj_type, NULL_TREE),
3883 tree_cons (TREE_CHAIN (TYPE_FIELDS (type)),
3884 convert (obj_type, expr), NULL_TREE)));
3885 }
3886
3887 /* There are some special cases of expressions that we process
3888 specially. */
3889 switch (TREE_CODE (expr))
3890 {
3891 case ERROR_MARK:
3892 return expr;
3893
3894 case NULL_EXPR:
3895 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
3896 conversion in gnat_expand_expr. NULL_EXPR does not represent
3897 and actual value, so no conversion is needed. */
3898 expr = copy_node (expr);
3899 TREE_TYPE (expr) = type;
3900 return expr;
3901
3902 case STRING_CST:
3903 /* If we are converting a STRING_CST to another constrained array type,
3904 just make a new one in the proper type. */
3905 if (code == ecode && AGGREGATE_TYPE_P (etype)
3906 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
3907 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
3908 {
3909 expr = copy_node (expr);
3910 TREE_TYPE (expr) = type;
3911 return expr;
3912 }
3913 break;
3914
3915 case CONSTRUCTOR:
3916 /* If we are converting a CONSTRUCTOR to a mere variant type, just make
3917 a new one in the proper type. */
3918 if (code == ecode && gnat_types_compatible_p (type, etype))
3919 {
3920 expr = copy_node (expr);
3921 TREE_TYPE (expr) = type;
3922 return expr;
3923 }
3924
3925 /* Likewise for a conversion between original and packable version, but
3926 we have to work harder in order to preserve type consistency. */
3927 if (code == ecode
3928 && code == RECORD_TYPE
3929 && TYPE_NAME (type) == TYPE_NAME (etype))
3930 {
3931 VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
3932 unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
3933 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, len);
3934 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
3935 unsigned HOST_WIDE_INT idx;
3936 tree index, value;
3937
3938 FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
3939 {
3940 constructor_elt *elt = VEC_quick_push (constructor_elt, v, NULL);
3941 /* We expect only simple constructors. Otherwise, punt. */
3942 if (!(index == efield || index == DECL_ORIGINAL_FIELD (efield)))
3943 break;
3944 elt->index = field;
3945 elt->value = convert (TREE_TYPE (field), value);
3946 efield = TREE_CHAIN (efield);
3947 field = TREE_CHAIN (field);
3948 }
3949
3950 if (idx == len)
3951 {
3952 expr = copy_node (expr);
3953 TREE_TYPE (expr) = type;
3954 CONSTRUCTOR_ELTS (expr) = v;
3955 return expr;
3956 }
3957 }
3958 break;
3959
3960 case UNCONSTRAINED_ARRAY_REF:
3961 /* Convert this to the type of the inner array by getting the address of
3962 the array from the template. */
3963 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
3964 build_component_ref (TREE_OPERAND (expr, 0),
3965 get_identifier ("P_ARRAY"),
3966 NULL_TREE, false));
3967 etype = TREE_TYPE (expr);
3968 ecode = TREE_CODE (etype);
3969 break;
3970
3971 case VIEW_CONVERT_EXPR:
3972 {
3973 /* GCC 4.x is very sensitive to type consistency overall, and view
3974 conversions thus are very frequent. Even though just "convert"ing
3975 the inner operand to the output type is fine in most cases, it
3976 might expose unexpected input/output type mismatches in special
3977 circumstances so we avoid such recursive calls when we can. */
3978 tree op0 = TREE_OPERAND (expr, 0);
3979
3980 /* If we are converting back to the original type, we can just
3981 lift the input conversion. This is a common occurrence with
3982 switches back-and-forth amongst type variants. */
3983 if (type == TREE_TYPE (op0))
3984 return op0;
3985
3986 /* Otherwise, if we're converting between two aggregate types, we
3987 might be allowed to substitute the VIEW_CONVERT_EXPR target type
3988 in place or to just convert the inner expression. */
3989 if (AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
3990 {
3991 /* If we are converting between mere variants, we can just
3992 substitute the VIEW_CONVERT_EXPR in place. */
3993 if (gnat_types_compatible_p (type, etype))
3994 return build1 (VIEW_CONVERT_EXPR, type, op0);
3995
3996 /* Otherwise, we may just bypass the input view conversion unless
3997 one of the types is a fat pointer, which is handled by
3998 specialized code below which relies on exact type matching. */
3999 else if (!TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
4000 return convert (type, op0);
4001 }
4002 }
4003 break;
4004
4005 case INDIRECT_REF:
4006 /* If both types are record types, just convert the pointer and
4007 make a new INDIRECT_REF.
4008
4009 ??? Disable this for now since it causes problems with the
4010 code in build_binary_op for MODIFY_EXPR which wants to
4011 strip off conversions. But that code really is a mess and
4012 we need to do this a much better way some time. */
4013 if (0
4014 && (TREE_CODE (type) == RECORD_TYPE
4015 || TREE_CODE (type) == UNION_TYPE)
4016 && (TREE_CODE (etype) == RECORD_TYPE
4017 || TREE_CODE (etype) == UNION_TYPE)
4018 && !TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
4019 return build_unary_op (INDIRECT_REF, NULL_TREE,
4020 convert (build_pointer_type (type),
4021 TREE_OPERAND (expr, 0)));
4022 break;
4023
4024 default:
4025 break;
4026 }
4027
4028 /* Check for converting to a pointer to an unconstrained array. */
4029 if (TYPE_FAT_POINTER_P (type) && !TYPE_FAT_POINTER_P (etype))
4030 return convert_to_fat_pointer (type, expr);
4031
4032 /* If we are converting between two aggregate types that are mere
4033 variants, just make a VIEW_CONVERT_EXPR. */
4034 else if (code == ecode
4035 && AGGREGATE_TYPE_P (type)
4036 && gnat_types_compatible_p (type, etype))
4037 return build1 (VIEW_CONVERT_EXPR, type, expr);
4038
4039 /* In all other cases of related types, make a NOP_EXPR. */
4040 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
4041 || (code == INTEGER_CST && ecode == INTEGER_CST
4042 && (type == TREE_TYPE (etype) || etype == TREE_TYPE (type))))
4043 return fold_convert (type, expr);
4044
4045 switch (code)
4046 {
4047 case VOID_TYPE:
4048 return fold_build1 (CONVERT_EXPR, type, expr);
4049
4050 case INTEGER_TYPE:
4051 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4052 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4053 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4054 return unchecked_convert (type, expr, false);
4055 else if (TYPE_BIASED_REPRESENTATION_P (type))
4056 return fold_convert (type,
4057 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4058 convert (TREE_TYPE (type), expr),
4059 TYPE_MIN_VALUE (type)));
4060
4061 /* ... fall through ... */
4062
4063 case ENUMERAL_TYPE:
4064 case BOOLEAN_TYPE:
4065 /* If we are converting an additive expression to an integer type
4066 with lower precision, be wary of the optimization that can be
4067 applied by convert_to_integer. There are 2 problematic cases:
4068 - if the first operand was originally of a biased type,
4069 because we could be recursively called to convert it
4070 to an intermediate type and thus rematerialize the
4071 additive operator endlessly,
4072 - if the expression contains a placeholder, because an
4073 intermediate conversion that changes the sign could
4074 be inserted and thus introduce an artificial overflow
4075 at compile time when the placeholder is substituted. */
4076 if (code == INTEGER_TYPE
4077 && ecode == INTEGER_TYPE
4078 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4079 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4080 {
4081 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4082
4083 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4084 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4085 || CONTAINS_PLACEHOLDER_P (expr))
4086 return build1 (NOP_EXPR, type, expr);
4087 }
4088
4089 return fold (convert_to_integer (type, expr));
4090
4091 case POINTER_TYPE:
4092 case REFERENCE_TYPE:
4093 /* If converting between two pointers to records denoting
4094 both a template and type, adjust if needed to account
4095 for any differing offsets, since one might be negative. */
4096 if (TYPE_THIN_POINTER_P (etype) && TYPE_THIN_POINTER_P (type))
4097 {
4098 tree bit_diff
4099 = size_diffop (bit_position (TYPE_FIELDS (TREE_TYPE (etype))),
4100 bit_position (TYPE_FIELDS (TREE_TYPE (type))));
4101 tree byte_diff = size_binop (CEIL_DIV_EXPR, bit_diff,
4102 sbitsize_int (BITS_PER_UNIT));
4103
4104 expr = build1 (NOP_EXPR, type, expr);
4105 TREE_CONSTANT (expr) = TREE_CONSTANT (TREE_OPERAND (expr, 0));
4106 if (integer_zerop (byte_diff))
4107 return expr;
4108
4109 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4110 fold (convert (sizetype, byte_diff)));
4111 }
4112
4113 /* If converting to a thin pointer, handle specially. */
4114 if (TYPE_THIN_POINTER_P (type)
4115 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)))
4116 return convert_to_thin_pointer (type, expr);
4117
4118 /* If converting fat pointer to normal pointer, get the pointer to the
4119 array and then convert it. */
4120 else if (TYPE_FAT_POINTER_P (etype))
4121 expr = build_component_ref (expr, get_identifier ("P_ARRAY"),
4122 NULL_TREE, false);
4123
4124 return fold (convert_to_pointer (type, expr));
4125
4126 case REAL_TYPE:
4127 return fold (convert_to_real (type, expr));
4128
4129 case RECORD_TYPE:
4130 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4131 return
4132 gnat_build_constructor
4133 (type, tree_cons (TYPE_FIELDS (type),
4134 convert (TREE_TYPE (TYPE_FIELDS (type)), expr),
4135 NULL_TREE));
4136
4137 /* ... fall through ... */
4138
4139 case ARRAY_TYPE:
4140 /* In these cases, assume the front-end has validated the conversion.
4141 If the conversion is valid, it will be a bit-wise conversion, so
4142 it can be viewed as an unchecked conversion. */
4143 return unchecked_convert (type, expr, false);
4144
4145 case UNION_TYPE:
4146 /* This is a either a conversion between a tagged type and some
4147 subtype, which we have to mark as a UNION_TYPE because of
4148 overlapping fields or a conversion of an Unchecked_Union. */
4149 return unchecked_convert (type, expr, false);
4150
4151 case UNCONSTRAINED_ARRAY_TYPE:
4152 /* If EXPR is a constrained array, take its address, convert it to a
4153 fat pointer, and then dereference it. Likewise if EXPR is a
4154 record containing both a template and a constrained array.
4155 Note that a record representing a justified modular type
4156 always represents a packed constrained array. */
4157 if (ecode == ARRAY_TYPE
4158 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4159 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4160 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4161 return
4162 build_unary_op
4163 (INDIRECT_REF, NULL_TREE,
4164 convert_to_fat_pointer (TREE_TYPE (type),
4165 build_unary_op (ADDR_EXPR,
4166 NULL_TREE, expr)));
4167
4168 /* Do something very similar for converting one unconstrained
4169 array to another. */
4170 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4171 return
4172 build_unary_op (INDIRECT_REF, NULL_TREE,
4173 convert (TREE_TYPE (type),
4174 build_unary_op (ADDR_EXPR,
4175 NULL_TREE, expr)));
4176 else
4177 gcc_unreachable ();
4178
4179 case COMPLEX_TYPE:
4180 return fold (convert_to_complex (type, expr));
4181
4182 default:
4183 gcc_unreachable ();
4184 }
4185 }
4186 \f
4187 /* Remove all conversions that are done in EXP. This includes converting
4188 from a padded type or to a justified modular type. If TRUE_ADDRESS
4189 is true, always return the address of the containing object even if
4190 the address is not bit-aligned. */
4191
4192 tree
4193 remove_conversions (tree exp, bool true_address)
4194 {
4195 switch (TREE_CODE (exp))
4196 {
4197 case CONSTRUCTOR:
4198 if (true_address
4199 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4200 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4201 return
4202 remove_conversions (VEC_index (constructor_elt,
4203 CONSTRUCTOR_ELTS (exp), 0)->value,
4204 true);
4205 break;
4206
4207 case COMPONENT_REF:
4208 if (TREE_CODE (TREE_TYPE (TREE_OPERAND (exp, 0))) == RECORD_TYPE
4209 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4210 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4211 break;
4212
4213 case VIEW_CONVERT_EXPR: case NON_LVALUE_EXPR:
4214 CASE_CONVERT:
4215 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4216
4217 default:
4218 break;
4219 }
4220
4221 return exp;
4222 }
4223 \f
4224 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4225 refers to the underlying array. If its type has TYPE_CONTAINS_TEMPLATE_P,
4226 likewise return an expression pointing to the underlying array. */
4227
tree
maybe_unconstrained_array (tree exp)
{
  enum tree_code code = TREE_CODE (exp);
  tree new;

  switch (TREE_CODE (TREE_TYPE (exp)))
    {
    case UNCONSTRAINED_ARRAY_TYPE:
      /* For a reference through a fat pointer, dereference the P_ARRAY
	 field to get at the underlying array, propagating readonly-ness
	 from the original reference.  */
      if (code == UNCONSTRAINED_ARRAY_REF)
	{
	  new
	    = build_unary_op (INDIRECT_REF, NULL_TREE,
			      build_component_ref (TREE_OPERAND (exp, 0),
						   get_identifier ("P_ARRAY"),
						   NULL_TREE, false));
	  TREE_READONLY (new) = TREE_STATIC (new) = TREE_READONLY (exp);
	  return new;
	}

      /* For a NULL_EXPR, rebuild it with the type of the underlying array,
	 i.e. the component type of the P_ARRAY field of the fat pointer.  */
      else if (code == NULL_EXPR)
	return build1 (NULL_EXPR,
		       TREE_TYPE (TREE_TYPE (TYPE_FIELDS
					     (TREE_TYPE (TREE_TYPE (exp))))),
		       TREE_OPERAND (exp, 0));

      /* NOTE(review): no break here, so any other expression of
	 UNCONSTRAINED_ARRAY_TYPE falls through into the RECORD_TYPE
	 handling below and has record-specific macros applied to its
	 type -- confirm this fall-through is intentional.  */

    case RECORD_TYPE:
      /* If this is a padded type, convert to the unpadded type and see if
	 it contains a template.  */
      if (TYPE_IS_PADDING_P (TREE_TYPE (exp)))
	{
	  new = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (exp))), exp);
	  /* If the unpadded type contains a template, return a reference
	     to the array field, which follows the template field.  */
	  if (TREE_CODE (TREE_TYPE (new)) == RECORD_TYPE
	      && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (new)))
	    return
	      build_component_ref (new, NULL_TREE,
				   TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (new))),
				   0);
	}
      /* Likewise directly on EXP when its type contains a template.  */
      else if (TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (exp)))
	return
	  build_component_ref (exp, NULL_TREE,
			       TREE_CHAIN (TYPE_FIELDS (TREE_TYPE (exp))), 0);
      break;

    default:
      break;
    }

  /* In all other cases, return EXP unchanged.  */
  return exp;
}
4279 \f
4280 /* Return true if EXPR is an expression that can be folded as an operand
4281 of a VIEW_CONVERT_EXPR. See the head comment of unchecked_convert for
4282 the rationale. */
4283
4284 static bool
4285 can_fold_for_view_convert_p (tree expr)
4286 {
4287 tree t1, t2;
4288
4289 /* The folder will fold NOP_EXPRs between integral types with the same
4290 precision (in the middle-end's sense). We cannot allow it if the
4291 types don't have the same precision in the Ada sense as well. */
4292 if (TREE_CODE (expr) != NOP_EXPR)
4293 return true;
4294
4295 t1 = TREE_TYPE (expr);
4296 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4297
4298 /* Defer to the folder for non-integral conversions. */
4299 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4300 return true;
4301
4302 /* Only fold conversions that preserve both precisions. */
4303 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4304 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4305 return true;
4306
4307 return false;
4308 }
4309
4310 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
4311 If NOTRUNC_P is true, truncation operations should be suppressed.
4312
4313 Special care is required with (source or target) integral types whose
4314 precision is not equal to their size, to make sure we fetch or assign
4315 the value bits whose location might depend on the endianness, e.g.
4316
4317 Rmsize : constant := 8;
4318 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4319
4320 type Bit_Array is array (1 .. Rmsize) of Boolean;
4321 pragma Pack (Bit_Array);
4322
4323 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4324
4325 Value : Int := 2#1000_0001#;
4326 Vbits : Bit_Array := To_Bit_Array (Value);
4327
4328 we expect the 8 bits at Vbits'Address to always contain Value, while
4329 their original location depends on the endianness, at Value'Address
4330 on a little-endian architecture but not on a big-endian one.
4331
4332 ??? There is a problematic discrepancy between what is called precision
4333 here (and more generally throughout gigi) for integral types and what is
4334 called precision in the middle-end. In the former case it's the RM size
4335 as given by TYPE_RM_SIZE (or rm_size) whereas it's TYPE_PRECISION in the
4336 latter case, the hitch being that they are not equal when they matter,
4337 that is when the number of value bits is not equal to the type's size:
4338 TYPE_RM_SIZE does give the number of value bits but TYPE_PRECISION is set
4339 to the size. The sole exception are BOOLEAN_TYPEs for which both are 1.
4340
4341 The consequence is that gigi must duplicate code bridging the gap between
4342 the type's size and its precision that exists for TYPE_PRECISION in the
4343 middle-end, because the latter knows nothing about TYPE_RM_SIZE, and be
4344 wary of transformations applied in the middle-end based on TYPE_PRECISION
4345 because this value doesn't reflect the actual precision for Ada. */
4346
4347 tree
4348 unchecked_convert (tree type, tree expr, bool notrunc_p)
4349 {
4350 tree etype = TREE_TYPE (expr);
4351
4352 /* If the expression is already the right type, we are done. */
4353 if (etype == type)
4354 return expr;
4355
  /* If both types are integral, just do a normal conversion.
     Likewise for a conversion to an unconstrained array.  */
4358 if ((((INTEGRAL_TYPE_P (type)
4359 && !(TREE_CODE (type) == INTEGER_TYPE
4360 && TYPE_VAX_FLOATING_POINT_P (type)))
4361 || (POINTER_TYPE_P (type) && ! TYPE_THIN_POINTER_P (type))
4362 || (TREE_CODE (type) == RECORD_TYPE
4363 && TYPE_JUSTIFIED_MODULAR_P (type)))
4364 && ((INTEGRAL_TYPE_P (etype)
4365 && !(TREE_CODE (etype) == INTEGER_TYPE
4366 && TYPE_VAX_FLOATING_POINT_P (etype)))
4367 || (POINTER_TYPE_P (etype) && !TYPE_THIN_POINTER_P (etype))
4368 || (TREE_CODE (etype) == RECORD_TYPE
4369 && TYPE_JUSTIFIED_MODULAR_P (etype))))
4370 || TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
4371 {
4372 if (TREE_CODE (etype) == INTEGER_TYPE
4373 && TYPE_BIASED_REPRESENTATION_P (etype))
4374 {
4375 tree ntype = copy_type (etype);
4376 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
4377 TYPE_MAIN_VARIANT (ntype) = ntype;
4378 expr = build1 (NOP_EXPR, ntype, expr);
4379 }
4380
4381 if (TREE_CODE (type) == INTEGER_TYPE
4382 && TYPE_BIASED_REPRESENTATION_P (type))
4383 {
4384 tree rtype = copy_type (type);
4385 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
4386 TYPE_MAIN_VARIANT (rtype) = rtype;
4387 expr = convert (rtype, expr);
4388 expr = build1 (NOP_EXPR, type, expr);
4389 }
4390
4391 /* We have another special case: if we are unchecked converting either
4392 a subtype or a type with limited range into a base type, we need to
4393 ensure that VRP doesn't propagate range information because this
4394 conversion may be done precisely to validate that the object is
4395 within the range it is supposed to have. */
4396 else if (TREE_CODE (expr) != INTEGER_CST
4397 && TREE_CODE (type) == INTEGER_TYPE && !TREE_TYPE (type)
4398 && ((TREE_CODE (etype) == INTEGER_TYPE && TREE_TYPE (etype))
4399 || TREE_CODE (etype) == ENUMERAL_TYPE
4400 || TREE_CODE (etype) == BOOLEAN_TYPE))
4401 {
4402 /* The optimization barrier is a VIEW_CONVERT_EXPR node; moreover,
4403 in order not to be deemed an useless type conversion, it must
4404 be from subtype to base type.
4405
4406 Therefore we first do the bulk of the conversion to a subtype of
4407 the final type. And this conversion must itself not be deemed
4408 useless if the source type is not a subtype because, otherwise,
4409 the final VIEW_CONVERT_EXPR will be deemed so as well. That's
4410 why we toggle the unsigned flag in this conversion, which is
4411 harmless since the final conversion is only a reinterpretation
4412 of the bit pattern.
4413
4414 ??? This may raise addressability and/or aliasing issues because
4415 VIEW_CONVERT_EXPR gets gimplified as an lvalue, thus causing the
4416 address of its operand to be taken if it is deemed addressable
4417 and not already in GIMPLE form. */
4418 tree rtype
4419 = gnat_type_for_mode (TYPE_MODE (type), !TYPE_UNSIGNED (etype));
4420 rtype = copy_type (rtype);
4421 TYPE_MAIN_VARIANT (rtype) = rtype;
4422 TREE_TYPE (rtype) = type;
4423 expr = convert (rtype, expr);
4424 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
4425 }
4426
4427 else
4428 expr = convert (type, expr);
4429 }
4430
4431 /* If we are converting to an integral type whose precision is not equal
4432 to its size, first unchecked convert to a record that contains an
4433 object of the output type. Then extract the field. */
4434 else if (INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4435 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4436 GET_MODE_BITSIZE (TYPE_MODE (type))))
4437 {
4438 tree rec_type = make_node (RECORD_TYPE);
4439 tree field = create_field_decl (get_identifier ("OBJ"), type,
4440 rec_type, 1, 0, 0, 0);
4441
4442 TYPE_FIELDS (rec_type) = field;
4443 layout_type (rec_type);
4444
4445 expr = unchecked_convert (rec_type, expr, notrunc_p);
4446 expr = build_component_ref (expr, NULL_TREE, field, 0);
4447 }
4448
4449 /* Similarly if we are converting from an integral type whose precision
4450 is not equal to its size. */
4451 else if (INTEGRAL_TYPE_P (etype) && TYPE_RM_SIZE (etype)
4452 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
4453 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4454 {
4455 tree rec_type = make_node (RECORD_TYPE);
4456 tree field
4457 = create_field_decl (get_identifier ("OBJ"), etype, rec_type,
4458 1, 0, 0, 0);
4459
4460 TYPE_FIELDS (rec_type) = field;
4461 layout_type (rec_type);
4462
4463 expr = gnat_build_constructor (rec_type, build_tree_list (field, expr));
4464 expr = unchecked_convert (type, expr, notrunc_p);
4465 }
4466
4467 /* We have a special case when we are converting between two
4468 unconstrained array types. In that case, take the address,
4469 convert the fat pointer types, and dereference. */
4470 else if (TREE_CODE (etype) == UNCONSTRAINED_ARRAY_TYPE
4471 && TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
4472 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4473 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
4474 build_unary_op (ADDR_EXPR, NULL_TREE,
4475 expr)));
4476 else
4477 {
4478 expr = maybe_unconstrained_array (expr);
4479 etype = TREE_TYPE (expr);
4480 if (can_fold_for_view_convert_p (expr))
4481 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
4482 else
4483 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
4484 }
4485
4486 /* If the result is an integral type whose precision is not equal to its
4487 size, sign- or zero-extend the result. We need not do this if the input
4488 is an integral type of the same precision and signedness or if the output
4489 is a biased type or if both the input and output are unsigned. */
4490 if (!notrunc_p
4491 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4492 && !(TREE_CODE (type) == INTEGER_TYPE
4493 && TYPE_BIASED_REPRESENTATION_P (type))
4494 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4495 GET_MODE_BITSIZE (TYPE_MODE (type)))
4496 && !(INTEGRAL_TYPE_P (etype)
4497 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
4498 && operand_equal_p (TYPE_RM_SIZE (type),
4499 (TYPE_RM_SIZE (etype) != 0
4500 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
4501 0))
4502 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
4503 {
4504 tree base_type = gnat_type_for_mode (TYPE_MODE (type),
4505 TYPE_UNSIGNED (type));
4506 tree shift_expr
4507 = convert (base_type,
4508 size_binop (MINUS_EXPR,
4509 bitsize_int
4510 (GET_MODE_BITSIZE (TYPE_MODE (type))),
4511 TYPE_RM_SIZE (type)));
4512 expr
4513 = convert (type,
4514 build_binary_op (RSHIFT_EXPR, base_type,
4515 build_binary_op (LSHIFT_EXPR, base_type,
4516 convert (base_type, expr),
4517 shift_expr),
4518 shift_expr));
4519 }
4520
4521 /* An unchecked conversion should never raise Constraint_Error. The code
4522 below assumes that GCC's conversion routines overflow the same way that
4523 the underlying hardware does. This is probably true. In the rare case
4524 when it is false, we can rely on the fact that such conversions are
4525 erroneous anyway. */
4526 if (TREE_CODE (expr) == INTEGER_CST)
4527 TREE_OVERFLOW (expr) = 0;
4528
4529 /* If the sizes of the types differ and this is an VIEW_CONVERT_EXPR,
4530 show no longer constant. */
4531 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
4532 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
4533 OEP_ONLY_CONST))
4534 TREE_CONSTANT (expr) = 0;
4535
4536 return expr;
4537 }
4538 \f
4539 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
4540 the latter being a record type as predicated by Is_Record_Type. */
4541
4542 enum tree_code
4543 tree_code_for_record_type (Entity_Id gnat_type)
4544 {
4545 Node_Id component_list
4546 = Component_List (Type_Definition
4547 (Declaration_Node
4548 (Implementation_Base_Type (gnat_type))));
4549 Node_Id component;
4550
4551 /* Make this a UNION_TYPE unless it's either not an Unchecked_Union or
4552 we have a non-discriminant field outside a variant. In either case,
4553 it's a RECORD_TYPE. */
4554
4555 if (!Is_Unchecked_Union (gnat_type))
4556 return RECORD_TYPE;
4557
4558 for (component = First_Non_Pragma (Component_Items (component_list));
4559 Present (component);
4560 component = Next_Non_Pragma (component))
4561 if (Ekind (Defining_Entity (component)) == E_Component)
4562 return RECORD_TYPE;
4563
4564 return UNION_TYPE;
4565 }
4566
4567 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
4568 component of an aggregate type. */
4569
4570 bool
4571 type_for_nonaliased_component_p (tree gnu_type)
4572 {
4573 /* If the type is passed by reference, we may have pointers to the
4574 component so it cannot be made non-aliased. */
4575 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
4576 return false;
4577
4578 /* We used to say that any component of aggregate type is aliased
4579 because the front-end may take 'Reference of it. The front-end
4580 has been enhanced in the meantime so as to use a renaming instead
4581 in most cases, but the back-end can probably take the address of
4582 such a component too so we go for the conservative stance.
4583
4584 For instance, we might need the address of any array type, even
4585 if normally passed by copy, to construct a fat pointer if the
4586 component is used as an actual for an unconstrained formal.
4587
4588 Likewise for record types: even if a specific record subtype is
4589 passed by copy, the parent type might be passed by ref (e.g. if
4590 it's of variable size) and we might take the address of a child
4591 component to pass to a parent formal. We have no way to check
4592 for such conditions here. */
4593 if (AGGREGATE_TYPE_P (gnu_type))
4594 return false;
4595
4596 return true;
4597 }
4598
/* Perform final processing on global variables.  This is invoked once at
   the end of compilation (presumably installed as the write_globals
   langhook -- TODO confirm against gcc-interface/misc.c).  */

void
gnat_write_global_declarations (void)
{
  /* Proceed to optimize and emit assembly.
     FIXME: shouldn't be the front end's responsibility to call this.  */
  cgraph_optimize ();

  /* Emit debug info for all global declarations accumulated so far in
     the global_decls vector.  */
  emit_debug_global_declarations (VEC_address (tree, global_decls),
                                  VEC_length (tree, global_decls));
}
4612
4613 /* ************************************************************************
4614 * * GCC builtins support *
4615 * ************************************************************************ */
4616
4617 /* The general scheme is fairly simple:
4618
4619 For each builtin function/type to be declared, gnat_install_builtins calls
4620 internal facilities which eventually get to gnat_push_decl, which in turn
4621 tracks the so declared builtin function decls in the 'builtin_decls' global
4622 datastructure. When an Intrinsic subprogram declaration is processed, we
4623 search this global datastructure to retrieve the associated BUILT_IN DECL
4624 node. */
4625
4626 /* Search the chain of currently available builtin declarations for a node
4627 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
4628 found, if any, or NULL_TREE otherwise. */
4629 tree
4630 builtin_decl_for (tree name)
4631 {
4632 unsigned i;
4633 tree decl;
4634
4635 for (i = 0; VEC_iterate(tree, builtin_decls, i, decl); i++)
4636 if (DECL_NAME (decl) == name)
4637 return decl;
4638
4639 return NULL_TREE;
4640 }
4641
4642 /* The code below eventually exposes gnat_install_builtins, which declares
4643 the builtin types and functions we might need, either internally or as
4644 user accessible facilities.
4645
4646 ??? This is a first implementation shot, still in rough shape. It is
4647 heavily inspired from the "C" family implementation, with chunks copied
4648 verbatim from there.
4649
4650 Two obvious TODO candidates are
4651 o Use a more efficient name/decl mapping scheme
4652 o Devise a middle-end infrastructure to avoid having to copy
4653 pieces between front-ends. */
4654
4655 /* ----------------------------------------------------------------------- *
4656 * BUILTIN ELEMENTARY TYPES *
4657 * ----------------------------------------------------------------------- */
4658
/* Standard data types to be used in builtin argument declarations.  */

enum c_tree_index
{
  CTI_SIGNED_SIZE_TYPE, /* For format checking only.  */
  CTI_STRING_TYPE,
  CTI_CONST_STRING_TYPE,

  CTI_MAX
};

/* Cache of the elementary builtin types, indexed by c_tree_index.  */
static tree c_global_trees[CTI_MAX];

/* Accessors mirroring the names the C front-end uses, so that
   builtin-types.def and the attribute handlers can be shared verbatim.  */
#define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
#define string_type_node c_global_trees[CTI_STRING_TYPE]
#define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]

/* ??? In addition to some attribute handlers, we currently don't support a
   (small) number of builtin-types, which in turn inhibits support for a
   number of builtin functions.  Map them to void_type_node as a stopgap.  */
#define wint_type_node void_type_node
#define intmax_type_node void_type_node
#define uintmax_type_node void_type_node
4682
4683 /* Build the void_list_node (void_type_node having been created). */
4684
4685 static tree
4686 build_void_list_node (void)
4687 {
4688 tree t = build_tree_list (NULL_TREE, void_type_node);
4689 return t;
4690 }
4691
4692 /* Used to help initialize the builtin-types.def table. When a type of
4693 the correct size doesn't exist, use error_mark_node instead of NULL.
4694 The later results in segfaults even when a decl using the type doesn't
4695 get invoked. */
4696
4697 static tree
4698 builtin_type_for_size (int size, bool unsignedp)
4699 {
4700 tree type = lang_hooks.types.type_for_size (size, unsignedp);
4701 return type ? type : error_mark_node;
4702 }
4703
/* Build/push the elementary type decls that builtin functions/types
   will need.  */

static void
install_builtin_elementary_types (void)
{
  /* NOTE(review): this aliases the *unsigned* size_type_node, which is
     acceptable since signed_size_type_node is used for format checking
     only (see c_tree_index).  */
  signed_size_type_node = size_type_node;
  pid_type_node = integer_type_node;
  void_list_node = build_void_list_node ();

  /* "char *" and "const char *", as builtin-types.def expects them.  */
  string_type_node = build_pointer_type (char_type_node);
  const_string_type_node
    = build_pointer_type (build_qualified_type
			  (char_type_node, TYPE_QUAL_CONST));
}
4719
4720 /* ----------------------------------------------------------------------- *
4721 * BUILTIN FUNCTION TYPES *
4722 * ----------------------------------------------------------------------- */
4723
4724 /* Now, builtin function types per se. */
4725
4726 enum c_builtin_type
4727 {
4728 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
4729 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
4730 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
4731 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
4732 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4733 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4734 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
4735 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
4736 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
4737 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
4738 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
4739 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
4740 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
4741 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
4742 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG6) \
4743 NAME,
4744 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
4745 #include "builtin-types.def"
4746 #undef DEF_PRIMITIVE_TYPE
4747 #undef DEF_FUNCTION_TYPE_0
4748 #undef DEF_FUNCTION_TYPE_1
4749 #undef DEF_FUNCTION_TYPE_2
4750 #undef DEF_FUNCTION_TYPE_3
4751 #undef DEF_FUNCTION_TYPE_4
4752 #undef DEF_FUNCTION_TYPE_5
4753 #undef DEF_FUNCTION_TYPE_6
4754 #undef DEF_FUNCTION_TYPE_7
4755 #undef DEF_FUNCTION_TYPE_VAR_0
4756 #undef DEF_FUNCTION_TYPE_VAR_1
4757 #undef DEF_FUNCTION_TYPE_VAR_2
4758 #undef DEF_FUNCTION_TYPE_VAR_3
4759 #undef DEF_FUNCTION_TYPE_VAR_4
4760 #undef DEF_FUNCTION_TYPE_VAR_5
4761 #undef DEF_POINTER_TYPE
4762 BT_LAST
4763 };
4764
/* Shorthand for the enum above.  */
typedef enum c_builtin_type builtin_type;

/* A temporary array used in communication with def_fn_type.  */
static GTY(()) tree builtin_types[(int) BT_LAST + 1];
4769
4770 /* A helper function for install_builtin_types. Build function type
4771 for DEF with return type RET and N arguments. If VAR is true, then the
4772 function should be variadic after those N arguments.
4773
4774 Takes special care not to ICE if any of the types involved are
4775 error_mark_node, which indicates that said type is not in fact available
4776 (see builtin_type_for_size). In which case the function type as a whole
4777 should be error_mark_node. */
4778
4779 static void
4780 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
4781 {
4782 tree args = NULL, t;
4783 va_list list;
4784 int i;
4785
4786 va_start (list, n);
4787 for (i = 0; i < n; ++i)
4788 {
4789 builtin_type a = va_arg (list, builtin_type);
4790 t = builtin_types[a];
4791 if (t == error_mark_node)
4792 goto egress;
4793 args = tree_cons (NULL_TREE, t, args);
4794 }
4795 va_end (list);
4796
4797 args = nreverse (args);
4798 if (!var)
4799 args = chainon (args, void_list_node);
4800
4801 t = builtin_types[ret];
4802 if (t == error_mark_node)
4803 goto egress;
4804 t = build_function_type (t, args);
4805
4806 egress:
4807 builtin_types[def] = t;
4808 }
4809
4810 /* Build the builtin function types and install them in the builtin_types
4811 array for later use in builtin function decls. */
4812
4813 static void
4814 install_builtin_function_types (void)
4815 {
4816 tree va_list_ref_type_node;
4817 tree va_list_arg_type_node;
4818
4819 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
4820 {
4821 va_list_arg_type_node = va_list_ref_type_node =
4822 build_pointer_type (TREE_TYPE (va_list_type_node));
4823 }
4824 else
4825 {
4826 va_list_arg_type_node = va_list_type_node;
4827 va_list_ref_type_node = build_reference_type (va_list_type_node);
4828 }
4829
4830 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
4831 builtin_types[ENUM] = VALUE;
4832 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
4833 def_fn_type (ENUM, RETURN, 0, 0);
4834 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
4835 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
4836 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
4837 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
4838 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
4839 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
4840 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
4841 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
4842 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
4843 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
4844 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4845 ARG6) \
4846 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
4847 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
4848 ARG6, ARG7) \
4849 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
4850 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
4851 def_fn_type (ENUM, RETURN, 1, 0);
4852 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
4853 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
4854 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
4855 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
4856 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
4857 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
4858 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
4859 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
4860 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
4861 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
4862 #define DEF_POINTER_TYPE(ENUM, TYPE) \
4863 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
4864
4865 #include "builtin-types.def"
4866
4867 #undef DEF_PRIMITIVE_TYPE
4868 #undef DEF_FUNCTION_TYPE_1
4869 #undef DEF_FUNCTION_TYPE_2
4870 #undef DEF_FUNCTION_TYPE_3
4871 #undef DEF_FUNCTION_TYPE_4
4872 #undef DEF_FUNCTION_TYPE_5
4873 #undef DEF_FUNCTION_TYPE_6
4874 #undef DEF_FUNCTION_TYPE_VAR_0
4875 #undef DEF_FUNCTION_TYPE_VAR_1
4876 #undef DEF_FUNCTION_TYPE_VAR_2
4877 #undef DEF_FUNCTION_TYPE_VAR_3
4878 #undef DEF_FUNCTION_TYPE_VAR_4
4879 #undef DEF_FUNCTION_TYPE_VAR_5
4880 #undef DEF_POINTER_TYPE
4881 builtin_types[(int) BT_LAST] = NULL_TREE;
4882 }
4883
4884 /* ----------------------------------------------------------------------- *
4885 * BUILTIN ATTRIBUTES *
4886 * ----------------------------------------------------------------------- */
4887
/* Enumerate the attribute trees defined by builtin-attrs.def; each DEF_*
   entry contributes one enumerator used to index built_in_attributes.  */
enum built_in_attribute
{
#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
  ATTR_LAST
};

/* The attribute trees themselves, filled in by install_builtin_attributes
   and indexed by built_in_attribute.  */
static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
4903
/* Populate built_in_attributes by expanding builtin-attrs.def a second
   time, this time building the actual trees for each entry.  */

static void
install_builtin_attributes (void)
{
  /* Fill in the built_in_attributes array.  */
#define DEF_ATTR_NULL_TREE(ENUM)				\
  built_in_attributes[(int) ENUM] = NULL_TREE;
#define DEF_ATTR_INT(ENUM, VALUE)				\
  built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
#define DEF_ATTR_IDENT(ENUM, STRING)				\
  built_in_attributes[(int) ENUM] = get_identifier (STRING);
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN)	\
  built_in_attributes[(int) ENUM]			\
    = tree_cons (built_in_attributes[(int) PURPOSE],	\
		 built_in_attributes[(int) VALUE],	\
		 built_in_attributes[(int) CHAIN]);
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
}
4925
4926 /* Handle a "const" attribute; arguments as in
4927 struct attribute_spec.handler. */
4928
4929 static tree
4930 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
4931 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
4932 bool *no_add_attrs)
4933 {
4934 if (TREE_CODE (*node) == FUNCTION_DECL)
4935 TREE_READONLY (*node) = 1;
4936 else
4937 *no_add_attrs = true;
4938
4939 return NULL_TREE;
4940 }
4941
4942 /* Handle a "nothrow" attribute; arguments as in
4943 struct attribute_spec.handler. */
4944
4945 static tree
4946 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
4947 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
4948 bool *no_add_attrs)
4949 {
4950 if (TREE_CODE (*node) == FUNCTION_DECL)
4951 TREE_NOTHROW (*node) = 1;
4952 else
4953 *no_add_attrs = true;
4954
4955 return NULL_TREE;
4956 }
4957
4958 /* Handle a "pure" attribute; arguments as in
4959 struct attribute_spec.handler. */
4960
4961 static tree
4962 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
4963 int ARG_UNUSED (flags), bool *no_add_attrs)
4964 {
4965 if (TREE_CODE (*node) == FUNCTION_DECL)
4966 DECL_PURE_P (*node) = 1;
4967 /* ??? TODO: Support types. */
4968 else
4969 {
4970 warning (OPT_Wattributes, "%qE attribute ignored", name);
4971 *no_add_attrs = true;
4972 }
4973
4974 return NULL_TREE;
4975 }
4976
/* Handle a "no vops" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
			 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
			 bool *ARG_UNUSED (no_add_attrs))
{
  /* Unlike the other handlers, a non-function here is a hard assert
     rather than a diagnostic -- presumably this attribute is only ever
     attached internally by the builtin machinery (confirm).  */
  gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
  DECL_IS_NOVOPS (*node) = 1;
  return NULL_TREE;
}
4989
4990 /* Helper for nonnull attribute handling; fetch the operand number
4991 from the attribute argument list. */
4992
4993 static bool
4994 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
4995 {
4996 /* Verify the arg number is a constant. */
4997 if (TREE_CODE (arg_num_expr) != INTEGER_CST
4998 || TREE_INT_CST_HIGH (arg_num_expr) != 0)
4999 return false;
5000
5001 *valp = TREE_INT_CST_LOW (arg_num_expr);
5002 return true;
5003 }
5004
5005 /* Handle the "nonnull" attribute. */
5006 static tree
5007 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
5008 tree args, int ARG_UNUSED (flags),
5009 bool *no_add_attrs)
5010 {
5011 tree type = *node;
5012 unsigned HOST_WIDE_INT attr_arg_num;
5013
5014 /* If no arguments are specified, all pointer arguments should be
5015 non-null. Verify a full prototype is given so that the arguments
5016 will have the correct types when we actually check them later. */
5017 if (!args)
5018 {
5019 if (!TYPE_ARG_TYPES (type))
5020 {
5021 error ("nonnull attribute without arguments on a non-prototype");
5022 *no_add_attrs = true;
5023 }
5024 return NULL_TREE;
5025 }
5026
5027 /* Argument list specified. Verify that each argument number references
5028 a pointer argument. */
5029 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
5030 {
5031 tree argument;
5032 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
5033
5034 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
5035 {
5036 error ("nonnull argument has invalid operand number (argument %lu)",
5037 (unsigned long) attr_arg_num);
5038 *no_add_attrs = true;
5039 return NULL_TREE;
5040 }
5041
5042 argument = TYPE_ARG_TYPES (type);
5043 if (argument)
5044 {
5045 for (ck_num = 1; ; ck_num++)
5046 {
5047 if (!argument || ck_num == arg_num)
5048 break;
5049 argument = TREE_CHAIN (argument);
5050 }
5051
5052 if (!argument
5053 || TREE_CODE (TREE_VALUE (argument)) == VOID_TYPE)
5054 {
5055 error ("nonnull argument with out-of-range operand number (argument %lu, operand %lu)",
5056 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5057 *no_add_attrs = true;
5058 return NULL_TREE;
5059 }
5060
5061 if (TREE_CODE (TREE_VALUE (argument)) != POINTER_TYPE)
5062 {
5063 error ("nonnull argument references non-pointer operand (argument %lu, operand %lu)",
5064 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5065 *no_add_attrs = true;
5066 return NULL_TREE;
5067 }
5068 }
5069 }
5070
5071 return NULL_TREE;
5072 }
5073
5074 /* Handle a "sentinel" attribute. */
5075
5076 static tree
5077 handle_sentinel_attribute (tree *node, tree name, tree args,
5078 int ARG_UNUSED (flags), bool *no_add_attrs)
5079 {
5080 tree params = TYPE_ARG_TYPES (*node);
5081
5082 if (!params)
5083 {
5084 warning (OPT_Wattributes,
5085 "%qE attribute requires prototypes with named arguments", name);
5086 *no_add_attrs = true;
5087 }
5088 else
5089 {
5090 while (TREE_CHAIN (params))
5091 params = TREE_CHAIN (params);
5092
5093 if (VOID_TYPE_P (TREE_VALUE (params)))
5094 {
5095 warning (OPT_Wattributes,
5096 "%qE attribute only applies to variadic functions", name);
5097 *no_add_attrs = true;
5098 }
5099 }
5100
5101 if (args)
5102 {
5103 tree position = TREE_VALUE (args);
5104
5105 if (TREE_CODE (position) != INTEGER_CST)
5106 {
5107 warning (0, "requested position is not an integer constant");
5108 *no_add_attrs = true;
5109 }
5110 else
5111 {
5112 if (tree_int_cst_lt (position, integer_zero_node))
5113 {
5114 warning (0, "requested position is less than zero");
5115 *no_add_attrs = true;
5116 }
5117 }
5118 }
5119
5120 return NULL_TREE;
5121 }
5122
/* Handle a "noreturn" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
			   int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree type = TREE_TYPE (*node);

  /* See FIXME comment in c_common_attribute_table.  */
  if (TREE_CODE (*node) == FUNCTION_DECL)
    TREE_THIS_VOLATILE (*node) = 1;
  else if (TREE_CODE (type) == POINTER_TYPE
	   && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
    /* For a pointer-to-function, rebuild the pointed-to function type
       with the volatile qualifier set (mirroring TREE_THIS_VOLATILE on
       decls above) while preserving any existing const qualifier.  */
    TREE_TYPE (*node)
      = build_pointer_type
	(build_type_variant (TREE_TYPE (type),
			     TYPE_READONLY (TREE_TYPE (type)), 1));
  else
    {
      warning (OPT_Wattributes, "%qE attribute ignored", name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
5149
5150 /* Handle a "malloc" attribute; arguments as in
5151 struct attribute_spec.handler. */
5152
5153 static tree
5154 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5155 int ARG_UNUSED (flags), bool *no_add_attrs)
5156 {
5157 if (TREE_CODE (*node) == FUNCTION_DECL
5158 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
5159 DECL_IS_MALLOC (*node) = 1;
5160 else
5161 {
5162 warning (OPT_Wattributes, "%qE attribute ignored", name);
5163 *no_add_attrs = true;
5164 }
5165
5166 return NULL_TREE;
5167 }
5168
/* Fake handler for attributes we don't properly support: accept the
   attribute without acting on it, rather than rejecting it outright.  */

tree
fake_attribute_handler (tree * ARG_UNUSED (node),
			tree ARG_UNUSED (name),
			tree ARG_UNUSED (args),
			int ARG_UNUSED (flags),
			bool * ARG_UNUSED (no_add_attrs))
{
  return NULL_TREE;
}
5180
5181 /* Handle a "type_generic" attribute. */
5182
5183 static tree
5184 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
5185 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5186 bool * ARG_UNUSED (no_add_attrs))
5187 {
5188 tree params;
5189
5190 /* Ensure we have a function type. */
5191 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
5192
5193 params = TYPE_ARG_TYPES (*node);
5194 while (params && ! VOID_TYPE_P (TREE_VALUE (params)))
5195 params = TREE_CHAIN (params);
5196
5197 /* Ensure we have a variadic function. */
5198 gcc_assert (!params);
5199
5200 return NULL_TREE;
5201 }
5202
5203 /* ----------------------------------------------------------------------- *
5204 * BUILTIN FUNCTIONS *
5205 * ----------------------------------------------------------------------- */
5206
/* Worker for DEF_BUILTIN.  Possibly define a builtin function with one or two
   names.  Does not declare a non-__builtin_ function if flag_no_builtin, or
   if nonansi_p and flag_no_nonansi_builtin.

   NOTE(review): the flag_no_builtin/flag_no_nonansi_builtin part of the
   comment above is inherited from the C front-end; those flags are not
   consulted here (see the ??? note below about missing -fno-builtin).  */

static void
def_builtin_1 (enum built_in_function fncode,
	       const char *name,
	       enum built_in_class fnclass,
	       tree fntype, tree libtype,
	       bool both_p, bool fallback_p,
	       bool nonansi_p ATTRIBUTE_UNUSED,
	       tree fnattrs, bool implicit_p)
{
  tree decl;
  const char *libname;

  /* Preserve an already installed decl.  It most likely was setup in advance
     (e.g. as part of the internal builtins) for specific reasons.  */
  if (built_in_decls[(int) fncode] != NULL_TREE)
    return;

  /* If either a library alias or fallback is requested, NAME must carry
     the "__builtin_" prefix so the bare library name can be derived.  */
  gcc_assert ((!both_p && !fallback_p)
	      || !strncmp (name, "__builtin_",
			   strlen ("__builtin_")));

  /* The library name is NAME with the "__builtin_" prefix stripped.  */
  libname = name + strlen ("__builtin_");
  decl = add_builtin_function (name, fntype, fncode, fnclass,
			       (fallback_p ? libname : NULL),
			       fnattrs);
  if (both_p)
    /* ??? This is normally further controlled by command-line options
       like -fno-builtin, but we don't have them for Ada.  */
    add_builtin_function (libname, libtype, fncode, fnclass,
			  NULL, fnattrs);

  /* Record the decl in the global tables consulted by the middle-end.  */
  built_in_decls[(int) fncode] = decl;
  if (implicit_p)
    implicit_built_in_decls[(int) fncode] = decl;
}
5246
/* Flags referenced by the COND expressions in builtins.def.  Ada has no
   notion of C dialects, so both remain permanently off.  */
static int flag_isoc94 = 0;
static int flag_isoc99 = 0;
5249
/* Install what the common builtins.def offers.  Expands builtins.def
   once, declaring via def_builtin_1 each builtin whose COND holds, with
   types and attributes taken from the builtin_types and
   built_in_attributes arrays filled in by the install_* routines above.  */

static void
install_builtin_functions (void)
{
#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
		    NONANSI_P, ATTRS, IMPLICIT, COND)			\
  if (NAME && COND)							\
    def_builtin_1 (ENUM, NAME, CLASS,                                   \
                   builtin_types[(int) TYPE],                           \
                   builtin_types[(int) LIBTYPE],                        \
                   BOTH_P, FALLBACK_P, NONANSI_P,                       \
                   built_in_attributes[(int) ATTRS], IMPLICIT);
#include "builtins.def"
#undef DEF_BUILTIN
}
5266
/* ----------------------------------------------------------------------- *
 *                       BUILTINS INSTALLATION ENTRY POINT                 *
 * ----------------------------------------------------------------------- */
5270
/* Install the builtin functions we might need.  Drives the whole of the
   machinery above: elementary types first, then function types and
   attribute trees, then the decls themselves.  Called once during
   front-end initialization (presumably from gnat_init -- confirm).  */

void
gnat_install_builtins (void)
{
  install_builtin_elementary_types ();
  install_builtin_function_types ();
  install_builtin_attributes ();

  /* Install builtins used by generic middle-end pieces first.  Some of these
     know about internal specificities and control attributes accordingly, for
     instance __builtin_alloca vs no-throw and -fstack-check.  We will ignore
     the generic definition from builtins.def.  */
  build_common_builtin_nodes ();

  /* Now, install the target specific builtins, such as the AltiVec family on
     ppc, and the common set as exposed by builtins.def.  */
  targetm.init_builtins ();
  install_builtin_functions ();
}
5291
5292 #include "gt-ada-utils.h"
5293 #include "gtype-ada.h"