/* Source: gcc/ada/gcc-interface/utils.c (thirdparty/gcc.git mirror);
   captured near commit "Remove unnecessary VEC function overloads".  */
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2012, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
25
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "tm.h"
30 #include "tree.h"
31 #include "flags.h"
32 #include "toplev.h"
33 #include "diagnostic-core.h"
34 #include "output.h"
35 #include "ggc.h"
36 #include "debug.h"
37 #include "convert.h"
38 #include "target.h"
39 #include "common/common-target.h"
40 #include "langhooks.h"
41 #include "cgraph.h"
42 #include "diagnostic.h"
43 #include "timevar.h"
44 #include "tree-dump.h"
45 #include "tree-inline.h"
46 #include "tree-iterator.h"
47
48 #include "ada.h"
49 #include "types.h"
50 #include "atree.h"
51 #include "elists.h"
52 #include "namet.h"
53 #include "nlists.h"
54 #include "stringt.h"
55 #include "uintp.h"
56 #include "fe.h"
57 #include "sinfo.h"
58 #include "einfo.h"
59 #include "ada-tree.h"
60 #include "gigi.h"
61
/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* The default alignment of "double" floating-point types, i.e. floating
   point types whose size is equal to 64 bits, or 0 if this alignment is
   not specifically capped.  */
int double_float_alignment;

/* The default alignment of "double" or larger scalar types, i.e. scalar
   types whose size is greater or equal to 64 bits, or 0 if this alignment
   is not specifically capped.  */
int double_scalar_alignment;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Likewise, but with extra info for each of the possible raise reasons.  */
tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];

/* Forward declarations for handlers of attributes.  All share the signature
   expected by the handler field of struct attribute_spec.  */
static tree handle_const_attribute (tree *, tree, tree, int, bool *);
static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);

/* Fake handler for attributes we don't properly support, typically because
   they'd require dragging a lot of the common-c front-end circuitry.  */
static tree fake_attribute_handler (tree *, tree, tree, int, bool *);
101
/* Table of machine-independent internal attributes for Ada.  We support
   this minimal set of attributes to accommodate the needs of builtins.
   The table is terminated by an all-NULL sentinel entry.  */
const struct attribute_spec gnat_internal_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "const",        0, 0,  true,  false, false, handle_const_attribute,
    false },
  { "nothrow",      0, 0,  true,  false, false, handle_nothrow_attribute,
    false },
  { "pure",         0, 0,  true,  false, false, handle_pure_attribute,
    false },
  { "no vops",      0, 0,  true,  false, false, handle_novops_attribute,
    false },
  { "nonnull",      0, -1, false, true,  true,  handle_nonnull_attribute,
    false },
  { "sentinel",     0, 1,  false, true,  true,  handle_sentinel_attribute,
    false },
  { "noreturn",     0, 0,  true,  false, false, handle_noreturn_attribute,
    false },
  { "leaf",         0, 0,  true,  false, false, handle_leaf_attribute,
    false },
  { "malloc",       0, 0,  true,  false, false, handle_malloc_attribute,
    false },
  { "type generic", 0, 0,  false, true,  true,  handle_type_generic_attribute,
    false },

  { "vector_size",  1, 1,  false, true,  false, handle_vector_size_attribute,
    false },
  { "vector_type",  0, 0,  false, true,  false, handle_vector_type_attribute,
    false },
  { "may_alias",    0, 0,  false, true,  false, NULL, false },

  /* ??? format and format_arg are heavy and not supported, which actually
     prevents support for stdio builtins, which we however declare as part
     of the common builtins.def contents.  */
  { "format",     3, 3,  false, true,  true,  fake_attribute_handler, false },
  { "format_arg", 1, 1,  false, true,  true,  fake_attribute_handler, false },

  { NULL,         0, 0, false, false, false, NULL, false }
};
143
/* Associates a GNAT tree node to a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See documentation
   of `save_gnu_tree' for more info.  The table is indexed by GNAT node id
   offset by First_Node_Id, hence the subtraction in the macros below.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

#define GET_GNU_TREE(GNAT_ENTITY)	\
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]

#define SET_GNU_TREE(GNAT_ENTITY,VAL)	\
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_GNU_TREE(GNAT_ENTITY)	\
  (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* Associates a GNAT entity to a GCC tree node used as a dummy, if any.  */
static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;

#define GET_DUMMY_NODE(GNAT_ENTITY)	\
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id]

#define SET_DUMMY_NODE(GNAT_ENTITY,VAL)	\
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_DUMMY_NODE(GNAT_ENTITY)	\
  (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* This variable keeps a table for types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];

/* For each binding contour we allocate a binding_level structure to indicate
   the binding depth.  */

struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
  /* The binding level containing this one (the enclosing binding level).  */
  struct gnat_binding_level *chain;
  /* The BLOCK node for this level.  */
  tree block;
  /* If nonzero, the setjmp buffer that needs to be updated for any
     variable-sized definition within this context.  */
  tree jmpbuf_decl;
};

/* The binding level currently in effect.  */
static GTY(()) struct gnat_binding_level *current_binding_level;

/* A chain of gnat_binding_level structures awaiting reuse.  */
static GTY((deletable)) struct gnat_binding_level *free_binding_level;

/* The context to be used for global declarations.  */
static GTY(()) tree global_context;

/* An array of global declarations.  */
static GTY(()) VEC(tree,gc) *global_decls;

/* An array of builtin function declarations.  */
static GTY(()) VEC(tree,gc) *builtin_decls;

/* An array of global renaming pointers.  */
static GTY(()) VEC(tree,gc) *global_renaming_pointers;

/* A chain of unused BLOCK nodes.  */
static GTY((deletable)) tree free_block_chain;

static int pad_type_hash_marked_p (const void *p);
static hashval_t pad_type_hash_hash (const void *p);
static int pad_type_hash_eq (const void *p1, const void *p2);

/* A hash table of padded types.  It is modelled on the generic type
   hash table in tree.c, which must thus be used as a reference.  */
struct GTY(()) pad_type_hash {
  unsigned long hash;
  tree type;
};

static GTY ((if_marked ("pad_type_hash_marked_p"),
	     param_is (struct pad_type_hash)))
  htab_t pad_type_hash_table;

static tree merge_sizes (tree, tree, tree, bool, bool);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static tree float_type_for_precision (int, enum machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static bool potential_alignment_gap (tree, tree, tree);
static void process_attributes (tree, struct attrib *);
237 \f
238 /* Initialize data structures of the utils.c module. */
239
240 void
241 init_gnat_utils (void)
242 {
243 /* Initialize the association of GNAT nodes to GCC trees. */
244 associate_gnat_to_gnu = ggc_alloc_cleared_vec_tree (max_gnat_nodes);
245
246 /* Initialize the association of GNAT nodes to GCC trees as dummies. */
247 dummy_node_table = ggc_alloc_cleared_vec_tree (max_gnat_nodes);
248
249 /* Initialize the hash table of padded types. */
250 pad_type_hash_table = htab_create_ggc (512, pad_type_hash_hash,
251 pad_type_hash_eq, 0);
252 }
253
254 /* Destroy data structures of the utils.c module. */
255
256 void
257 destroy_gnat_utils (void)
258 {
259 /* Destroy the association of GNAT nodes to GCC trees. */
260 ggc_free (associate_gnat_to_gnu);
261 associate_gnat_to_gnu = NULL;
262
263 /* Destroy the association of GNAT nodes to GCC trees as dummies. */
264 ggc_free (dummy_node_table);
265 dummy_node_table = NULL;
266
267 /* Destroy the hash table of padded types. */
268 htab_delete (pad_type_hash_table);
269 pad_type_hash_table = NULL;
270
271 /* Invalidate the global renaming pointers. */
272 invalidate_global_renaming_pointers ();
273 }
274 \f
275 /* GNAT_ENTITY is a GNAT tree node for an entity. Associate GNU_DECL, a GCC
276 tree node, with GNAT_ENTITY. If GNU_DECL is not a ..._DECL node, abort.
277 If NO_CHECK is true, the latter check is suppressed.
278
279 If GNU_DECL is zero, reset a previous association. */
280
281 void
282 save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
283 {
284 /* Check that GNAT_ENTITY is not already defined and that it is being set
285 to something which is a decl. If that is not the case, this usually
286 means GNAT_ENTITY is defined twice, but occasionally is due to some
287 Gigi problem. */
288 gcc_assert (!(gnu_decl
289 && (PRESENT_GNU_TREE (gnat_entity)
290 || (!no_check && !DECL_P (gnu_decl)))));
291
292 SET_GNU_TREE (gnat_entity, gnu_decl);
293 }
294
295 /* GNAT_ENTITY is a GNAT tree node for an entity. Return the GCC tree node
296 that was associated with it. If there is no such tree node, abort.
297
298 In some cases, such as delayed elaboration or expressions that need to
299 be elaborated only once, GNAT_ENTITY is really not an entity. */
300
301 tree
302 get_gnu_tree (Entity_Id gnat_entity)
303 {
304 gcc_assert (PRESENT_GNU_TREE (gnat_entity));
305 return GET_GNU_TREE (gnat_entity);
306 }
307
308 /* Return nonzero if a GCC tree has been associated with GNAT_ENTITY. */
309
310 bool
311 present_gnu_tree (Entity_Id gnat_entity)
312 {
313 return PRESENT_GNU_TREE (gnat_entity);
314 }
315 \f
/* Make a dummy type corresponding to GNAT_TYPE.  */

tree
make_dummy_type (Entity_Id gnat_type)
{
  Entity_Id gnat_underlying = Gigi_Equivalent_Type (gnat_type);
  tree gnu_type;

  /* If there is an equivalent type, get its underlying type.  */
  if (Present (gnat_underlying))
    gnat_underlying = Gigi_Equivalent_Type (Underlying_Type (gnat_underlying));

  /* If there was no equivalent type (can only happen when just annotating
     types) or underlying type, go back to the original type.  */
  if (No (gnat_underlying))
    gnat_underlying = gnat_type;

  /* If there is already a dummy type, use that one.  Else make one.  */
  if (PRESENT_DUMMY_NODE (gnat_underlying))
    return GET_DUMMY_NODE (gnat_underlying);

  /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
     an ENUMERAL_TYPE.  */
  gnu_type = make_node (Is_Record_Type (gnat_underlying)
			? tree_code_for_record_type (gnat_underlying)
			: ENUMERAL_TYPE);
  TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
  TYPE_DUMMY_P (gnu_type) = 1;
  TYPE_STUB_DECL (gnu_type)
    = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
  /* Record the by-reference property on the dummy so that objects of this
     type are handled consistently before the real type is elaborated.  */
  if (Is_By_Reference_Type (gnat_underlying))
    TYPE_BY_REFERENCE_P (gnu_type) = 1;

  SET_DUMMY_NODE (gnat_underlying, gnu_type);

  return gnu_type;
}
353
/* Return the dummy type that was made for GNAT_TYPE, if any.
   Returns NULL_TREE when no dummy type has been made for it.  */

tree
get_dummy_type (Entity_Id gnat_type)
{
  return GET_DUMMY_NODE (gnat_type);
}
361
/* Build dummy fat and thin pointer types whose designated type is specified
   by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter.  */

void
build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
{
  tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
  tree gnu_fat_type, fields, gnu_object_type;

  /* The dummy template record ("XUB" suffix) stands for the bounds.  */
  gnu_template_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
  TYPE_DUMMY_P (gnu_template_type) = 1;
  gnu_ptr_template = build_pointer_type (gnu_template_type);

  /* The dummy array type ("XUA" suffix) stands for the actual data.  */
  gnu_array_type = make_node (ENUMERAL_TYPE);
  TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
  TYPE_DUMMY_P (gnu_array_type) = 1;
  gnu_ptr_array = build_pointer_type (gnu_array_type);

  /* The fat pointer ("XUP" suffix) pairs a pointer to the data with a
     pointer to the bounds template.  */
  gnu_fat_type = make_node (RECORD_TYPE);
  /* Build a stub DECL to trigger the special processing for fat pointer types
     in gnat_pushdecl.  */
  TYPE_NAME (gnu_fat_type)
    = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
			     gnu_fat_type);
  fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
			      gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  DECL_CHAIN (fields)
    = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
			 gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  finish_fat_pointer_type (gnu_fat_type, fields);
  SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
  /* Suppress debug info until after the type is completed.  */
  TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;

  /* The object record type ("XUT" suffix) underlies the thin pointer.  */
  gnu_object_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
  TYPE_DUMMY_P (gnu_object_type) = 1;

  /* Attach both dummies to the designated type.  */
  TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
  TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
}
404 \f
405 /* Return true if we are in the global binding level. */
406
407 bool
408 global_bindings_p (void)
409 {
410 return force_global || current_function_decl == NULL_TREE;
411 }
412
413 /* Enter a new binding level. */
414
415 void
416 gnat_pushlevel (void)
417 {
418 struct gnat_binding_level *newlevel = NULL;
419
420 /* Reuse a struct for this binding level, if there is one. */
421 if (free_binding_level)
422 {
423 newlevel = free_binding_level;
424 free_binding_level = free_binding_level->chain;
425 }
426 else
427 newlevel = ggc_alloc_gnat_binding_level ();
428
429 /* Use a free BLOCK, if any; otherwise, allocate one. */
430 if (free_block_chain)
431 {
432 newlevel->block = free_block_chain;
433 free_block_chain = BLOCK_CHAIN (free_block_chain);
434 BLOCK_CHAIN (newlevel->block) = NULL_TREE;
435 }
436 else
437 newlevel->block = make_node (BLOCK);
438
439 /* Point the BLOCK we just made to its parent. */
440 if (current_binding_level)
441 BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;
442
443 BLOCK_VARS (newlevel->block) = NULL_TREE;
444 BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
445 TREE_USED (newlevel->block) = 1;
446
447 /* Add this level to the front of the chain (stack) of active levels. */
448 newlevel->chain = current_binding_level;
449 newlevel->jmpbuf_decl = NULL_TREE;
450 current_binding_level = newlevel;
451 }
452
453 /* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
454 and point FNDECL to this BLOCK. */
455
456 void
457 set_current_block_context (tree fndecl)
458 {
459 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
460 DECL_INITIAL (fndecl) = current_binding_level->block;
461 set_block_for_group (current_binding_level->block);
462 }
463
/* Set the jmpbuf_decl for the current binding level to DECL.  The setjmp
   buffer is updated for any variable-sized definition in this contour.  */

void
set_block_jmpbuf_decl (tree decl)
{
  current_binding_level->jmpbuf_decl = decl;
}
471
/* Get the jmpbuf_decl, if any, for the current binding level.
   Returns NULL_TREE when none has been set.  */

tree
get_block_jmpbuf_decl (void)
{
  return current_binding_level->jmpbuf_decl;
}
479
/* Exit a binding level.  Set any BLOCK into the current code group.  */

void
gnat_poplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  /* Vars and subblocks were pushed at the front as they were seen, so they
     are in reverse order; restore source order before emitting.  */
  BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
  BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));

  /* If this is a function-level BLOCK don't do anything.  Otherwise, if there
     are no variables free the block and merge its subblocks into those of its
     parent block.  Otherwise, add it to the list of its parent.  */
  if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
    ;
  else if (BLOCK_VARS (block) == NULL_TREE)
    {
      BLOCK_SUBBLOCKS (level->chain->block)
	= block_chainon (BLOCK_SUBBLOCKS (block),
			 BLOCK_SUBBLOCKS (level->chain->block));
      BLOCK_CHAIN (block) = free_block_chain;
      free_block_chain = block;
    }
  else
    {
      BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
      BLOCK_SUBBLOCKS (level->chain->block) = block;
      TREE_USED (block) = 1;
      set_block_for_group (block);
    }

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}
517
518 /* Exit a binding level and discard the associated BLOCK. */
519
520 void
521 gnat_zaplevel (void)
522 {
523 struct gnat_binding_level *level = current_binding_level;
524 tree block = level->block;
525
526 BLOCK_CHAIN (block) = free_block_chain;
527 free_block_chain = block;
528
529 /* Free this binding structure. */
530 current_binding_level = level->chain;
531 level->chain = free_binding_level;
532 free_binding_level = level;
533 }
534 \f
/* Record DECL as belonging to the current lexical scope and use GNAT_NODE
   for location information and flag propagation.  */

void
gnat_pushdecl (tree decl, Node_Id gnat_node)
{
  /* If DECL is public external or at top level, it has global context.  */
  if ((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || global_bindings_p ())
    {
      /* The translation unit decl is built lazily on first use.  */
      if (!global_context)
	global_context = build_translation_unit_decl (NULL_TREE);
      DECL_CONTEXT (decl) = global_context;
   }
  else
    {
      DECL_CONTEXT (decl) = current_function_decl;

      /* Functions imported in another function are not really nested.
	 For really nested functions mark them initially as needing
	 a static chain for uses of that flag before unnesting;
	 lower_nested_functions will then recompute it.  */
      if (TREE_CODE (decl) == FUNCTION_DECL && !TREE_PUBLIC (decl))
	DECL_STATIC_CHAIN (decl) = 1;
    }

  /* Suppress warnings when the GNAT node is absent or has them disabled.  */
  TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));

  /* Set the location of DECL and emit a declaration for it.  */
  if (Present (gnat_node))
    Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));

  add_decl_expr (decl, gnat_node);

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later.  Put global declarations in the
     globals list and local ones in the current block.  But skip TYPE_DECLs
     for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
     with the debugger and aren't needed anyway.  */
  if (!(TREE_CODE (decl) == TYPE_DECL
        && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
    {
      if (DECL_EXTERNAL (decl))
	{
	  /* Only built-in functions are recorded, for later lookup.  */
	  if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
	    VEC_safe_push (tree, gc, builtin_decls, decl);
	}
      else if (global_bindings_p ())
	VEC_safe_push (tree, gc, global_decls, decl);
      else
	{
	  DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
	  BLOCK_VARS (current_binding_level->block) = decl;
	}
    }

  /* For the declaration of a type, set its name if it either is not already
     set or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same POINTER_TYPE node.  Code in the
     equivalent function of c-decl.c makes a copy of the type node here, but
     that may cause us trouble with incomplete types.  We make an exception
     for fat pointer types because the compiler automatically builds them
     for unconstrained array types and the debugger uses them to represent
     both these and pointers to these.  */
  if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
    {
      tree t = TREE_TYPE (decl);

      if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
	{
	  /* Array and pointer types aren't "tagged" types so we force the
	     type to be associated with its typedef in the DWARF back-end,
	     in order to make sure that the latter is always preserved.  */
	  if (!DECL_ARTIFICIAL (decl)
	      && (TREE_CODE (t) == ARRAY_TYPE
		  || TREE_CODE (t) == POINTER_TYPE))
	    {
	      tree tt = build_distinct_type_copy (t);
	      if (TREE_CODE (t) == POINTER_TYPE)
		TYPE_NEXT_PTR_TO (t) = tt;
	      TYPE_NAME (tt) = DECL_NAME (decl);
	      TYPE_CONTEXT (tt) = DECL_CONTEXT (decl);
	      TYPE_STUB_DECL (tt) = TYPE_STUB_DECL (t);
	      DECL_ORIGINAL_TYPE (decl) = tt;
	    }
	}
      else if (TYPE_IS_FAT_POINTER_P (t))
	{
	  /* We need a variant for the placeholder machinery to work.  */
	  tree tt = build_variant_type_copy (t);
	  TYPE_NAME (tt) = decl;
	  TYPE_CONTEXT (tt) = DECL_CONTEXT (decl);
	  TREE_USED (tt) = TREE_USED (t);
	  TREE_TYPE (decl) = tt;
	  if (DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
	    DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
	  else
	    DECL_ORIGINAL_TYPE (decl) = t;
	  DECL_ARTIFICIAL (decl) = 0;
	  /* Clearing T skips the name-propagation loop below.  */
	  t = NULL_TREE;
	}
      else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
	;
      else
	t = NULL_TREE;

      /* Propagate the name to all the anonymous variants.  This is needed
	 for the type qualifiers machinery to work properly.  */
      if (t)
	for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
	  if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
	    {
	      TYPE_NAME (t) = decl;
	      TYPE_CONTEXT (t) = DECL_CONTEXT (decl);
	    }
    }
}
652 \f
/* Create a record type that contains a SIZE bytes long field of TYPE with a
   starting bit position so that it is aligned to ALIGN bits, and leaving at
   least ROOM bytes free before the field.  BASE_ALIGN is the alignment the
   record is guaranteed to get.  */

tree
make_aligning_type (tree type, unsigned int align, tree size,
		    unsigned int base_align, int room)
{
  /* We will be crafting a record type with one field at a position set to be
     the next multiple of ALIGN past record'address + room bytes.  We use a
     record placeholder to express record'address.  */
  tree record_type = make_node (RECORD_TYPE);
  tree record = build0 (PLACEHOLDER_EXPR, record_type);

  tree record_addr_st
    = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));

  /* The diagram below summarizes the shape of what we manipulate:

                    <--------- pos ---------->
                {  +------------+-------------+-----------------+
      record  =>{  |############|     ...     | field (type)    |
                {  +------------+-------------+-----------------+
		   |<-- room -->|<- voffset ->|<---- size ----->|
		   o            o
		   |            |
		   record_addr  vblock_addr

     Every length is in sizetype bytes there, except "pos" which has to be
     set as a bit position in the GCC tree for the record.  */
  tree room_st = size_int (room);
  tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
  tree voffset_st, pos, field;

  tree name = TYPE_NAME (type);

  /* Derive the record's name from that of TYPE, with an "ALIGN" suffix.  */
  if (TREE_CODE (name) == TYPE_DECL)
    name = DECL_NAME (name);
  name = concat_name (name, "ALIGN");
  TYPE_NAME (record_type) = name;

  /* Compute VOFFSET and then POS.  The next byte position multiple of some
     alignment after some address is obtained by "and"ing the alignment minus
     1 with the two's complement of the address.   */
  voffset_st = size_binop (BIT_AND_EXPR,
			   fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
			   size_int ((align / BITS_PER_UNIT) - 1));

  /* POS = (ROOM + VOFFSET) * BIT_PER_UNIT, in bitsizetype.  */
  pos = size_binop (MULT_EXPR,
		    convert (bitsizetype,
			     size_binop (PLUS_EXPR, room_st, voffset_st)),
                    bitsize_unit_node);

  /* Craft the GCC record representation.  We exceptionally do everything
     manually here because 1) our generic circuitry is not quite ready to
     handle the complex position/size expressions we are setting up, 2) we
     have a strong simplifying factor at hand: we know the maximum possible
     value of voffset, and 3) we have to set/reset at least the sizes in
     accordance with this maximum value anyway, as we need them to convey
     what should be "alloc"ated for this type.

     Use -1 as the 'addressable' indication for the field to prevent the
     creation of a bitfield.  We don't need one, it would have damaging
     consequences on the alignment computation, and create_field_decl would
     make one without this special argument, for instance because of the
     complex position expression.  */
  field = create_field_decl (get_identifier ("F"), type, record_type, size,
			     pos, 1, -1);
  TYPE_FIELDS (record_type) = field;

  TYPE_ALIGN (record_type) = base_align;
  TYPE_USER_ALIGN (record_type) = 1;

  /* Sizes account for the worst-case voffset, i.e. ALIGN bits plus ROOM
     bytes of leading slack.  */
  TYPE_SIZE (record_type)
    = size_binop (PLUS_EXPR,
                  size_binop (MULT_EXPR, convert (bitsizetype, size),
                              bitsize_unit_node),
		  bitsize_int (align + room * BITS_PER_UNIT));
  TYPE_SIZE_UNIT (record_type)
    = size_binop (PLUS_EXPR, size,
		  size_int (room + align / BITS_PER_UNIT));

  SET_TYPE_MODE (record_type, BLKmode);
  relate_alias_sets (record_type, type, ALIAS_SET_COPY);

  /* Declare it now since it will never be declared otherwise.  This is
     necessary to ensure that its subtrees are properly marked.  */
  create_type_decl (name, record_type, NULL, true, false, Empty);

  return record_type;
}
746
747 /* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
748 as the field type of a packed record if IN_RECORD is true, or as the
749 component type of a packed array if IN_RECORD is false. See if we can
750 rewrite it either as a type that has a non-BLKmode, which we can pack
751 tighter in the packed record case, or as a smaller type. If so, return
752 the new type. If not, return the original type. */
753
754 tree
755 make_packable_type (tree type, bool in_record)
756 {
757 unsigned HOST_WIDE_INT size = tree_low_cst (TYPE_SIZE (type), 1);
758 unsigned HOST_WIDE_INT new_size;
759 tree new_type, old_field, field_list = NULL_TREE;
760 unsigned int align;
761
762 /* No point in doing anything if the size is zero. */
763 if (size == 0)
764 return type;
765
766 new_type = make_node (TREE_CODE (type));
767
768 /* Copy the name and flags from the old type to that of the new.
769 Note that we rely on the pointer equality created here for
770 TYPE_NAME to look through conversions in various places. */
771 TYPE_NAME (new_type) = TYPE_NAME (type);
772 TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
773 TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
774 if (TREE_CODE (type) == RECORD_TYPE)
775 TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);
776
777 /* If we are in a record and have a small size, set the alignment to
778 try for an integral mode. Otherwise set it to try for a smaller
779 type with BLKmode. */
780 if (in_record && size <= MAX_FIXED_MODE_SIZE)
781 {
782 align = ceil_pow2 (size);
783 TYPE_ALIGN (new_type) = align;
784 new_size = (size + align - 1) & -align;
785 }
786 else
787 {
788 unsigned HOST_WIDE_INT align;
789
790 /* Do not try to shrink the size if the RM size is not constant. */
791 if (TYPE_CONTAINS_TEMPLATE_P (type)
792 || !host_integerp (TYPE_ADA_SIZE (type), 1))
793 return type;
794
795 /* Round the RM size up to a unit boundary to get the minimal size
796 for a BLKmode record. Give up if it's already the size. */
797 new_size = TREE_INT_CST_LOW (TYPE_ADA_SIZE (type));
798 new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
799 if (new_size == size)
800 return type;
801
802 align = new_size & -new_size;
803 TYPE_ALIGN (new_type) = MIN (TYPE_ALIGN (type), align);
804 }
805
806 TYPE_USER_ALIGN (new_type) = 1;
807
808 /* Now copy the fields, keeping the position and size as we don't want
809 to change the layout by propagating the packedness downwards. */
810 for (old_field = TYPE_FIELDS (type); old_field;
811 old_field = DECL_CHAIN (old_field))
812 {
813 tree new_field_type = TREE_TYPE (old_field);
814 tree new_field, new_size;
815
816 if (RECORD_OR_UNION_TYPE_P (new_field_type)
817 && !TYPE_FAT_POINTER_P (new_field_type)
818 && host_integerp (TYPE_SIZE (new_field_type), 1))
819 new_field_type = make_packable_type (new_field_type, true);
820
821 /* However, for the last field in a not already packed record type
822 that is of an aggregate type, we need to use the RM size in the
823 packable version of the record type, see finish_record_type. */
824 if (!DECL_CHAIN (old_field)
825 && !TYPE_PACKED (type)
826 && RECORD_OR_UNION_TYPE_P (new_field_type)
827 && !TYPE_FAT_POINTER_P (new_field_type)
828 && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
829 && TYPE_ADA_SIZE (new_field_type))
830 new_size = TYPE_ADA_SIZE (new_field_type);
831 else
832 new_size = DECL_SIZE (old_field);
833
834 new_field
835 = create_field_decl (DECL_NAME (old_field), new_field_type, new_type,
836 new_size, bit_position (old_field),
837 TYPE_PACKED (type),
838 !DECL_NONADDRESSABLE_P (old_field));
839
840 DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (old_field);
841 SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, old_field);
842 if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
843 DECL_QUALIFIER (new_field) = DECL_QUALIFIER (old_field);
844
845 DECL_CHAIN (new_field) = field_list;
846 field_list = new_field;
847 }
848
849 finish_record_type (new_type, nreverse (field_list), 2, false);
850 relate_alias_sets (new_type, type, ALIAS_SET_COPY);
851 SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
852 DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));
853
854 /* If this is a padding record, we never want to make the size smaller
855 than what was specified. For QUAL_UNION_TYPE, also copy the size. */
856 if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
857 {
858 TYPE_SIZE (new_type) = TYPE_SIZE (type);
859 TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
860 new_size = size;
861 }
862 else
863 {
864 TYPE_SIZE (new_type) = bitsize_int (new_size);
865 TYPE_SIZE_UNIT (new_type)
866 = size_int ((new_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
867 }
868
869 if (!TYPE_CONTAINS_TEMPLATE_P (type))
870 SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));
871
872 compute_record_mode (new_type);
873
874 /* Try harder to get a packable type if necessary, for example
875 in case the record itself contains a BLKmode field. */
876 if (in_record && TYPE_MODE (new_type) == BLKmode)
877 SET_TYPE_MODE (new_type,
878 mode_for_size_tree (TYPE_SIZE (new_type), MODE_INT, 1));
879
880 /* If neither the mode nor the size has shrunk, return the old type. */
881 if (TYPE_MODE (new_type) == BLKmode && new_size >= size)
882 return type;
883
884 return new_type;
885 }
886
/* Given a type TYPE, return a new type whose size is appropriate for SIZE.
   If TYPE is the best type, return it.  Otherwise, make a new type.  We
   only support new integral and pointer types.  FOR_BIASED is true if
   we are making a biased type.  */

tree
make_type_from_size (tree type, tree size_tree, bool for_biased)
{
  unsigned HOST_WIDE_INT size;
  bool biased_p;
  tree new_type;

  /* If size indicates an error, just return TYPE to avoid propagating
     the error.  Likewise if it's too large to represent.  */
  if (!size_tree || !host_integerp (size_tree, 1))
    return type;

  size = tree_low_cst (size_tree, 1);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      /* Only INTEGER_TYPEs can carry a biased representation; enumeration
	 and boolean types never do.  */
      biased_p = (TREE_CODE (type) == INTEGER_TYPE
		  && TYPE_BIASED_REPRESENTATION_P (type));

      /* Integer types with precision 0 are forbidden.  */
      if (size == 0)
	size = 1;

      /* Only do something if the type isn't a packed array type and doesn't
	 already have the proper size and the size isn't too large.  */
      if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
	  || (TYPE_PRECISION (type) == size && biased_p == for_biased)
	  || size > LONG_LONG_TYPE_SIZE)
	break;

      /* The new type keeps the bias if either the old type had one or the
	 caller asked for one.  A biased type is represented unsigned.  */
      biased_p |= for_biased;
      if (TYPE_UNSIGNED (type) || biased_p)
	new_type = make_unsigned_type (size);
      else
	new_type = make_signed_type (size);
      /* Chain the new type to the old one (or to the old one's base type
	 when the old type already hangs off another).  */
      TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
      SET_TYPE_RM_MIN_VALUE (new_type,
			     convert (TREE_TYPE (new_type),
				      TYPE_MIN_VALUE (type)));
      SET_TYPE_RM_MAX_VALUE (new_type,
			     convert (TREE_TYPE (new_type),
				      TYPE_MAX_VALUE (type)));
      /* Copy the name to show that it's essentially the same type and
	 not a subrange type.  */
      TYPE_NAME (new_type) = TYPE_NAME (type);
      TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
      SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
      return new_type;

    case RECORD_TYPE:
      /* Do something if this is a fat pointer, in which case we
	 may need to return the thin pointer.  */
      if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
	{
	  enum machine_mode p_mode = mode_for_size (size, MODE_INT, 0);
	  /* Fall back to the canonical pointer mode if the target cannot
	     use a pointer of the requested size.  */
	  if (!targetm.valid_pointer_mode (p_mode))
	    p_mode = ptr_mode;
	  return
	    build_pointer_type_for_mode
	      (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
	       p_mode, 0);
	}
      break;

    case POINTER_TYPE:
      /* Only do something if this is a thin pointer, in which case we
	 may need to return the fat pointer.  */
      if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
	return
	  build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
      break;

    default:
      break;
    }

  /* No resizing was possible or needed: hand back the original type.  */
  return type;
}
973
974 /* See if the data pointed to by the hash table slot is marked. */
975
976 static int
977 pad_type_hash_marked_p (const void *p)
978 {
979 const_tree const type = ((const struct pad_type_hash *) p)->type;
980
981 return ggc_marked_p (type);
982 }
983
984 /* Return the cached hash value. */
985
986 static hashval_t
987 pad_type_hash_hash (const void *p)
988 {
989 return ((const struct pad_type_hash *) p)->hash;
990 }
991
992 /* Return 1 iff the padded types are equivalent. */
993
994 static int
995 pad_type_hash_eq (const void *p1, const void *p2)
996 {
997 const struct pad_type_hash *const t1 = (const struct pad_type_hash *) p1;
998 const struct pad_type_hash *const t2 = (const struct pad_type_hash *) p2;
999 tree type1, type2;
1000
1001 if (t1->hash != t2->hash)
1002 return 0;
1003
1004 type1 = t1->type;
1005 type2 = t2->type;
1006
1007 /* We consider that the padded types are equivalent if they pad the same
1008 type and have the same size, alignment and RM size. Taking the mode
1009 into account is redundant since it is determined by the others. */
1010 return
1011 TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
1012 && TYPE_SIZE (type1) == TYPE_SIZE (type2)
1013 && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
1014 && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2);
1015 }
1016
/* Ensure that TYPE has SIZE and ALIGN.  Make and return a new padded type
   if needed.  We have already verified that SIZE and TYPE are large enough.
   GNAT_ENTITY is used to name the resulting record and to issue a warning.
   IS_COMPONENT_TYPE is true if this is being done for the component type of
   an array.  IS_USER_TYPE is true if the original type needs to be completed.
   DEFINITION is true if this type is being defined.  SET_RM_SIZE is true if
   the RM size of the resulting type is to be set to SIZE too.  */

tree
maybe_pad_type (tree type, tree size, unsigned int align,
		Entity_Id gnat_entity, bool is_component_type,
		bool is_user_type, bool definition, bool set_rm_size)
{
  tree orig_size = TYPE_SIZE (type);
  tree record, field;

  /* If TYPE is a padded type, see if it agrees with any size and alignment
     we were given.  If so, return the original type.  Otherwise, strip
     off the padding, since we will either be returning the inner type
     or repadding it.  If no size or alignment is specified, use that of
     the original padded type.  */
  if (TYPE_IS_PADDING_P (type))
    {
      if ((!size
	   || operand_equal_p (round_up (size,
					 MAX (align, TYPE_ALIGN (type))),
			       round_up (TYPE_SIZE (type),
					 MAX (align, TYPE_ALIGN (type))),
			       0))
	  && (align == 0 || align == TYPE_ALIGN (type)))
	return type;

      if (!size)
	size = TYPE_SIZE (type);
      if (align == 0)
	align = TYPE_ALIGN (type);

      /* From here on, work with the inner type being padded.  */
      type = TREE_TYPE (TYPE_FIELDS (type));
      orig_size = TYPE_SIZE (type);
    }

  /* If the size is either not being changed or is being made smaller (which
     is not done here and is only valid for bitfields anyway), show the size
     isn't changing.  Likewise, clear the alignment if it isn't being
     changed.  Then return if we aren't doing anything.  */
  if (size
      && (operand_equal_p (size, orig_size, 0)
	  || (TREE_CODE (orig_size) == INTEGER_CST
	      && tree_int_cst_lt (size, orig_size))))
    size = NULL_TREE;

  if (align == TYPE_ALIGN (type))
    align = 0;

  if (align == 0 && !size)
    return type;

  /* If requested, complete the original type and give it a name.  */
  if (is_user_type)
    create_type_decl (get_entity_name (gnat_entity), type,
		      NULL, !Comes_From_Source (gnat_entity),
		      !(TYPE_NAME (type)
			&& TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
			&& DECL_IGNORED_P (TYPE_NAME (type))),
		      gnat_entity);

  /* We used to modify the record in place in some cases, but that could
     generate incorrect debugging information.  So make a new record
     type and name.  */
  record = make_node (RECORD_TYPE);
  TYPE_PADDING_P (record) = 1;

  if (Present (gnat_entity))
    TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");

  TYPE_ALIGN (record) = align;
  TYPE_SIZE (record) = size ? size : orig_size;
  /* The byte size is the bit size divided by BITS_PER_UNIT, rounded up.  */
  TYPE_SIZE_UNIT (record)
    = convert (sizetype,
	       size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
			   bitsize_unit_node));

  /* If we are changing the alignment and the input type is a record with
     BLKmode and a small constant size, try to make a form that has an
     integral mode.  This might allow the padding record to also have an
     integral mode, which will be much more efficient.  There is no point
     in doing so if a size is specified unless it is also a small constant
     size and it is incorrect to do so if we cannot guarantee that the mode
     will be naturally aligned since the field must always be addressable.

     ??? This might not always be a win when done for a stand-alone object:
     since the nominal and the effective type of the object will now have
     different modes, a VIEW_CONVERT_EXPR will be required for converting
     between them and it might be hard to overcome afterwards, including
     at the RTL level when the stand-alone object is accessed as a whole.  */
  if (align != 0
      && RECORD_OR_UNION_TYPE_P (type)
      && TYPE_MODE (type) == BLKmode
      && !TYPE_BY_REFERENCE_P (type)
      && TREE_CODE (orig_size) == INTEGER_CST
      && !TREE_OVERFLOW (orig_size)
      && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
      && (!size
	  || (TREE_CODE (size) == INTEGER_CST
	      && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
    {
      tree packable_type = make_packable_type (type, true);
      /* Only use the packable variant if it actually achieved an integral
	 mode and the requested alignment can accommodate it.  */
      if (TYPE_MODE (packable_type) != BLKmode
	  && align >= TYPE_ALIGN (packable_type))
	type = packable_type;
    }

  /* Now create the field with the original size.  */
  field = create_field_decl (get_identifier ("F"), type, record, orig_size,
			     bitsize_zero_node, 0, 1);
  DECL_INTERNAL_P (field) = 1;

  /* Do not emit debug info until after the auxiliary record is built.  */
  finish_record_type (record, field, 1, false);

  /* Set the RM size if requested.  */
  if (set_rm_size)
    {
      SET_TYPE_ADA_SIZE (record, size ? size : orig_size);

      /* If the padded type is complete and has constant size, we canonicalize
	 it by means of the hash table.  This is consistent with the language
	 semantics and ensures that gigi and the middle-end have a common view
	 of these padded types.  */
      if (TREE_CONSTANT (TYPE_SIZE (record)))
	{
	  hashval_t hashcode;
	  struct pad_type_hash in, *h;
	  void **loc;

	  /* The hash covers the four properties that pad_type_hash_eq
	     compares: inner type, size, alignment and RM size.  */
	  hashcode = iterative_hash_object (TYPE_HASH (type), 0);
	  hashcode = iterative_hash_expr (TYPE_SIZE (record), hashcode);
	  hashcode = iterative_hash_hashval_t (TYPE_ALIGN (record), hashcode);
	  hashcode = iterative_hash_expr (TYPE_ADA_SIZE (record), hashcode);

	  in.hash = hashcode;
	  in.type = record;
	  h = (struct pad_type_hash *)
		htab_find_with_hash (pad_type_hash_table, &in, hashcode);
	  if (h)
	    {
	      /* An equivalent canonical padded type already exists: reuse
		 it and skip the debug-info machinery below.  */
	      record = h->type;
	      goto built;
	    }

	  /* First time we see this padded type: register it as canonical.  */
	  h = ggc_alloc_pad_type_hash ();
	  h->hash = hashcode;
	  h->type = record;
	  loc = htab_find_slot_with_hash (pad_type_hash_table, h, hashcode,
					  INSERT);
	  *loc = (void *)h;
	}
    }

  /* Unless debugging information isn't being written for the input type,
     write a record that shows what we are a subtype of and also make a
     variable that indicates our size, if still variable.  */
  if (TREE_CODE (orig_size) != INTEGER_CST
      && TYPE_NAME (record)
      && TYPE_NAME (type)
      && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
	   && DECL_IGNORED_P (TYPE_NAME (type))))
    {
      tree marker = make_node (RECORD_TYPE);
      tree name = TYPE_NAME (record);
      tree orig_name = TYPE_NAME (type);

      if (TREE_CODE (name) == TYPE_DECL)
	name = DECL_NAME (name);

      if (TREE_CODE (orig_name) == TYPE_DECL)
	orig_name = DECL_NAME (orig_name);

      /* The "XVS" suffix is the debug-info encoding for such a marker
	 record; it holds a single reference to the original type.  */
      TYPE_NAME (marker) = concat_name (name, "XVS");
      finish_record_type (marker,
			  create_field_decl (orig_name,
					     build_reference_type (type),
					     marker, NULL_TREE, NULL_TREE,
					     0, 0),
			  0, true);

      add_parallel_type (record, marker);

      /* If the size is variable and this type is being defined, also emit
	 an "XVZ" variable holding the size in bytes for the debugger.  */
      if (definition && size && TREE_CODE (size) != INTEGER_CST)
	TYPE_SIZE_UNIT (marker)
	  = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
			     TYPE_SIZE_UNIT (record), false, false, false,
			     false, NULL, gnat_entity);
    }

  rest_of_record_type_compilation (record);

built:
  /* If the size was widened explicitly, maybe give a warning.  Take the
     original size as the maximum size of the input if there was an
     unconstrained record involved and round it up to the specified alignment,
     if one was specified.  But don't do it if we are just annotating types
     and the type is tagged, since tagged types aren't fully laid out in this
     mode.  */
  if (!size
      || TREE_CODE (size) == COND_EXPR
      || TREE_CODE (size) == MAX_EXPR
      || No (gnat_entity)
      || (type_annotate_only && Is_Tagged_Type (Etype (gnat_entity))))
    return record;

  if (CONTAINS_PLACEHOLDER_P (orig_size))
    orig_size = max_size (orig_size, true);

  if (align)
    orig_size = round_up (orig_size, align);

  /* Warn only when SIZE really exceeds ORIG_SIZE; the second conjunct
     filters out overflowed constants and genuine shrinkage.  */
  if (!operand_equal_p (size, orig_size, 0)
      && !(TREE_CODE (size) == INTEGER_CST
	   && TREE_CODE (orig_size) == INTEGER_CST
	   && (TREE_OVERFLOW (size)
	       || TREE_OVERFLOW (orig_size)
	       || tree_int_cst_lt (size, orig_size))))
    {
      Node_Id gnat_error_node = Empty;

      if (Is_Packed_Array_Type (gnat_entity))
	gnat_entity = Original_Array_Type (gnat_entity);

      if ((Ekind (gnat_entity) == E_Component
	   || Ekind (gnat_entity) == E_Discriminant)
	  && Present (Component_Clause (gnat_entity)))
	gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
      else if (Present (Size_Clause (gnat_entity)))
	gnat_error_node = Expression (Size_Clause (gnat_entity));

      /* Generate message only for entities that come from source, since
	 if we have an entity created by expansion, the message will be
	 generated for some other corresponding source entity.  */
      if (Comes_From_Source (gnat_entity))
	{
	  if (Present (gnat_error_node))
	    post_error_ne_tree ("{^ }bits of & unused?",
				gnat_error_node, gnat_entity,
				size_diffop (size, orig_size));
	  else if (is_component_type)
	    post_error_ne_tree ("component of& padded{ by ^ bits}?",
				gnat_entity, gnat_entity,
				size_diffop (size, orig_size));
	}
    }

  return record;
}
1271 \f
/* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
   If this is a multi-dimensional array type, do this recursively.

   OP may be
   - ALIAS_SET_COPY:     the new set is made a copy of the old one.
   - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
   - ALIAS_SET_SUBSET:   the new set is made a subset of the old one.  */

void
relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
{
  /* Remove any padding from GNU_OLD_TYPE.  It doesn't matter in the case
     of a one-dimensional array, since the padding has the same alias set
     as the field type, but if it's a multi-dimensional array, we need to
     see the inner types.  */
  while (TREE_CODE (gnu_old_type) == RECORD_TYPE
	 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
	     || TYPE_PADDING_P (gnu_old_type)))
    gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));

  /* Unconstrained array types are deemed incomplete and would thus be given
     alias set 0.  Retrieve the underlying array type.  */
  if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
    gnu_old_type
      = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
  if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
    gnu_new_type
      = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));

  /* For multi-dimensional arrays, relate the element types of each
     dimension recursively before relating the outer types.  */
  if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
      && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
      && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
    relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);

  switch (op)
    {
    case ALIAS_SET_COPY:
      /* The alias set shouldn't be copied between array types with different
	 aliasing settings because this can break the aliasing relationship
	 between the array type and its element type.  */
#ifndef ENABLE_CHECKING
      if (flag_strict_aliasing)
#endif
	gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
		      && TREE_CODE (gnu_old_type) == ARRAY_TYPE
		      && TYPE_NONALIASED_COMPONENT (gnu_new_type)
			 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));

      TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
      break;

    case ALIAS_SET_SUBSET:
    case ALIAS_SET_SUPERSET:
      {
	alias_set_type old_set = get_alias_set (gnu_old_type);
	alias_set_type new_set = get_alias_set (gnu_new_type);

	/* Do nothing if the alias sets conflict.  This ensures that we
	   never call record_alias_subset several times for the same pair
	   or at all for alias set 0.  */
	if (!alias_sets_conflict_p (old_set, new_set))
	  {
	    if (op == ALIAS_SET_SUBSET)
	      record_alias_subset (old_set, new_set);
	    else
	      record_alias_subset (new_set, old_set);
	  }
      }
      break;

    default:
      gcc_unreachable ();
    }

  /* Finally propagate the aliasing information to the components of the
     new type.  */
  record_component_aliases (gnu_new_type);
}
1348 \f
1349 /* Record TYPE as a builtin type for Ada. NAME is the name of the type.
1350 ARTIFICIAL_P is true if it's a type that was generated by the compiler. */
1351
1352 void
1353 record_builtin_type (const char *name, tree type, bool artificial_p)
1354 {
1355 tree type_decl = build_decl (input_location,
1356 TYPE_DECL, get_identifier (name), type);
1357 DECL_ARTIFICIAL (type_decl) = artificial_p;
1358 TYPE_ARTIFICIAL (type) = artificial_p;
1359 gnat_pushdecl (type_decl, Empty);
1360
1361 if (debug_hooks->type_decl)
1362 debug_hooks->type_decl (type_decl, false);
1363 }
1364 \f
/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
   finish constructing the record type as a fat pointer type.  */

void
finish_fat_pointer_type (tree record_type, tree field_list)
{
  /* Make sure we can put it into a register.  */
  if (STRICT_ALIGNMENT)
    TYPE_ALIGN (record_type) = MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE);

  /* Show what it really is.  This must be set before calling
     finish_record_type below, which tests TYPE_FAT_POINTER_P when
     deciding whether to set the Ada size.  */
  TYPE_FAT_POINTER_P (record_type) = 1;

  /* Do not emit debug info for it since the types of its fields may still be
     incomplete at this point.  */
  finish_record_type (record_type, field_list, 0, false);

  /* Force type_contains_placeholder_p to return true on it.  Although the
     PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
     type but the representation of the unconstrained array.  */
  TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
}
1387
/* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
   finish constructing the record or union type.  If REP_LEVEL is zero, this
   record has no representation clause and so will be entirely laid out here.
   If REP_LEVEL is one, this record has a representation clause and has been
   laid out already; only set the sizes and alignment.  If REP_LEVEL is two,
   this record is derived from a parent record and thus inherits its layout;
   only make a pass on the fields to finalize them.  DEBUG_INFO_P is true if
   we need to write debug information about this type.  */

void
finish_record_type (tree record_type, tree field_list, int rep_level,
		    bool debug_info_p)
{
  enum tree_code code = TREE_CODE (record_type);
  tree name = TYPE_NAME (record_type);
  tree ada_size = bitsize_zero_node;
  tree size = bitsize_zero_node;
  /* Remember which layout values were already set on entry (e.g. by a rep
     clause) so we don't clobber them below.  */
  bool had_size = TYPE_SIZE (record_type) != 0;
  bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
  bool had_align = TYPE_ALIGN (record_type) != 0;
  tree field;

  TYPE_FIELDS (record_type) = field_list;

  /* Always attach the TYPE_STUB_DECL for a record type.  It is required to
     generate debug info and have a parallel type.  */
  if (name && TREE_CODE (name) == TYPE_DECL)
    name = DECL_NAME (name);
  TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);

  /* Globally initialize the record first.  If this is a rep'ed record,
     that just means some initializations; otherwise, layout the record.  */
  if (rep_level > 0)
    {
      TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));

      if (!had_size_unit)
	TYPE_SIZE_UNIT (record_type) = size_zero_node;

      if (!had_size)
	TYPE_SIZE (record_type) = bitsize_zero_node;

      /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
	 out just like a UNION_TYPE, since the size will be fixed.  */
      else if (code == QUAL_UNION_TYPE)
	code = UNION_TYPE;
    }
  else
    {
      /* Ensure there isn't a size already set.  There can be in an error
	 case where there is a rep clause but all fields have errors and
	 no longer have a position.  */
      TYPE_SIZE (record_type) = 0;

      /* Ensure we use the traditional GCC layout for bitfields when we need
	 to pack the record type or have a representation clause.  The other
	 possible layout (Microsoft C compiler), if available, would prevent
	 efficient packing in almost all cases.  */
#ifdef TARGET_MS_BITFIELD_LAYOUT
      if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
	decl_attributes (&record_type,
			 tree_cons (get_identifier ("gcc_struct"),
				    NULL_TREE, NULL_TREE),
			 ATTR_FLAG_TYPE_IN_PLACE);
#endif

      layout_type (record_type);
    }

  /* At this point, the position and size of each field is known.  It was
     either set before entry by a rep clause, or by laying out the type above.

     We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
     to compute the Ada size; the GCC size and alignment (for rep'ed records
     that are not padding types); and the mode (for rep'ed records).  We also
     clear the DECL_BIT_FIELD indication for the cases we know have not been
     handled yet, and adjust DECL_NONADDRESSABLE_P accordingly.  */

  if (code == QUAL_UNION_TYPE)
    field_list = nreverse (field_list);

  for (field = field_list; field; field = DECL_CHAIN (field))
    {
      tree type = TREE_TYPE (field);
      tree pos = bit_position (field);
      tree this_size = DECL_SIZE (field);
      tree this_ada_size;

      /* Prefer the RM size of an inner record/union, when available, as
	 this field's contribution to the Ada size.  */
      if (RECORD_OR_UNION_TYPE_P (type)
	  && !TYPE_FAT_POINTER_P (type)
	  && !TYPE_CONTAINS_TEMPLATE_P (type)
	  && TYPE_ADA_SIZE (type))
	this_ada_size = TYPE_ADA_SIZE (type);
      else
	this_ada_size = this_size;

      /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle.  */
      if (DECL_BIT_FIELD (field)
	  && operand_equal_p (this_size, TYPE_SIZE (type), 0))
	{
	  unsigned int align = TYPE_ALIGN (type);

	  /* In the general case, type alignment is required.  */
	  if (value_factor_p (pos, align))
	    {
	      /* The enclosing record type must be sufficiently aligned.
		 Otherwise, if no alignment was specified for it and it
		 has been laid out already, bump its alignment to the
		 desired one if this is compatible with its size.  */
	      if (TYPE_ALIGN (record_type) >= align)
		{
		  DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
		  DECL_BIT_FIELD (field) = 0;
		}
	      else if (!had_align
		       && rep_level == 0
		       && value_factor_p (TYPE_SIZE (record_type), align))
		{
		  TYPE_ALIGN (record_type) = align;
		  DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
		  DECL_BIT_FIELD (field) = 0;
		}
	    }

	  /* In the non-strict alignment case, only byte alignment is
	     required.  */
	  if (!STRICT_ALIGNMENT
	      && DECL_BIT_FIELD (field)
	      && value_factor_p (pos, BITS_PER_UNIT))
	    DECL_BIT_FIELD (field) = 0;
	}

      /* If we still have DECL_BIT_FIELD set at this point, we know that the
	 field is technically not addressable.  Except that it can actually
	 be addressed if it is BLKmode and happens to be properly aligned.  */
      if (DECL_BIT_FIELD (field)
	  && !(DECL_MODE (field) == BLKmode
	       && value_factor_p (pos, BITS_PER_UNIT)))
	DECL_NONADDRESSABLE_P (field) = 1;

      /* A type must be as aligned as its most aligned field that is not
	 a bit-field.  But this is already enforced by layout_type.  */
      if (rep_level > 0 && !DECL_BIT_FIELD (field))
	TYPE_ALIGN (record_type)
	  = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));

      switch (code)
	{
	case UNION_TYPE:
	  /* A union is as big as its biggest member.  */
	  ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
	  size = size_binop (MAX_EXPR, size, this_size);
	  break;

	case QUAL_UNION_TYPE:
	  /* Build a conditional size: the size of the variant whose
	     qualifier holds, falling back to the accumulated size.  */
	  ada_size
	    = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			   this_ada_size, ada_size);
	  size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
			      this_size, size);
	  break;

	case RECORD_TYPE:
	  /* Since we know here that all fields are sorted in order of
	     increasing bit position, the size of the record is one
	     higher than the ending bit of the last field processed
	     unless we have a rep clause, since in that case we might
	     have a field outside a QUAL_UNION_TYPE that has a higher ending
	     position.  So use a MAX in that case.  Also, if this field is a
	     QUAL_UNION_TYPE, we need to take into account the previous size in
	     the case of empty variants.  */
	  ada_size
	    = merge_sizes (ada_size, pos, this_ada_size,
			   TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
	  size
	    = merge_sizes (size, pos, this_size,
			   TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
	  break;

	default:
	  gcc_unreachable ();
	}
    }

  /* Restore the original field order that was reversed above.  */
  if (code == QUAL_UNION_TYPE)
    nreverse (field_list);

  if (rep_level < 2)
    {
      /* If this is a padding record, we never want to make the size smaller
	 than what was specified in it, if any.  */
      if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
	size = TYPE_SIZE (record_type);

      /* Now set any of the values we've just computed that apply.  */
      if (!TYPE_FAT_POINTER_P (record_type)
	  && !TYPE_CONTAINS_TEMPLATE_P (record_type))
	SET_TYPE_ADA_SIZE (record_type, ada_size);

      if (rep_level > 0)
	{
	  tree size_unit = had_size_unit
			   ? TYPE_SIZE_UNIT (record_type)
			   : convert (sizetype,
				      size_binop (CEIL_DIV_EXPR, size,
						  bitsize_unit_node));
	  unsigned int align = TYPE_ALIGN (record_type);

	  /* Round both sizes up to the alignment before committing them.  */
	  TYPE_SIZE (record_type) = variable_size (round_up (size, align));
	  TYPE_SIZE_UNIT (record_type)
	    = variable_size (round_up (size_unit, align / BITS_PER_UNIT));

	  compute_record_mode (record_type);
	}
    }

  if (debug_info_p)
    rest_of_record_type_compilation (record_type);
}
1605
1606 /* Append PARALLEL_TYPE on the chain of parallel types of TYPE. */
1607
1608 void
1609 add_parallel_type (tree type, tree parallel_type)
1610 {
1611 tree decl = TYPE_STUB_DECL (type);
1612
1613 while (DECL_PARALLEL_TYPE (decl))
1614 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
1615
1616 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
1617 }
1618
1619 /* Return true if TYPE has a parallel type. */
1620
1621 static bool
1622 has_parallel_type (tree type)
1623 {
1624 tree decl = TYPE_STUB_DECL (type);
1625
1626 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
1627 }
1628
1629 /* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
1630 associated with it. It need not be invoked directly in most cases since
1631 finish_record_type takes care of doing so, but this can be necessary if
1632 a parallel type is to be attached to the record type. */
1633
1634 void
1635 rest_of_record_type_compilation (tree record_type)
1636 {
1637 bool var_size = false;
1638 tree field;
1639
1640 /* If this is a padded type, the bulk of the debug info has already been
1641 generated for the field's type. */
1642 if (TYPE_IS_PADDING_P (record_type))
1643 return;
1644
1645 /* If the type already has a parallel type (XVS type), then we're done. */
1646 if (has_parallel_type (record_type))
1647 return;
1648
1649 for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
1650 {
1651 /* We need to make an XVE/XVU record if any field has variable size,
1652 whether or not the record does. For example, if we have a union,
1653 it may be that all fields, rounded up to the alignment, have the
1654 same size, in which case we'll use that size. But the debug
1655 output routines (except Dwarf2) won't be able to output the fields,
1656 so we need to make the special record. */
1657 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
1658 /* If a field has a non-constant qualifier, the record will have
1659 variable size too. */
1660 || (TREE_CODE (record_type) == QUAL_UNION_TYPE
1661 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
1662 {
1663 var_size = true;
1664 break;
1665 }
1666 }
1667
1668 /* If this record type is of variable size, make a parallel record type that
1669 will tell the debugger how the former is laid out (see exp_dbug.ads). */
1670 if (var_size)
1671 {
1672 tree new_record_type
1673 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
1674 ? UNION_TYPE : TREE_CODE (record_type));
1675 tree orig_name = TYPE_NAME (record_type), new_name;
1676 tree last_pos = bitsize_zero_node;
1677 tree old_field, prev_old_field = NULL_TREE;
1678
1679 if (TREE_CODE (orig_name) == TYPE_DECL)
1680 orig_name = DECL_NAME (orig_name);
1681
1682 new_name
1683 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
1684 ? "XVU" : "XVE");
1685 TYPE_NAME (new_record_type) = new_name;
1686 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
1687 TYPE_STUB_DECL (new_record_type)
1688 = create_type_stub_decl (new_name, new_record_type);
1689 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
1690 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
1691 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
1692 TYPE_SIZE_UNIT (new_record_type)
1693 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
1694
1695 /* Now scan all the fields, replacing each field with a new
1696 field corresponding to the new encoding. */
1697 for (old_field = TYPE_FIELDS (record_type); old_field;
1698 old_field = DECL_CHAIN (old_field))
1699 {
1700 tree field_type = TREE_TYPE (old_field);
1701 tree field_name = DECL_NAME (old_field);
1702 tree new_field;
1703 tree curpos = bit_position (old_field);
1704 bool var = false;
1705 unsigned int align = 0;
1706 tree pos;
1707
1708 /* See how the position was modified from the last position.
1709
1710 There are two basic cases we support: a value was added
1711 to the last position or the last position was rounded to
1712 a boundary and they something was added. Check for the
1713 first case first. If not, see if there is any evidence
1714 of rounding. If so, round the last position and try
1715 again.
1716
1717 If this is a union, the position can be taken as zero. */
1718
1719 /* Some computations depend on the shape of the position expression,
1720 so strip conversions to make sure it's exposed. */
1721 curpos = remove_conversions (curpos, true);
1722
1723 if (TREE_CODE (new_record_type) == UNION_TYPE)
1724 pos = bitsize_zero_node, align = 0;
1725 else
1726 pos = compute_related_constant (curpos, last_pos);
1727
1728 if (!pos && TREE_CODE (curpos) == MULT_EXPR
1729 && host_integerp (TREE_OPERAND (curpos, 1), 1))
1730 {
1731 tree offset = TREE_OPERAND (curpos, 0);
1732 align = tree_low_cst (TREE_OPERAND (curpos, 1), 1);
1733
1734 /* An offset which is a bitwise AND with a negative power of 2
1735 means an alignment corresponding to this power of 2. Note
1736 that, as sizetype is sign-extended but nonetheless unsigned,
1737 we don't directly use tree_int_cst_sgn. */
1738 offset = remove_conversions (offset, true);
1739 if (TREE_CODE (offset) == BIT_AND_EXPR
1740 && host_integerp (TREE_OPERAND (offset, 1), 0)
1741 && TREE_INT_CST_HIGH (TREE_OPERAND (offset, 1)) < 0)
1742 {
1743 unsigned int pow
1744 = - tree_low_cst (TREE_OPERAND (offset, 1), 0);
1745 if (exact_log2 (pow) > 0)
1746 align *= pow;
1747 }
1748
1749 pos = compute_related_constant (curpos,
1750 round_up (last_pos, align));
1751 }
1752 else if (!pos && TREE_CODE (curpos) == PLUS_EXPR
1753 && TREE_CODE (TREE_OPERAND (curpos, 1)) == INTEGER_CST
1754 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
1755 && host_integerp (TREE_OPERAND
1756 (TREE_OPERAND (curpos, 0), 1),
1757 1))
1758 {
1759 align
1760 = tree_low_cst
1761 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1), 1);
1762 pos = compute_related_constant (curpos,
1763 round_up (last_pos, align));
1764 }
1765 else if (potential_alignment_gap (prev_old_field, old_field,
1766 pos))
1767 {
1768 align = TYPE_ALIGN (field_type);
1769 pos = compute_related_constant (curpos,
1770 round_up (last_pos, align));
1771 }
1772
1773 /* If we can't compute a position, set it to zero.
1774
1775 ??? We really should abort here, but it's too much work
1776 to get this correct for all cases. */
1777
1778 if (!pos)
1779 pos = bitsize_zero_node;
1780
1781 /* See if this type is variable-sized and make a pointer type
1782 and indicate the indirection if so. Beware that the debug
1783 back-end may adjust the position computed above according
1784 to the alignment of the field type, i.e. the pointer type
1785 in this case, if we don't preventively counter that. */
1786 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
1787 {
1788 field_type = build_pointer_type (field_type);
1789 if (align != 0 && TYPE_ALIGN (field_type) > align)
1790 {
1791 field_type = copy_node (field_type);
1792 TYPE_ALIGN (field_type) = align;
1793 }
1794 var = true;
1795 }
1796
1797 /* Make a new field name, if necessary. */
1798 if (var || align != 0)
1799 {
1800 char suffix[16];
1801
1802 if (align != 0)
1803 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
1804 align / BITS_PER_UNIT);
1805 else
1806 strcpy (suffix, "XVL");
1807
1808 field_name = concat_name (field_name, suffix);
1809 }
1810
1811 new_field
1812 = create_field_decl (field_name, field_type, new_record_type,
1813 DECL_SIZE (old_field), pos, 0, 0);
1814 DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
1815 TYPE_FIELDS (new_record_type) = new_field;
1816
1817 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1818 zero. The only time it's not the last field of the record
1819 is when there are other components at fixed positions after
1820 it (meaning there was a rep clause for every field) and we
1821 want to be able to encode them. */
1822 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1823 (TREE_CODE (TREE_TYPE (old_field))
1824 == QUAL_UNION_TYPE)
1825 ? bitsize_zero_node
1826 : DECL_SIZE (old_field));
1827 prev_old_field = old_field;
1828 }
1829
1830 TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));
1831
1832 add_parallel_type (record_type, new_record_type);
1833 }
1834 }
1835
/* Utility function of above to merge LAST_SIZE, the previous size of a record
   with FIRST_BIT and SIZE that describe a field.  SPECIAL is true if this
   represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
   replace a value of zero with the old size.  If HAS_REP is true, we take the
   MAX of the end position of this field with LAST_SIZE.  In all other cases,
   we use FIRST_BIT plus SIZE.  Return an expression for the size.  */

static tree
merge_sizes (tree last_size, tree first_bit, tree size, bool special,
             bool has_rep)
{
  tree type = TREE_TYPE (last_size);
  tree new_size;

  /* Ordinary case: the field ends at FIRST_BIT + SIZE.  With a rep clause
     fields may be laid out out of order, so also keep the MAX with the
     size accumulated so far.  */
  if (!special || TREE_CODE (size) != COND_EXPR)
    {
      new_size = size_binop (PLUS_EXPR, first_bit, size);
      if (has_rep)
        new_size = size_binop (MAX_EXPR, last_size, new_size);
    }

  /* QUAL_UNION_TYPE case with a conditional size: rebuild the COND_EXPR,
     replacing a zero size in either arm by LAST_SIZE as documented above,
     and recursing into the non-zero arms (with SPECIAL forced on).  */
  else
    new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
                            integer_zerop (TREE_OPERAND (size, 1))
                            ? last_size : merge_sizes (last_size, first_bit,
                                                       TREE_OPERAND (size, 1),
                                                       1, has_rep),
                            integer_zerop (TREE_OPERAND (size, 2))
                            ? last_size : merge_sizes (last_size, first_bit,
                                                       TREE_OPERAND (size, 2),
                                                       1, has_rep));

  /* We don't need any NON_LVALUE_EXPRs and they can confuse us (especially
     when fed through substitute_in_expr) into thinking that a constant
     size is not constant.  */
  while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
    new_size = TREE_OPERAND (new_size, 0);

  return new_size;
}
1876
1877 /* Utility function of above to see if OP0 and OP1, both of SIZETYPE, are
1878 related by the addition of a constant. Return that constant if so. */
1879
1880 static tree
1881 compute_related_constant (tree op0, tree op1)
1882 {
1883 tree op0_var, op1_var;
1884 tree op0_con = split_plus (op0, &op0_var);
1885 tree op1_con = split_plus (op1, &op1_var);
1886 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
1887
1888 if (operand_equal_p (op0_var, op1_var, 0))
1889 return result;
1890 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
1891 return result;
1892 else
1893 return 0;
1894 }
1895
1896 /* Utility function of above to split a tree OP which may be a sum, into a
1897 constant part, which is returned, and a variable part, which is stored
1898 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
1899 bitsizetype. */
1900
1901 static tree
1902 split_plus (tree in, tree *pvar)
1903 {
1904 /* Strip conversions in order to ease the tree traversal and maximize the
1905 potential for constant or plus/minus discovery. We need to be careful
1906 to always return and set *pvar to bitsizetype trees, but it's worth
1907 the effort. */
1908 in = remove_conversions (in, false);
1909
1910 *pvar = convert (bitsizetype, in);
1911
1912 if (TREE_CODE (in) == INTEGER_CST)
1913 {
1914 *pvar = bitsize_zero_node;
1915 return convert (bitsizetype, in);
1916 }
1917 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
1918 {
1919 tree lhs_var, rhs_var;
1920 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
1921 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
1922
1923 if (lhs_var == TREE_OPERAND (in, 0)
1924 && rhs_var == TREE_OPERAND (in, 1))
1925 return bitsize_zero_node;
1926
1927 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
1928 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
1929 }
1930 else
1931 return bitsize_zero_node;
1932 }
1933 \f
1934 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
1935 subprogram. If it is VOID_TYPE, then we are dealing with a procedure,
1936 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
1937 PARM_DECL nodes that are the subprogram parameters. CICO_LIST is the
1938 copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
1939 RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
1940 object. RETURN_BY_DIRECT_REF_P is true if the function returns by direct
1941 reference. RETURN_BY_INVISI_REF_P is true if the function returns by
1942 invisible reference. */
1943
tree
create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
                     bool return_unconstrained_p, bool return_by_direct_ref_p,
                     bool return_by_invisi_ref_p)
{
  /* A list of the data type nodes of the subprogram formal parameters.
     This list is generated by traversing the input list of PARM_DECL
     nodes.  The vector is GC-allocated since it ends up referenced from
     the function type.  */
  VEC(tree,gc) *param_type_list = NULL;
  tree t, type;

  for (t = param_decl_list; t; t = DECL_CHAIN (t))
    VEC_safe_push (tree, gc, param_type_list, TREE_TYPE (t));

  type = build_function_type_vec (return_type, param_type_list);

  /* TYPE may have been shared since GCC hashes types.  If it has a different
     CICO_LIST, make a copy.  Likewise for the various flags.  Note that the
     invisible-reference property is encoded in TREE_ADDRESSABLE on the
     function type.  */
  if (!fntype_same_flags_p (type, cico_list, return_unconstrained_p,
                            return_by_direct_ref_p, return_by_invisi_ref_p))
    {
      type = copy_type (type);
      TYPE_CI_CO_LIST (type) = cico_list;
      TYPE_RETURN_UNCONSTRAINED_P (type) = return_unconstrained_p;
      TYPE_RETURN_BY_DIRECT_REF_P (type) = return_by_direct_ref_p;
      TREE_ADDRESSABLE (type) = return_by_invisi_ref_p;
    }

  return type;
}
1974 \f
1975 /* Return a copy of TYPE but safe to modify in any way. */
1976
1977 tree
1978 copy_type (tree type)
1979 {
1980 tree new_type = copy_node (type);
1981
1982 /* Unshare the language-specific data. */
1983 if (TYPE_LANG_SPECIFIC (type))
1984 {
1985 TYPE_LANG_SPECIFIC (new_type) = NULL;
1986 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
1987 }
1988
1989 /* And the contents of the language-specific slot if needed. */
1990 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
1991 && TYPE_RM_VALUES (type))
1992 {
1993 TYPE_RM_VALUES (new_type) = NULL_TREE;
1994 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
1995 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
1996 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
1997 }
1998
1999 /* copy_node clears this field instead of copying it, because it is
2000 aliased with TREE_CHAIN. */
2001 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
2002
2003 TYPE_POINTER_TO (new_type) = 0;
2004 TYPE_REFERENCE_TO (new_type) = 0;
2005 TYPE_MAIN_VARIANT (new_type) = new_type;
2006 TYPE_NEXT_VARIANT (new_type) = 0;
2007
2008 return new_type;
2009 }
2010 \f
2011 /* Return a subtype of sizetype with range MIN to MAX and whose
2012 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2013 of the associated TYPE_DECL. */
2014
2015 tree
2016 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2017 {
2018 /* First build a type for the desired range. */
2019 tree type = build_nonshared_range_type (sizetype, min, max);
2020
2021 /* Then set the index type. */
2022 SET_TYPE_INDEX_TYPE (type, index);
2023 create_type_decl (NULL_TREE, type, NULL, true, false, gnat_node);
2024
2025 return type;
2026 }
2027
2028 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2029 sizetype is used. */
2030
2031 tree
2032 create_range_type (tree type, tree min, tree max)
2033 {
2034 tree range_type;
2035
2036 if (type == NULL_TREE)
2037 type = sizetype;
2038
2039 /* First build a type with the base range. */
2040 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2041 TYPE_MAX_VALUE (type));
2042
2043 /* Then set the actual range. */
2044 SET_TYPE_RM_MIN_VALUE (range_type, convert (type, min));
2045 SET_TYPE_RM_MAX_VALUE (range_type, convert (type, max));
2046
2047 return range_type;
2048 }
2049 \f
2050 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
2051 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
2052 its data type. */
2053
2054 tree
2055 create_type_stub_decl (tree type_name, tree type)
2056 {
2057 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
2058 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
2059 emitted in DWARF. */
2060 tree type_decl = build_decl (input_location,
2061 TYPE_DECL, type_name, type);
2062 DECL_ARTIFICIAL (type_decl) = 1;
2063 TYPE_ARTIFICIAL (type) = 1;
2064 return type_decl;
2065 }
2066
2067 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
2068 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
2069 is a declaration that was generated by the compiler. DEBUG_INFO_P is
2070 true if we need to write debug information about this type. GNAT_NODE
2071 is used for the position of the decl. */
2072
2073 tree
2074 create_type_decl (tree type_name, tree type, struct attrib *attr_list,
2075 bool artificial_p, bool debug_info_p, Node_Id gnat_node)
2076 {
2077 enum tree_code code = TREE_CODE (type);
2078 bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
2079 tree type_decl;
2080
2081 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
2082 gcc_assert (!TYPE_IS_DUMMY_P (type));
2083
2084 /* If the type hasn't been named yet, we're naming it; preserve an existing
2085 TYPE_STUB_DECL that has been attached to it for some purpose. */
2086 if (!named && TYPE_STUB_DECL (type))
2087 {
2088 type_decl = TYPE_STUB_DECL (type);
2089 DECL_NAME (type_decl) = type_name;
2090 }
2091 else
2092 type_decl = build_decl (input_location,
2093 TYPE_DECL, type_name, type);
2094
2095 DECL_ARTIFICIAL (type_decl) = artificial_p;
2096 TYPE_ARTIFICIAL (type) = artificial_p;
2097
2098 /* Add this decl to the current binding level. */
2099 gnat_pushdecl (type_decl, gnat_node);
2100
2101 process_attributes (type_decl, attr_list);
2102
2103 /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
2104 This causes the name to be also viewed as a "tag" by the debug
2105 back-end, with the advantage that no DW_TAG_typedef is emitted
2106 for artificial "tagged" types in DWARF. */
2107 if (!named)
2108 TYPE_STUB_DECL (type) = type_decl;
2109
2110 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2111 back-end doesn't support, and for others if we don't need to. */
2112 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
2113 DECL_IGNORED_P (type_decl) = 1;
2114
2115 return type_decl;
2116 }
2117 \f
2118 /* Return a VAR_DECL or CONST_DECL node.
2119
2120 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
2121 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
2122 the GCC tree for an optional initial expression; NULL_TREE if none.
2123
2124 CONST_FLAG is true if this variable is constant, in which case we might
2125 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2126
2127 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2128 definition to be made visible outside of the current compilation unit, for
2129 instance variable definitions in a package specification.
2130
2131 EXTERN_FLAG is true when processing an external variable declaration (as
2132 opposed to a definition: no storage is to be allocated for the variable).
2133
2134 STATIC_FLAG is only relevant when not at top level. In that case
2135 it indicates whether to always allocate storage to the variable.
2136
2137 GNAT_NODE is used for the position of the decl. */
2138
tree
create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
                   bool const_flag, bool public_flag, bool extern_flag,
                   bool static_flag, bool const_decl_allowed_p,
                   struct attrib *attr_list, Node_Id gnat_node)
{
  /* Whether the initializer is a constant initializer.  At the global level
     or for an external object or an object to be allocated in static memory,
     we check that it is a valid constant expression for use in initializing
     a static variable; otherwise, we only check that it is constant.  */
  bool init_const
    = (var_init != 0
       && gnat_types_compatible_p (type, TREE_TYPE (var_init))
       && (global_bindings_p () || extern_flag || static_flag
           ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
           : TREE_CONSTANT (var_init)));

  /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
     case the initializer may be used in-lieu of the DECL node (as done in
     Identifier_to_gnu).  This is useful to prevent the need of elaboration
     code when an identifier for which such a decl is made is in turn used as
     an initializer.  We used to rely on CONST vs VAR_DECL for this purpose,
     but extra constraints apply to this choice (see below) and are not
     relevant to the distinction we wish to make.  */
  bool constant_p = const_flag && init_const;

  /* The actual DECL node.  CONST_DECL was initially intended for enumerals
     and may be used for scalars in general but not for aggregates.  */
  tree var_decl
    = build_decl (input_location,
                  (constant_p && const_decl_allowed_p
                   && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
                  var_name, type);

  /* If this is external, throw away any initializations (they will be done
     elsewhere) unless this is a constant for which we would like to remain
     able to get the initializer.  If we are defining a global here, leave a
     constant initialization and save any variable elaborations for the
     elaboration routine.  If we are just annotating types, throw away the
     initialization if it isn't a constant.  */
  if ((extern_flag && !constant_p)
      || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
    var_init = NULL_TREE;

  /* At the global level, an initializer requiring code to be generated
     produces elaboration statements.  Check that such statements are allowed,
     that is, not violating a No_Elaboration_Code restriction.  */
  if (global_bindings_p () && var_init != 0 && !init_const)
    Check_Elaboration_Code_Allowed (gnat_node);

  DECL_INITIAL (var_decl) = var_init;
  TREE_READONLY (var_decl) = const_flag;
  DECL_EXTERNAL (var_decl) = extern_flag;
  TREE_PUBLIC (var_decl) = public_flag || extern_flag;
  TREE_CONSTANT (var_decl) = constant_p;
  /* A volatile object is also considered to have side effects.  */
  TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
    = TYPE_VOLATILE (type);

  /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
     try to fiddle with DECL_COMMON.  However, on platforms that don't
     support global BSS sections, uninitialized global variables would
     go in DATA instead, thus increasing the size of the executable.  */
  if (!flag_no_common
      && TREE_CODE (var_decl) == VAR_DECL
      && TREE_PUBLIC (var_decl)
      && !have_global_bss_p ())
    DECL_COMMON (var_decl) = 1;

  /* At the global binding level, we need to allocate static storage for the
     variable if it isn't external.  Otherwise, we allocate automatic storage
     unless requested not to.  */
  TREE_STATIC (var_decl)
    = !extern_flag && (static_flag || global_bindings_p ());

  /* For an external constant whose initializer is not absolute, do not emit
     debug info.  In DWARF this would mean a global relocation in a read-only
     section which runs afoul of the PE-COFF run-time relocation mechanism.  */
  if (extern_flag
      && constant_p
      && var_init
      && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
           != null_pointer_node)
    DECL_IGNORED_P (var_decl) = 1;

  /* Add this decl to the current binding level.  */
  gnat_pushdecl (var_decl, gnat_node);

  if (TREE_SIDE_EFFECTS (var_decl))
    TREE_ADDRESSABLE (var_decl) = 1;

  /* Assembler names, attributes and back-end bookkeeping only apply to
     genuine variables, not to CONST_DECLs.  */
  if (TREE_CODE (var_decl) == VAR_DECL)
    {
      if (asm_name)
        SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
      process_attributes (var_decl, attr_list);
      if (global_bindings_p ())
        rest_of_decl_compilation (var_decl, true, 0);
    }

  return var_decl;
}
2240 \f
2241 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
2242
2243 static bool
2244 aggregate_type_contains_array_p (tree type)
2245 {
2246 switch (TREE_CODE (type))
2247 {
2248 case RECORD_TYPE:
2249 case UNION_TYPE:
2250 case QUAL_UNION_TYPE:
2251 {
2252 tree field;
2253 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2254 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2255 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2256 return true;
2257 return false;
2258 }
2259
2260 case ARRAY_TYPE:
2261 return true;
2262
2263 default:
2264 gcc_unreachable ();
2265 }
2266 }
2267
2268 /* Return a FIELD_DECL node. FIELD_NAME is the field's name, FIELD_TYPE is
2269 its type and RECORD_TYPE is the type of the enclosing record. If SIZE is
2270 nonzero, it is the specified size of the field. If POS is nonzero, it is
2271 the bit position. PACKED is 1 if the enclosing record is packed, -1 if it
2272 has Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
2273 means we are allowed to take the address of the field; if it is negative,
2274 we should not make a bitfield, which is used by make_aligning_type. */
2275
tree
create_field_decl (tree field_name, tree field_type, tree record_type,
                   tree size, tree pos, int packed, int addressable)
{
  tree field_decl = build_decl (input_location,
                                FIELD_DECL, field_name, field_type);

  /* The field belongs to RECORD_TYPE and inherits its type's read-only
     flag.  */
  DECL_CONTEXT (field_decl) = record_type;
  TREE_READONLY (field_decl) = TYPE_READONLY (field_type);

  /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
     byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
     Likewise for an aggregate without specified position that contains an
     array, because in this case slices of variable length of this array
     must be handled by GCC and variable-sized objects need to be aligned
     to at least a byte boundary.  */
  if (packed && (TYPE_MODE (field_type) == BLKmode
                 || (!pos
                     && AGGREGATE_TYPE_P (field_type)
                     && aggregate_type_contains_array_p (field_type))))
    DECL_ALIGN (field_decl) = BITS_PER_UNIT;

  /* If a size is specified, use it.  Otherwise, if the record type is packed
     compute a size to use, which may differ from the object's natural size.
     We always set a size in this case to trigger the checks for bitfield
     creation below, which is typically required when no position has been
     specified.  */
  if (size)
    size = convert (bitsizetype, size);
  else if (packed == 1)
    {
      size = rm_size (field_type);
      if (TYPE_MODE (field_type) == BLKmode)
        size = round_up (size, BITS_PER_UNIT);
    }

  /* If we may, according to ADDRESSABLE, make a bitfield if a size is
     specified for two reasons: first if the size differs from the natural
     size.  Second, if the alignment is insufficient.  There are a number of
     ways the latter can be true.

     We never make a bitfield if the type of the field has a nonconstant size,
     because no such entity requiring bitfield operations should reach here.

     We do *preventively* make a bitfield when there might be the need for it
     but we don't have all the necessary information to decide, as is the case
     of a field with no specified position in a packed record.

     We also don't look at STRICT_ALIGNMENT here, and rely on later processing
     in layout_decl or finish_record_type to clear the bit_field indication if
     it is in fact not needed.  */
  if (addressable >= 0
      && size
      && TREE_CODE (size) == INTEGER_CST
      && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
      && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
          || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
          || packed
          || (TYPE_ALIGN (record_type) != 0
              && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
    {
      DECL_BIT_FIELD (field_decl) = 1;
      DECL_SIZE (field_decl) = size;
      if (!packed && !pos)
        {
          if (TYPE_ALIGN (record_type) != 0
              && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
            DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
          else
            DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
        }
    }

  /* A field with an explicit position is packed only if it ended up as a
     bitfield above.  */
  DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;

  /* Bump the alignment if need be, either for bitfield/packing purposes or
     to satisfy the type requirements if no such consideration applies.  When
     we get the alignment from the type, indicate if this is from an explicit
     user request, which prevents stor-layout from lowering it later on.  */
  {
    unsigned int bit_align
      = (DECL_BIT_FIELD (field_decl) ? 1
         : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);

    if (bit_align > DECL_ALIGN (field_decl))
      DECL_ALIGN (field_decl) = bit_align;
    else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
      {
        DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
        DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
      }
  }

  if (pos)
    {
      /* We need to pass in the alignment the DECL is known to have.
         This is the lowest-order bit set in POS, but no more than
         the alignment of the record, if one is specified.  Note
         that an alignment of 0 is taken as infinite.  */
      unsigned int known_align;

      if (host_integerp (pos, 1))
        known_align = tree_low_cst (pos, 1) & - tree_low_cst (pos, 1);
      else
        known_align = BITS_PER_UNIT;

      if (TYPE_ALIGN (record_type)
          && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
        known_align = TYPE_ALIGN (record_type);

      layout_decl (field_decl, known_align);
      SET_DECL_OFFSET_ALIGN (field_decl,
                             host_integerp (pos, 1) ? BIGGEST_ALIGNMENT
                             : BITS_PER_UNIT);
      pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
                    &DECL_FIELD_BIT_OFFSET (field_decl),
                    DECL_OFFSET_ALIGN (field_decl), pos);
    }

  /* In addition to what our caller says, claim the field is addressable if we
     know that its type is not suitable.

     The field may also be "technically" nonaddressable, meaning that even if
     we attempt to take the field's address we will actually get the address
     of a copy.  This is the case for true bitfields, but the DECL_BIT_FIELD
     value we have at this point is not accurate enough, so we don't account
     for this here and let finish_record_type decide.  */
  if (!addressable && !type_for_nonaliased_component_p (field_type))
    addressable = 1;

  DECL_NONADDRESSABLE_P (field_decl) = !addressable;

  return field_decl;
}
2410 \f
2411 /* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
2412 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
2413 (either an In parameter or an address of a pass-by-ref parameter). */
2414
2415 tree
2416 create_param_decl (tree param_name, tree param_type, bool readonly)
2417 {
2418 tree param_decl = build_decl (input_location,
2419 PARM_DECL, param_name, param_type);
2420
2421 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2422 can lead to various ABI violations. */
2423 if (targetm.calls.promote_prototypes (NULL_TREE)
2424 && INTEGRAL_TYPE_P (param_type)
2425 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
2426 {
2427 /* We have to be careful about biased types here. Make a subtype
2428 of integer_type_node with the proper biasing. */
2429 if (TREE_CODE (param_type) == INTEGER_TYPE
2430 && TYPE_BIASED_REPRESENTATION_P (param_type))
2431 {
2432 tree subtype
2433 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
2434 TREE_TYPE (subtype) = integer_type_node;
2435 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
2436 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (param_type));
2437 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (param_type));
2438 param_type = subtype;
2439 }
2440 else
2441 param_type = integer_type_node;
2442 }
2443
2444 DECL_ARG_TYPE (param_decl) = param_type;
2445 TREE_READONLY (param_decl) = readonly;
2446 return param_decl;
2447 }
2448 \f
2449 /* Given a DECL and ATTR_LIST, process the listed attributes. */
2450
2451 static void
2452 process_attributes (tree decl, struct attrib *attr_list)
2453 {
2454 for (; attr_list; attr_list = attr_list->next)
2455 switch (attr_list->type)
2456 {
2457 case ATTR_MACHINE_ATTRIBUTE:
2458 input_location = DECL_SOURCE_LOCATION (decl);
2459 decl_attributes (&decl, tree_cons (attr_list->name, attr_list->args,
2460 NULL_TREE),
2461 ATTR_FLAG_TYPE_IN_PLACE);
2462 break;
2463
2464 case ATTR_LINK_ALIAS:
2465 if (! DECL_EXTERNAL (decl))
2466 {
2467 TREE_STATIC (decl) = 1;
2468 assemble_alias (decl, attr_list->name);
2469 }
2470 break;
2471
2472 case ATTR_WEAK_EXTERNAL:
2473 if (SUPPORTS_WEAK)
2474 declare_weak (decl);
2475 else
2476 post_error ("?weak declarations not supported on this target",
2477 attr_list->error_point);
2478 break;
2479
2480 case ATTR_LINK_SECTION:
2481 if (targetm_common.have_named_sections)
2482 {
2483 DECL_SECTION_NAME (decl)
2484 = build_string (IDENTIFIER_LENGTH (attr_list->name),
2485 IDENTIFIER_POINTER (attr_list->name));
2486 DECL_COMMON (decl) = 0;
2487 }
2488 else
2489 post_error ("?section attributes are not supported for this target",
2490 attr_list->error_point);
2491 break;
2492
2493 case ATTR_LINK_CONSTRUCTOR:
2494 DECL_STATIC_CONSTRUCTOR (decl) = 1;
2495 TREE_USED (decl) = 1;
2496 break;
2497
2498 case ATTR_LINK_DESTRUCTOR:
2499 DECL_STATIC_DESTRUCTOR (decl) = 1;
2500 TREE_USED (decl) = 1;
2501 break;
2502
2503 case ATTR_THREAD_LOCAL_STORAGE:
2504 DECL_TLS_MODEL (decl) = decl_default_tls_model (decl);
2505 DECL_COMMON (decl) = 0;
2506 break;
2507 }
2508 }
2509 \f
/* Record DECL as a global renaming pointer.  */

void
record_global_renaming_pointer (tree decl)
{
  /* DECL must carry a renamed object and must not be a loop parameter.  */
  gcc_assert (!DECL_LOOP_PARM_P (decl) && DECL_RENAMED_OBJECT (decl));
  /* Stash it so invalidate_global_renaming_pointers can reset it later.  */
  VEC_safe_push (tree, gc, global_renaming_pointers, decl);
}
2518
2519 /* Invalidate the global renaming pointers. */
2520
2521 void
2522 invalidate_global_renaming_pointers (void)
2523 {
2524 unsigned int i;
2525 tree iter;
2526
2527 FOR_EACH_VEC_ELT (tree, global_renaming_pointers, i, iter)
2528 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
2529
2530 VEC_free (tree, gc, global_renaming_pointers);
2531 }
2532
/* Return true if VALUE is known to be a multiple of FACTOR, which must be
   a power of 2.  */

bool
value_factor_p (tree value, HOST_WIDE_INT factor)
{
  /* A compile-time constant is a multiple iff division leaves no
     remainder.  */
  if (host_integerp (value, 1))
    return tree_low_cst (value, 1) % factor == 0;

  /* For a product, it is sufficient that either operand be a multiple,
     since FACTOR is a power of 2.  */
  if (TREE_CODE (value) == MULT_EXPR)
    return (value_factor_p (TREE_OPERAND (value, 0), factor)
            || value_factor_p (TREE_OPERAND (value, 1), factor));

  /* Anything else is conservatively assumed not to be a multiple.  */
  return false;
}
2548
2549 /* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
2550 unless we can prove these 2 fields are laid out in such a way that no gap
2551 exist between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
2552 is the distance in bits between the end of PREV_FIELD and the starting
2553 position of CURR_FIELD. It is ignored if null. */
2554
static bool
potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
{
  /* If this is the first field of the record, there cannot be any gap.  */
  if (!prev_field)
    return false;

  /* If the previous field is a union type, then return false: the only
     time when such a field is not the last field of the record is when
     there are other components at fixed positions after it (meaning there
     was a rep clause for every field), in which case we don't want the
     alignment constraint to override them.  */
  if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
    return false;

  /* If the distance between the end of prev_field and the beginning of
     curr_field is constant, then there is a gap if the value of this
     constant is not null.  */
  if (offset && host_integerp (offset, 1))
    return !integer_zerop (offset);

  /* If the size and position of the previous field are constant,
     then check the sum of this size and position.  There will be a gap
     iff it is not multiple of the current field alignment.  */
  if (host_integerp (DECL_SIZE (prev_field), 1)
      && host_integerp (bit_position (prev_field), 1))
    return ((tree_low_cst (bit_position (prev_field), 1)
             + tree_low_cst (DECL_SIZE (prev_field), 1))
            % DECL_ALIGN (curr_field) != 0);

  /* If both the position and size of the previous field are multiples
     of the current field alignment, there cannot be any gap.  */
  if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
      && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
    return false;

  /* Fallback: we cannot prove anything, so report a potential gap.  */
  return true;
}
2594
2595 /* Return a LABEL_DECL with LABEL_NAME. GNAT_NODE is used for the position
2596 of the decl. */
2597
2598 tree
2599 create_label_decl (tree label_name, Node_Id gnat_node)
2600 {
2601 tree label_decl
2602 = build_decl (input_location, LABEL_DECL, label_name, void_type_node);
2603
2604 DECL_MODE (label_decl) = VOIDmode;
2605
2606 /* Add this decl to the current binding level. */
2607 gnat_pushdecl (label_decl, gnat_node);
2608
2609 return label_decl;
2610 }
2611 \f
2612 /* Return a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
2613 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
2614 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
2615 PARM_DECL nodes chained through the DECL_CHAIN field).
2616
2617 INLINE_FLAG, PUBLIC_FLAG, EXTERN_FLAG, ARTIFICIAL_FLAG and ATTR_LIST are
2618 used to set the appropriate fields in the FUNCTION_DECL. GNAT_NODE is
2619 used for the position of the decl. */
2620
2621 tree
2622 create_subprog_decl (tree subprog_name, tree asm_name, tree subprog_type,
2623 tree param_decl_list, bool inline_flag, bool public_flag,
2624 bool extern_flag, bool artificial_flag,
2625 struct attrib *attr_list, Node_Id gnat_node)
2626 {
2627 tree subprog_decl = build_decl (input_location, FUNCTION_DECL, subprog_name,
2628 subprog_type);
2629 tree result_decl = build_decl (input_location, RESULT_DECL, NULL_TREE,
2630 TREE_TYPE (subprog_type));
2631 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
2632
2633 /* If this is a non-inline function nested inside an inlined external
2634 function, we cannot honor both requests without cloning the nested
2635 function in the current unit since it is private to the other unit.
2636 We could inline the nested function as well but it's probably better
2637 to err on the side of too little inlining. */
2638 if (!inline_flag
2639 && !public_flag
2640 && current_function_decl
2641 && DECL_DECLARED_INLINE_P (current_function_decl)
2642 && DECL_EXTERNAL (current_function_decl))
2643 DECL_DECLARED_INLINE_P (current_function_decl) = 0;
2644
2645 DECL_ARTIFICIAL (subprog_decl) = artificial_flag;
2646 DECL_EXTERNAL (subprog_decl) = extern_flag;
2647 DECL_DECLARED_INLINE_P (subprog_decl) = inline_flag;
2648 DECL_NO_INLINE_WARNING_P (subprog_decl) = inline_flag && artificial_flag;
2649
2650 TREE_PUBLIC (subprog_decl) = public_flag;
2651 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
2652 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
2653 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
2654
2655 DECL_ARTIFICIAL (result_decl) = 1;
2656 DECL_IGNORED_P (result_decl) = 1;
2657 DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (subprog_type);
2658 DECL_RESULT (subprog_decl) = result_decl;
2659
2660 if (asm_name)
2661 {
2662 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
2663
2664 /* The expand_main_function circuitry expects "main_identifier_node" to
2665 designate the DECL_NAME of the 'main' entry point, in turn expected
2666 to be declared as the "main" function literally by default. Ada
2667 program entry points are typically declared with a different name
2668 within the binder generated file, exported as 'main' to satisfy the
2669 system expectations. Force main_identifier_node in this case. */
2670 if (asm_name == main_identifier_node)
2671 DECL_NAME (subprog_decl) = main_identifier_node;
2672 }
2673
2674 /* Add this decl to the current binding level. */
2675 gnat_pushdecl (subprog_decl, gnat_node);
2676
2677 process_attributes (subprog_decl, attr_list);
2678
2679 /* Output the assembler code and/or RTL for the declaration. */
2680 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
2681
2682 return subprog_decl;
2683 }
2684 \f
2685 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
2686 body. This routine needs to be invoked before processing the declarations
2687 appearing in the subprogram. */
2688
2689 void
2690 begin_subprog_body (tree subprog_decl)
2691 {
2692 tree param_decl;
2693
2694 announce_function (subprog_decl);
2695
2696 /* This function is being defined. */
2697 TREE_STATIC (subprog_decl) = 1;
2698
2699 current_function_decl = subprog_decl;
2700
2701 /* Enter a new binding level and show that all the parameters belong to
2702 this function. */
2703 gnat_pushlevel ();
2704
2705 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
2706 param_decl = DECL_CHAIN (param_decl))
2707 DECL_CONTEXT (param_decl) = subprog_decl;
2708
2709 make_decl_rtl (subprog_decl);
2710 }
2711
2712 /* Finish translating the current subprogram and set its BODY. */
2713
2714 void
2715 end_subprog_body (tree body)
2716 {
2717 tree fndecl = current_function_decl;
2718
2719 /* Attach the BLOCK for this level to the function and pop the level. */
2720 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
2721 DECL_INITIAL (fndecl) = current_binding_level->block;
2722 gnat_poplevel ();
2723
2724 /* Mark the RESULT_DECL as being in this subprogram. */
2725 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
2726
2727 /* The body should be a BIND_EXPR whose BLOCK is the top-level one. */
2728 if (TREE_CODE (body) == BIND_EXPR)
2729 {
2730 BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
2731 DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
2732 }
2733
2734 DECL_SAVED_TREE (fndecl) = body;
2735
2736 current_function_decl = decl_function_context (fndecl);
2737 }
2738
2739 /* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
2740
2741 void
2742 rest_of_subprog_body_compilation (tree subprog_decl)
2743 {
2744 /* We cannot track the location of errors past this point. */
2745 error_gnat_node = Empty;
2746
2747 /* If we're only annotating types, don't actually compile this function. */
2748 if (type_annotate_only)
2749 return;
2750
2751 /* Dump functions before gimplification. */
2752 dump_function (TDI_original, subprog_decl);
2753
2754 if (!decl_function_context (subprog_decl))
2755 cgraph_finalize_function (subprog_decl, false);
2756 else
2757 /* Register this function with cgraph just far enough to get it
2758 added to our parent's nested function list. */
2759 (void) cgraph_get_create_node (subprog_decl);
2760 }
2761
/* Record DECL, a builtin function decl, at the current binding level and
   return it.  */

tree
gnat_builtin_function (tree decl)
{
  /* Empty: there is no GNAT node to take a source position from.  */
  gnat_pushdecl (decl, Empty);
  return decl;
}
2768
2769 /* Return an integer type with the number of bits of precision given by
2770 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
2771 it is a signed type. */
2772
2773 tree
2774 gnat_type_for_size (unsigned precision, int unsignedp)
2775 {
2776 tree t;
2777 char type_name[20];
2778
2779 if (precision <= 2 * MAX_BITS_PER_WORD
2780 && signed_and_unsigned_types[precision][unsignedp])
2781 return signed_and_unsigned_types[precision][unsignedp];
2782
2783 if (unsignedp)
2784 t = make_unsigned_type (precision);
2785 else
2786 t = make_signed_type (precision);
2787
2788 if (precision <= 2 * MAX_BITS_PER_WORD)
2789 signed_and_unsigned_types[precision][unsignedp] = t;
2790
2791 if (!TYPE_NAME (t))
2792 {
2793 sprintf (type_name, "%sSIGNED_%d", unsignedp ? "UN" : "", precision);
2794 TYPE_NAME (t) = get_identifier (type_name);
2795 }
2796
2797 return t;
2798 }
2799
2800 /* Likewise for floating-point types. */
2801
2802 static tree
2803 float_type_for_precision (int precision, enum machine_mode mode)
2804 {
2805 tree t;
2806 char type_name[20];
2807
2808 if (float_types[(int) mode])
2809 return float_types[(int) mode];
2810
2811 float_types[(int) mode] = t = make_node (REAL_TYPE);
2812 TYPE_PRECISION (t) = precision;
2813 layout_type (t);
2814
2815 gcc_assert (TYPE_MODE (t) == mode);
2816 if (!TYPE_NAME (t))
2817 {
2818 sprintf (type_name, "FLOAT_%d", precision);
2819 TYPE_NAME (t) = get_identifier (type_name);
2820 }
2821
2822 return t;
2823 }
2824
2825 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
2826 an unsigned type; otherwise a signed type is returned. */
2827
2828 tree
2829 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
2830 {
2831 if (mode == BLKmode)
2832 return NULL_TREE;
2833
2834 if (mode == VOIDmode)
2835 return void_type_node;
2836
2837 if (COMPLEX_MODE_P (mode))
2838 return NULL_TREE;
2839
2840 if (SCALAR_FLOAT_MODE_P (mode))
2841 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
2842
2843 if (SCALAR_INT_MODE_P (mode))
2844 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
2845
2846 if (VECTOR_MODE_P (mode))
2847 {
2848 enum machine_mode inner_mode = GET_MODE_INNER (mode);
2849 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
2850 if (inner_type)
2851 return build_vector_type_for_mode (inner_type, mode);
2852 }
2853
2854 return NULL_TREE;
2855 }
2856
2857 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
2858
2859 tree
2860 gnat_unsigned_type (tree type_node)
2861 {
2862 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
2863
2864 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2865 {
2866 type = copy_node (type);
2867 TREE_TYPE (type) = type_node;
2868 }
2869 else if (TREE_TYPE (type_node)
2870 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2871 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2872 {
2873 type = copy_node (type);
2874 TREE_TYPE (type) = TREE_TYPE (type_node);
2875 }
2876
2877 return type;
2878 }
2879
2880 /* Return the signed version of a TYPE_NODE, a scalar type. */
2881
2882 tree
2883 gnat_signed_type (tree type_node)
2884 {
2885 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
2886
2887 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
2888 {
2889 type = copy_node (type);
2890 TREE_TYPE (type) = type_node;
2891 }
2892 else if (TREE_TYPE (type_node)
2893 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
2894 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
2895 {
2896 type = copy_node (type);
2897 TREE_TYPE (type) = TREE_TYPE (type_node);
2898 }
2899
2900 return type;
2901 }
2902
2903 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
2904 transparently converted to each other. */
2905
2906 int
2907 gnat_types_compatible_p (tree t1, tree t2)
2908 {
2909 enum tree_code code;
2910
2911 /* This is the default criterion. */
2912 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
2913 return 1;
2914
2915 /* We only check structural equivalence here. */
2916 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
2917 return 0;
2918
2919 /* Vector types are also compatible if they have the same number of subparts
2920 and the same form of (scalar) element type. */
2921 if (code == VECTOR_TYPE
2922 && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
2923 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
2924 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
2925 return 1;
2926
2927 /* Array types are also compatible if they are constrained and have the same
2928 domain(s) and the same component type. */
2929 if (code == ARRAY_TYPE
2930 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
2931 || (TYPE_DOMAIN (t1)
2932 && TYPE_DOMAIN (t2)
2933 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
2934 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
2935 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
2936 TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
2937 && (TREE_TYPE (t1) == TREE_TYPE (t2)
2938 || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
2939 && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))))
2940 return 1;
2941
2942 return 0;
2943 }
2944
2945 /* Return true if EXPR is a useless type conversion. */
2946
2947 bool
2948 gnat_useless_type_conversion (tree expr)
2949 {
2950 if (CONVERT_EXPR_P (expr)
2951 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
2952 || TREE_CODE (expr) == NON_LVALUE_EXPR)
2953 return gnat_types_compatible_p (TREE_TYPE (expr),
2954 TREE_TYPE (TREE_OPERAND (expr, 0)));
2955
2956 return false;
2957 }
2958
2959 /* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
2960
2961 bool
2962 fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
2963 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
2964 {
2965 return TYPE_CI_CO_LIST (t) == cico_list
2966 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
2967 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
2968 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
2969 }
2970 \f
/* EXP is an expression for the size of an object.  If this size contains
   discriminant references, replace them with the maximum (if MAX_P) or
   minimum (if !MAX_P) possible value of the discriminant.

   Note that for MINUS_EXPR and NEGATE_EXPR the sense of MAX_P is flipped
   for the (second) operand, since subtracting a larger value yields a
   smaller result.  */

tree
max_size (tree exp, bool max_p)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_declaration:
    case tcc_constant:
      /* Nothing to substitute in a decl or a constant.  */
      return exp;

    case tcc_vl_exp:
      if (code == CALL_EXPR)
	{
	  tree t, *argarray;
	  int n, i;

	  /* Try to inline the call first; if that works, recurse on the
	     resulting expression.  */
	  t = maybe_inline_call_in_expr (exp);
	  if (t)
	    return max_size (t, max_p);

	  /* Otherwise rebuild the call with maximized arguments.  */
	  n = call_expr_nargs (exp);
	  gcc_assert (n > 0);
	  argarray = XALLOCAVEC (tree, n);
	  for (i = 0; i < n; i++)
	    argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
	  return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
	}
      break;

    case tcc_reference:
      /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
	 modify.  Otherwise, we treat it like a variable.  */
      if (!CONTAINS_PLACEHOLDER_P (exp))
	return exp;

      /* Replace the reference with the extremum of the type of its
	 second operand, i.e. presumably the referenced discriminant
	 field -- NOTE(review): confirm operand 1 is always the field.  */
      type = TREE_TYPE (TREE_OPERAND (exp, 1));
      return
	max_size (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type), true);

    case tcc_comparison:
      /* A comparison yields at most 1 and at least 0.  */
      return max_p ? size_one_node : size_zero_node;

    case tcc_unary:
    case tcc_binary:
    case tcc_expression:
      switch (TREE_CODE_LENGTH (code))
	{
	case 1:
	  if (code == SAVE_EXPR)
	    return exp;
	  else if (code == NON_LVALUE_EXPR)
	    return max_size (TREE_OPERAND (exp, 0), max_p);
	  else
	    return
	      fold_build1 (code, type,
			   max_size (TREE_OPERAND (exp, 0),
				     code == NEGATE_EXPR ? !max_p : max_p));

	case 2:
	  if (code == COMPOUND_EXPR)
	    return max_size (TREE_OPERAND (exp, 1), max_p);

	  {
	    tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
	    tree rhs = max_size (TREE_OPERAND (exp, 1),
				 code == MINUS_EXPR ? !max_p : max_p);

	    /* Special-case wanting the maximum value of a MIN_EXPR.
	       In that case, if one side overflows, return the other.
	       sizetype is signed, but we know sizes are non-negative.
	       Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
	       overflowing and the RHS a variable.  */
	    if (max_p
		&& code == MIN_EXPR
		&& TREE_CODE (rhs) == INTEGER_CST
		&& TREE_OVERFLOW (rhs))
	      return lhs;
	    else if (max_p
		     && code == MIN_EXPR
		     && TREE_CODE (lhs) == INTEGER_CST
		     && TREE_OVERFLOW (lhs))
	      return rhs;
	    else if ((code == MINUS_EXPR || code == PLUS_EXPR)
		     && TREE_CODE (lhs) == INTEGER_CST
		     && TREE_OVERFLOW (lhs)
		     && !TREE_CONSTANT (rhs))
	      return lhs;
	    else
	      return fold_build2 (code, type, lhs, rhs);
	  }

	case 3:
	  if (code == COND_EXPR)
	    /* Take the extremum over both arms of the conditional; the
	       condition itself does not matter for the bound.  */
	    return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
				max_size (TREE_OPERAND (exp, 1), max_p),
				max_size (TREE_OPERAND (exp, 2), max_p));
	}

      /* Other tree classes cannot happen.  */
    default:
      break;
    }

  /* Falling out of the switch means an unsupported tree shape.  */
  gcc_unreachable ();
}
3082 \f
/* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
   EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
   Return a constructor for the template.  */

tree
build_template (tree template_type, tree array_type, tree expr)
{
  VEC(constructor_elt,gc) *template_elts = NULL;
  tree bound_list = NULL_TREE;
  tree field;

  /* Look through padding records and justified modular wrappers to get at
     the actual array type.  */
  while (TREE_CODE (array_type) == RECORD_TYPE
	 && (TYPE_PADDING_P (array_type)
	     || TYPE_JUSTIFIED_MODULAR_P (array_type)))
    array_type = TREE_TYPE (TYPE_FIELDS (array_type));

  /* Use the recorded actual bounds when the type carries them.  */
  if (TREE_CODE (array_type) == ARRAY_TYPE
      || (TREE_CODE (array_type) == INTEGER_TYPE
	  && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
    bound_list = TYPE_ACTUAL_BOUNDS (array_type);

  /* First make the list for a CONSTRUCTOR for the template.  Go down the
     field list of the template instead of the type chain because this
     array might be an Ada array of arrays and we can't tell where the
     nested arrays stop being the underlying object.

     The loop increment advances BOUND_LIST when available, otherwise
     walks into the element type of ARRAY_TYPE; FIELD steps by two since
     template fields come in (min, max) pairs.  */

  for (field = TYPE_FIELDS (template_type); field;
       (bound_list
	? (bound_list = TREE_CHAIN (bound_list))
	: (array_type = TREE_TYPE (array_type))),
       field = DECL_CHAIN (DECL_CHAIN (field)))
    {
      tree bounds, min, max;

      /* If we have a bound list, get the bounds from there.  Likewise
	 for an ARRAY_TYPE.  Otherwise, if expr is a PARM_DECL with
	 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
	 This will give us a maximum range.  */
      if (bound_list)
	bounds = TREE_VALUE (bound_list);
      else if (TREE_CODE (array_type) == ARRAY_TYPE)
	bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
      else if (expr && TREE_CODE (expr) == PARM_DECL
	       && DECL_BY_COMPONENT_PTR_P (expr))
	bounds = TREE_TYPE (field);
      else
	gcc_unreachable ();

      min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
      max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));

      /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
	 substitute it from OBJECT.  */
      min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
      max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);

      /* Append the (min, max) pair for this dimension.  */
      CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
      CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
    }

  return gnat_build_constructor (template_type, template_elts);
}
3145 \f
/* Helper routine to make a descriptor field.  FIELD_LIST is the list of
   decls being built; the new decl is chained on to the front of the list
   and returned as the new list head.  NAME, TYPE and REC_TYPE describe the
   FIELD_DECL to create; INITIAL becomes its DECL_INITIAL, i.e. the
   expression to use when a constructor is made for the descriptor type.  */

static tree
make_descriptor_field (const char *name, tree type, tree rec_type,
		       tree initial, tree field_list)
{
  tree field
    = create_field_decl (get_identifier (name), type, rec_type, NULL_TREE,
			 NULL_TREE, 0, 0);

  DECL_INITIAL (field) = initial;
  DECL_CHAIN (field) = field_list;
  return field;
}
3161
/* Build a 32-bit VMS descriptor from a Mechanism_Type, which must specify a
   descriptor type, and the GCC type of an object.  Each FIELD_DECL in the
   type contains in its DECL_INITIAL the expression to use when a constructor
   is made for the type.  GNAT_ENTITY is an entity used to print out an error
   message if the mechanism cannot be applied to an object of that type and
   also for the name.

   The DTYPE and CLASS numeric codes below are the VMS descriptor encodings
   for the element type and descriptor class respectively.  */

tree
build_vms_descriptor32 (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
{
  tree record_type = make_node (RECORD_TYPE);
  tree pointer32_type, pointer64_type;
  tree field_list = NULL_TREE;
  int klass, ndim, i, dtype = 0;
  tree inner_type, tem;
  tree *idx_arr;

  /* If TYPE is an unconstrained array, use the underlying array type.  */
  if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
    type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));

  /* If this is an array, compute the number of dimensions in the array,
     get the index types, and point to the inner type.  */
  if (TREE_CODE (type) != ARRAY_TYPE)
    ndim = 0;
  else
    for (ndim = 1, inner_type = type;
	 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
	 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
	 ndim++, inner_type = TREE_TYPE (inner_type))
      ;

  idx_arr = XALLOCAVEC (tree, ndim);

  /* For Fortran-convention arrays (except with NCA descriptors), collect
     the index types in reverse order.  */
  if (mech != By_Descriptor_NCA && mech != By_Short_Descriptor_NCA
      && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
    for (i = ndim - 1, inner_type = type;
	 i >= 0;
	 i--, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);
  else
    for (i = 0, inner_type = type;
	 i < ndim;
	 i++, inner_type = TREE_TYPE (inner_type))
      idx_arr[i] = TYPE_DOMAIN (inner_type);

  /* Now get the DTYPE value.  */
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      /* VAX floating-point types are encoded by their decimal digits,
	 other discrete types by their size and signedness.  */
      if (TYPE_VAX_FLOATING_POINT_P (type))
	switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
	  {
	  case 6:
	    dtype = 10;
	    break;
	  case 9:
	    dtype = 11;
	    break;
	  case 15:
	    dtype = 27;
	    break;
	  }
      else
	switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
	  {
	  case 8:
	    dtype = TYPE_UNSIGNED (type) ? 2 : 6;
	    break;
	  case 16:
	    dtype = TYPE_UNSIGNED (type) ? 3 : 7;
	    break;
	  case 32:
	    dtype = TYPE_UNSIGNED (type) ? 4 : 8;
	    break;
	  case 64:
	    dtype = TYPE_UNSIGNED (type) ? 5 : 9;
	    break;
	  case 128:
	    dtype = TYPE_UNSIGNED (type) ? 25 : 26;
	    break;
	  }
      break;

    case REAL_TYPE:
      dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
      break;

    case COMPLEX_TYPE:
      if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
	  && TYPE_VAX_FLOATING_POINT_P (type))
	switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
	  {
	  case 6:
	    dtype = 12;
	    break;
	  case 9:
	    dtype = 13;
	    break;
	  case 15:
	    dtype = 29;
	  }
      else
	dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
      break;

    case ARRAY_TYPE:
      dtype = 14;
      break;

    default:
      break;
    }

  /* Get the CLASS value.  */
  switch (mech)
    {
    case By_Descriptor_A:
    case By_Short_Descriptor_A:
      klass = 4;
      break;
    case By_Descriptor_NCA:
    case By_Short_Descriptor_NCA:
      klass = 10;
      break;
    case By_Descriptor_SB:
    case By_Short_Descriptor_SB:
      klass = 15;
      break;
    case By_Descriptor:
    case By_Short_Descriptor:
    case By_Descriptor_S:
    case By_Short_Descriptor_S:
    default:
      klass = 1;
      break;
    }

  /* Make the type for a descriptor for VMS.  The first four fields are the
     same for all types.  Note that fields are chained in reverse order,
     hence the nreverse call at the end.  */
  field_list
    = make_descriptor_field ("LENGTH", gnat_type_for_size (16, 1), record_type,
			     size_in_bytes ((mech == By_Descriptor_A
					     || mech == By_Short_Descriptor_A)
					    ? inner_type : type),
			     field_list);
  field_list
    = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1), record_type,
			     size_int (dtype), field_list);
  field_list
    = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1), record_type,
			     size_int (klass), field_list);

  pointer32_type = build_pointer_type_for_mode (type, SImode, false);
  pointer64_type = build_pointer_type_for_mode (type, DImode, false);

  /* Ensure that only 32-bit pointers are passed in 32-bit descriptors.  Note
     that we cannot build a template call to the CE routine as it would get a
     wrong source location; instead we use a second placeholder for it.  */
  tem = build_unary_op (ADDR_EXPR, pointer64_type,
			build0 (PLACEHOLDER_EXPR, type));
  tem = build3 (COND_EXPR, pointer32_type,
		Pmode != SImode
		? build_binary_op (GE_EXPR, boolean_type_node, tem,
				   build_int_cstu (pointer64_type, 0x80000000))
		: boolean_false_node,
		build0 (PLACEHOLDER_EXPR, void_type_node),
		convert (pointer32_type, tem));

  field_list
    = make_descriptor_field ("POINTER", pointer32_type, record_type, tem,
			     field_list);

  /* The remaining fields depend on the descriptor class.  */
  switch (mech)
    {
    case By_Descriptor:
    case By_Short_Descriptor:
    case By_Descriptor_S:
    case By_Short_Descriptor_S:
      break;

    case By_Descriptor_SB:
    case By_Short_Descriptor_SB:
      field_list
	= make_descriptor_field ("SB_L1", gnat_type_for_size (32, 1),
				 record_type,
				 (TREE_CODE (type) == ARRAY_TYPE
				  ? TYPE_MIN_VALUE (TYPE_DOMAIN (type))
				  : size_zero_node),
				 field_list);
      field_list
	= make_descriptor_field ("SB_U1", gnat_type_for_size (32, 1),
				 record_type,
				 (TREE_CODE (type) == ARRAY_TYPE
				  ? TYPE_MAX_VALUE (TYPE_DOMAIN (type))
				  : size_zero_node),
				 field_list);
      break;

    case By_Descriptor_A:
    case By_Short_Descriptor_A:
    case By_Descriptor_NCA:
    case By_Short_Descriptor_NCA:
      field_list
	= make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
				 record_type, size_zero_node, field_list);

      field_list
	= make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
				 record_type, size_zero_node, field_list);

      field_list
	= make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
				 record_type,
				 size_int ((mech == By_Descriptor_NCA
					    || mech == By_Short_Descriptor_NCA)
					   ? 0
					   /* Set FL_COLUMN, FL_COEFF, and
					      FL_BOUNDS.  */
					   : (TREE_CODE (type) == ARRAY_TYPE
					      && TYPE_CONVENTION_FORTRAN_P
						 (type)
					      ? 224 : 192)),
				 field_list);

      field_list
	= make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
				 record_type, size_int (ndim), field_list);

      field_list
	= make_descriptor_field ("ARSIZE", gnat_type_for_size (32, 1),
				 record_type, size_in_bytes (type),
				 field_list);

      /* Now build a pointer to the 0,0,0... element.  */
      tem = build0 (PLACEHOLDER_EXPR, type);
      for (i = 0, inner_type = type; i < ndim;
	   i++, inner_type = TREE_TYPE (inner_type))
	tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
		      convert (TYPE_DOMAIN (inner_type), size_zero_node),
		      NULL_TREE, NULL_TREE);

      field_list
	= make_descriptor_field ("A0", pointer32_type, record_type,
				 build1 (ADDR_EXPR, pointer32_type, tem),
				 field_list);

      /* Next come the addressing coefficients.  */
      tem = size_one_node;
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];
	  /* Length of dimension I: MAX - MIN + 1, scaled by the running
	     product TEM for NCA descriptors.  */
	  tree idx_length
	    = size_binop (MULT_EXPR, tem,
			  size_binop (PLUS_EXPR,
				      size_binop (MINUS_EXPR,
						  TYPE_MAX_VALUE (idx_arr[i]),
						  TYPE_MIN_VALUE (idx_arr[i])),
				      size_int (1)));

	  fname[0] = ((mech == By_Descriptor_NCA ||
		       mech == By_Short_Descriptor_NCA) ? 'S' : 'M');
	  fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = make_descriptor_field (fname, gnat_type_for_size (32, 1),
				     record_type, idx_length, field_list);

	  if (mech == By_Descriptor_NCA || mech == By_Short_Descriptor_NCA)
	    tem = idx_length;
	}

      /* Finally here are the bounds, a (Ln, Un) pair per dimension.  */
      for (i = 0; i < ndim; i++)
	{
	  char fname[3];

	  fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
	  field_list
	    = make_descriptor_field (fname, gnat_type_for_size (32, 1),
				     record_type, TYPE_MIN_VALUE (idx_arr[i]),
				     field_list);

	  fname[0] = 'U';
	  field_list
	    = make_descriptor_field (fname, gnat_type_for_size (32, 1),
				     record_type, TYPE_MAX_VALUE (idx_arr[i]),
				     field_list);
	}
      break;

    default:
      post_error ("unsupported descriptor type for &", gnat_entity);
    }

  TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC");
  finish_record_type (record_type, nreverse (field_list), 0, false);
  return record_type;
}
3462
3463 /* Build a 64-bit VMS descriptor from a Mechanism_Type, which must specify a
3464 descriptor type, and the GCC type of an object. Each FIELD_DECL in the
3465 type contains in its DECL_INITIAL the expression to use when a constructor
3466 is made for the type. GNAT_ENTITY is an entity used to print out an error
3467 message if the mechanism cannot be applied to an object of that type and
3468 also for the name. */
3469
3470 tree
3471 build_vms_descriptor (tree type, Mechanism_Type mech, Entity_Id gnat_entity)
3472 {
3473 tree record_type = make_node (RECORD_TYPE);
3474 tree pointer64_type;
3475 tree field_list = NULL_TREE;
3476 int klass, ndim, i, dtype = 0;
3477 tree inner_type, tem;
3478 tree *idx_arr;
3479
3480 /* If TYPE is an unconstrained array, use the underlying array type. */
3481 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
3482 type = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type))));
3483
3484 /* If this is an array, compute the number of dimensions in the array,
3485 get the index types, and point to the inner type. */
3486 if (TREE_CODE (type) != ARRAY_TYPE)
3487 ndim = 0;
3488 else
3489 for (ndim = 1, inner_type = type;
3490 TREE_CODE (TREE_TYPE (inner_type)) == ARRAY_TYPE
3491 && TYPE_MULTI_ARRAY_P (TREE_TYPE (inner_type));
3492 ndim++, inner_type = TREE_TYPE (inner_type))
3493 ;
3494
3495 idx_arr = XALLOCAVEC (tree, ndim);
3496
3497 if (mech != By_Descriptor_NCA
3498 && TREE_CODE (type) == ARRAY_TYPE && TYPE_CONVENTION_FORTRAN_P (type))
3499 for (i = ndim - 1, inner_type = type;
3500 i >= 0;
3501 i--, inner_type = TREE_TYPE (inner_type))
3502 idx_arr[i] = TYPE_DOMAIN (inner_type);
3503 else
3504 for (i = 0, inner_type = type;
3505 i < ndim;
3506 i++, inner_type = TREE_TYPE (inner_type))
3507 idx_arr[i] = TYPE_DOMAIN (inner_type);
3508
3509 /* Now get the DTYPE value. */
3510 switch (TREE_CODE (type))
3511 {
3512 case INTEGER_TYPE:
3513 case ENUMERAL_TYPE:
3514 case BOOLEAN_TYPE:
3515 if (TYPE_VAX_FLOATING_POINT_P (type))
3516 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
3517 {
3518 case 6:
3519 dtype = 10;
3520 break;
3521 case 9:
3522 dtype = 11;
3523 break;
3524 case 15:
3525 dtype = 27;
3526 break;
3527 }
3528 else
3529 switch (GET_MODE_BITSIZE (TYPE_MODE (type)))
3530 {
3531 case 8:
3532 dtype = TYPE_UNSIGNED (type) ? 2 : 6;
3533 break;
3534 case 16:
3535 dtype = TYPE_UNSIGNED (type) ? 3 : 7;
3536 break;
3537 case 32:
3538 dtype = TYPE_UNSIGNED (type) ? 4 : 8;
3539 break;
3540 case 64:
3541 dtype = TYPE_UNSIGNED (type) ? 5 : 9;
3542 break;
3543 case 128:
3544 dtype = TYPE_UNSIGNED (type) ? 25 : 26;
3545 break;
3546 }
3547 break;
3548
3549 case REAL_TYPE:
3550 dtype = GET_MODE_BITSIZE (TYPE_MODE (type)) == 32 ? 52 : 53;
3551 break;
3552
3553 case COMPLEX_TYPE:
3554 if (TREE_CODE (TREE_TYPE (type)) == INTEGER_TYPE
3555 && TYPE_VAX_FLOATING_POINT_P (type))
3556 switch (tree_low_cst (TYPE_DIGITS_VALUE (type), 1))
3557 {
3558 case 6:
3559 dtype = 12;
3560 break;
3561 case 9:
3562 dtype = 13;
3563 break;
3564 case 15:
3565 dtype = 29;
3566 }
3567 else
3568 dtype = GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) == 32 ? 54: 55;
3569 break;
3570
3571 case ARRAY_TYPE:
3572 dtype = 14;
3573 break;
3574
3575 default:
3576 break;
3577 }
3578
3579 /* Get the CLASS value. */
3580 switch (mech)
3581 {
3582 case By_Descriptor_A:
3583 klass = 4;
3584 break;
3585 case By_Descriptor_NCA:
3586 klass = 10;
3587 break;
3588 case By_Descriptor_SB:
3589 klass = 15;
3590 break;
3591 case By_Descriptor:
3592 case By_Descriptor_S:
3593 default:
3594 klass = 1;
3595 break;
3596 }
3597
3598 /* Make the type for a 64-bit descriptor for VMS. The first six fields
3599 are the same for all types. */
3600 field_list
3601 = make_descriptor_field ("MBO", gnat_type_for_size (16, 1),
3602 record_type, size_int (1), field_list);
3603 field_list
3604 = make_descriptor_field ("DTYPE", gnat_type_for_size (8, 1),
3605 record_type, size_int (dtype), field_list);
3606 field_list
3607 = make_descriptor_field ("CLASS", gnat_type_for_size (8, 1),
3608 record_type, size_int (klass), field_list);
3609 field_list
3610 = make_descriptor_field ("MBMO", gnat_type_for_size (32, 1),
3611 record_type, size_int (-1), field_list);
3612 field_list
3613 = make_descriptor_field ("LENGTH", gnat_type_for_size (64, 1),
3614 record_type,
3615 size_in_bytes (mech == By_Descriptor_A
3616 ? inner_type : type),
3617 field_list);
3618
3619 pointer64_type = build_pointer_type_for_mode (type, DImode, false);
3620
3621 field_list
3622 = make_descriptor_field ("POINTER", pointer64_type, record_type,
3623 build_unary_op (ADDR_EXPR, pointer64_type,
3624 build0 (PLACEHOLDER_EXPR, type)),
3625 field_list);
3626
3627 switch (mech)
3628 {
3629 case By_Descriptor:
3630 case By_Descriptor_S:
3631 break;
3632
3633 case By_Descriptor_SB:
3634 field_list
3635 = make_descriptor_field ("SB_L1", gnat_type_for_size (64, 1),
3636 record_type,
3637 (TREE_CODE (type) == ARRAY_TYPE
3638 ? TYPE_MIN_VALUE (TYPE_DOMAIN (type))
3639 : size_zero_node),
3640 field_list);
3641 field_list
3642 = make_descriptor_field ("SB_U1", gnat_type_for_size (64, 1),
3643 record_type,
3644 (TREE_CODE (type) == ARRAY_TYPE
3645 ? TYPE_MAX_VALUE (TYPE_DOMAIN (type))
3646 : size_zero_node),
3647 field_list);
3648 break;
3649
3650 case By_Descriptor_A:
3651 case By_Descriptor_NCA:
3652 field_list
3653 = make_descriptor_field ("SCALE", gnat_type_for_size (8, 1),
3654 record_type, size_zero_node, field_list);
3655
3656 field_list
3657 = make_descriptor_field ("DIGITS", gnat_type_for_size (8, 1),
3658 record_type, size_zero_node, field_list);
3659
3660 dtype = (mech == By_Descriptor_NCA
3661 ? 0
3662 /* Set FL_COLUMN, FL_COEFF, and
3663 FL_BOUNDS. */
3664 : (TREE_CODE (type) == ARRAY_TYPE
3665 && TYPE_CONVENTION_FORTRAN_P (type)
3666 ? 224 : 192));
3667 field_list
3668 = make_descriptor_field ("AFLAGS", gnat_type_for_size (8, 1),
3669 record_type, size_int (dtype),
3670 field_list);
3671
3672 field_list
3673 = make_descriptor_field ("DIMCT", gnat_type_for_size (8, 1),
3674 record_type, size_int (ndim), field_list);
3675
3676 field_list
3677 = make_descriptor_field ("MBZ", gnat_type_for_size (32, 1),
3678 record_type, size_int (0), field_list);
3679 field_list
3680 = make_descriptor_field ("ARSIZE", gnat_type_for_size (64, 1),
3681 record_type, size_in_bytes (type),
3682 field_list);
3683
3684 /* Now build a pointer to the 0,0,0... element. */
3685 tem = build0 (PLACEHOLDER_EXPR, type);
3686 for (i = 0, inner_type = type; i < ndim;
3687 i++, inner_type = TREE_TYPE (inner_type))
3688 tem = build4 (ARRAY_REF, TREE_TYPE (inner_type), tem,
3689 convert (TYPE_DOMAIN (inner_type), size_zero_node),
3690 NULL_TREE, NULL_TREE);
3691
3692 field_list
3693 = make_descriptor_field ("A0", pointer64_type, record_type,
3694 build1 (ADDR_EXPR, pointer64_type, tem),
3695 field_list);
3696
3697 /* Next come the addressing coefficients. */
3698 tem = size_one_node;
3699 for (i = 0; i < ndim; i++)
3700 {
3701 char fname[3];
3702 tree idx_length
3703 = size_binop (MULT_EXPR, tem,
3704 size_binop (PLUS_EXPR,
3705 size_binop (MINUS_EXPR,
3706 TYPE_MAX_VALUE (idx_arr[i]),
3707 TYPE_MIN_VALUE (idx_arr[i])),
3708 size_int (1)));
3709
3710 fname[0] = (mech == By_Descriptor_NCA ? 'S' : 'M');
3711 fname[1] = '0' + i, fname[2] = 0;
3712 field_list
3713 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
3714 record_type, idx_length, field_list);
3715
3716 if (mech == By_Descriptor_NCA)
3717 tem = idx_length;
3718 }
3719
3720 /* Finally here are the bounds. */
3721 for (i = 0; i < ndim; i++)
3722 {
3723 char fname[3];
3724
3725 fname[0] = 'L', fname[1] = '0' + i, fname[2] = 0;
3726 field_list
3727 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
3728 record_type,
3729 TYPE_MIN_VALUE (idx_arr[i]), field_list);
3730
3731 fname[0] = 'U';
3732 field_list
3733 = make_descriptor_field (fname, gnat_type_for_size (64, 1),
3734 record_type,
3735 TYPE_MAX_VALUE (idx_arr[i]), field_list);
3736 }
3737 break;
3738
3739 default:
3740 post_error ("unsupported descriptor type for &", gnat_entity);
3741 }
3742
3743 TYPE_NAME (record_type) = create_concat_name (gnat_entity, "DESC64");
3744 finish_record_type (record_type, nreverse (field_list), 0, false);
3745 return record_type;
3746 }
3747
3748 /* Fill in a VMS descriptor of GNU_TYPE for GNU_EXPR and return the result.
3749 GNAT_ACTUAL is the actual parameter for which the descriptor is built. */
3750
3751 tree
3752 fill_vms_descriptor (tree gnu_type, tree gnu_expr, Node_Id gnat_actual)
3753 {
3754 VEC(constructor_elt,gc) *v = NULL;
3755 tree field;
3756
3757 gnu_expr = maybe_unconstrained_array (gnu_expr);
3758 gnu_expr = gnat_protect_expr (gnu_expr);
3759 gnat_mark_addressable (gnu_expr);
3760
3761 /* We may need to substitute both GNU_EXPR and a CALL_EXPR to the raise CE
3762 routine in case we have a 32-bit descriptor. */
3763 gnu_expr = build2 (COMPOUND_EXPR, void_type_node,
3764 build_call_raise (CE_Range_Check_Failed, gnat_actual,
3765 N_Raise_Constraint_Error),
3766 gnu_expr);
3767
3768 for (field = TYPE_FIELDS (gnu_type); field; field = DECL_CHAIN (field))
3769 {
3770 tree value
3771 = convert (TREE_TYPE (field),
3772 SUBSTITUTE_PLACEHOLDER_IN_EXPR (DECL_INITIAL (field),
3773 gnu_expr));
3774 CONSTRUCTOR_APPEND_ELT (v, field, value);
3775 }
3776
3777 return gnat_build_constructor (gnu_type, v);
3778 }
3779
/* Convert GNU_EXPR, a pointer to a 64bit VMS descriptor, to GNU_TYPE, a
   regular pointer or fat pointer type.  GNAT_SUBPROG is the subprogram to
   which the VMS descriptor is passed; it is only used to post an error.
   GNU_TYPE must be either a pointer type or a fat pointer type; anything
   else aborts via gcc_unreachable.  The field positions hard-coded below
   must match the layout created by the 64-bit descriptor builder
   (see the "DESC64" record built earlier in this file).  */

static tree
convert_vms_descriptor64 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
{
  tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
  tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
  /* The CLASS field is the 3rd field in the descriptor.  */
  tree klass = DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type)));
  /* The POINTER field is the 6th field in the descriptor.  */
  tree pointer = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (klass)));

  /* Retrieve the value of the POINTER field.  */
  tree gnu_expr64
    = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);

  /* For a regular pointer target, the data pointer is all we need.  */
  if (POINTER_TYPE_P (gnu_type))
    return convert (gnu_type, gnu_expr64);

  else if (TYPE_IS_FAT_POINTER_P (gnu_type))
    {
      tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
      tree p_bounds_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type)));
      tree template_type = TREE_TYPE (p_bounds_type);
      tree min_field = TYPE_FIELDS (template_type);
      tree max_field = DECL_CHAIN (TYPE_FIELDS (template_type));
      tree template_tree, template_addr, aflags, dimct, t, u;
      /* See the head comment of build_vms_descriptor.  */
      int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
      tree lfield, ufield;
      VEC(constructor_elt,gc) *v;

      /* Convert POINTER to the pointer-to-array type.  */
      gnu_expr64 = convert (p_array_type, gnu_expr64);

      /* Build the bounds template from the descriptor according to its
	 CLASS; the data pointer GNU_EXPR64 is the same in all cases.  */
      switch (iklass)
	{
	case 1:  /* Class S  */
	case 15: /* Class SB */
	  /* Build {1, LENGTH} template; LENGTH64 is the 5th field.  */
	  v = VEC_alloc (constructor_elt, gc, 2);
	  t = DECL_CHAIN (DECL_CHAIN (klass));
	  t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  CONSTRUCTOR_APPEND_ELT (v, min_field,
				  convert (TREE_TYPE (min_field),
					   integer_one_node));
	  CONSTRUCTOR_APPEND_ELT (v, max_field,
				  convert (TREE_TYPE (max_field), t));
	  template_tree = gnat_build_constructor (template_type, v);
	  template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);

	  /* For class S, we are done.  */
	  if (iklass == 1)
	    break;

	  /* Test that we really have a SB descriptor, like DEC Ada.  */
	  t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
	  u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
	  u = build_binary_op (EQ_EXPR, boolean_type_node, t, u);
	  /* If so, there is already a template in the descriptor and
	     it is located right after the POINTER field.  The fields are
	     64bits so they must be repacked.  */
	  t = DECL_CHAIN (pointer);
	  lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);

	  t = DECL_CHAIN (t);
	  ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  ufield = convert
	    (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type))), ufield);

	  /* Build the template in the form of a constructor.  */
	  v = VEC_alloc (constructor_elt, gc, 2);
	  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (template_type), lfield);
	  CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (template_type)),
				  ufield);
	  template_tree = gnat_build_constructor (template_type, v);

	  /* Otherwise use the {1, LENGTH} template we build above.  */
	  template_addr = build3 (COND_EXPR, p_bounds_type, u,
				  build_unary_op (ADDR_EXPR, p_bounds_type,
						  template_tree),
				  template_addr);
	  break;

	case 4:  /* Class A */
	  /* The AFLAGS field is the 3rd field after the pointer in the
	     descriptor.  */
	  t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer)));
	  aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  /* The DIMCT field is the next field in the descriptor after
	     aflags.  */
	  t = DECL_CHAIN (t);
	  dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  /* Raise CONSTRAINT_ERROR if either more than 1 dimension
	     or FL_COEFF or FL_BOUNDS not set.  192 is the mask of the
	     FL_COEFF and FL_BOUNDS bits in AFLAGS.  */
	  u = build_int_cst (TREE_TYPE (aflags), 192);
	  u = build_binary_op (TRUTH_OR_EXPR, boolean_type_node,
			       build_binary_op (NE_EXPR, boolean_type_node,
						dimct,
						convert (TREE_TYPE (dimct),
							 size_one_node)),
			       build_binary_op (NE_EXPR, boolean_type_node,
						build2 (BIT_AND_EXPR,
							TREE_TYPE (aflags),
							aflags, u),
						u));
	  /* There is already a template in the descriptor and it is located
	     in block 3.  The fields are 64bits so they must be repacked.  */
	  t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN
              (t)))));
	  lfield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  lfield = convert (TREE_TYPE (TYPE_FIELDS (template_type)), lfield);

	  t = DECL_CHAIN (t);
	  ufield = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  ufield = convert
	    (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (template_type))), ufield);

	  /* Build the template in the form of a constructor.  */
	  v = VEC_alloc (constructor_elt, gc, 2);
	  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (template_type), lfield);
	  CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (template_type)),
				  ufield);
	  template_tree = gnat_build_constructor (template_type, v);
	  /* Guard the template with the dimension/flags check built above;
	     if it fails, raise CONSTRAINT_ERROR at run time instead.  */
	  template_tree = build3 (COND_EXPR, template_type, u,
				  build_call_raise (CE_Length_Check_Failed, Empty,
						    N_Raise_Constraint_Error),
				  template_tree);
	  template_addr
	    = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
	  break;

	case 10: /* Class NCA */
	default:
	  post_error ("unsupported descriptor type for &", gnat_subprog);
	  template_addr = integer_zero_node;
	  break;
	}

      /* Build the fat pointer in the form of a constructor.  */
      v = VEC_alloc (constructor_elt, gc, 2);
      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (gnu_type), gnu_expr64);
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (gnu_type)),
			      template_addr);
      return gnat_build_constructor (gnu_type, v);
    }

  else
    gcc_unreachable ();
}
3933
/* Convert GNU_EXPR, a pointer to a 32bit VMS descriptor, to GNU_TYPE, a
   regular pointer or fat pointer type.  GNAT_SUBPROG is the subprogram to
   which the VMS descriptor is passed; it is only used to post an error.
   GNU_TYPE must be either a pointer type or a fat pointer type; anything
   else aborts via gcc_unreachable.  The field positions hard-coded below
   must match the 32-bit descriptor layout, which differs from the 64-bit
   one handled by convert_vms_descriptor64 above.  */

static tree
convert_vms_descriptor32 (tree gnu_type, tree gnu_expr, Entity_Id gnat_subprog)
{
  tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
  tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
  /* The CLASS field is the 3rd field in the descriptor.  */
  tree klass = DECL_CHAIN (DECL_CHAIN (TYPE_FIELDS (desc_type)));
  /* The POINTER field is the 4th field in the descriptor.  */
  tree pointer = DECL_CHAIN (klass);

  /* Retrieve the value of the POINTER field.  */
  tree gnu_expr32
    = build3 (COMPONENT_REF, TREE_TYPE (pointer), desc, pointer, NULL_TREE);

  /* For a regular pointer target, the data pointer is all we need.  */
  if (POINTER_TYPE_P (gnu_type))
    return convert (gnu_type, gnu_expr32);

  else if (TYPE_IS_FAT_POINTER_P (gnu_type))
    {
      tree p_array_type = TREE_TYPE (TYPE_FIELDS (gnu_type));
      tree p_bounds_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (gnu_type)));
      tree template_type = TREE_TYPE (p_bounds_type);
      tree min_field = TYPE_FIELDS (template_type);
      tree max_field = DECL_CHAIN (TYPE_FIELDS (template_type));
      tree template_tree, template_addr, aflags, dimct, t, u;
      /* See the head comment of build_vms_descriptor.  */
      int iklass = TREE_INT_CST_LOW (DECL_INITIAL (klass));
      VEC(constructor_elt,gc) *v;

      /* Convert POINTER to the pointer-to-array type.  */
      gnu_expr32 = convert (p_array_type, gnu_expr32);

      /* Build the bounds template from the descriptor according to its
	 CLASS; the data pointer GNU_EXPR32 is the same in all cases.  */
      switch (iklass)
	{
	case 1:  /* Class S  */
	case 15: /* Class SB */
	  /* Build {1, LENGTH} template; LENGTH is the 1st field.  */
	  v = VEC_alloc (constructor_elt, gc, 2);
	  t = TYPE_FIELDS (desc_type);
	  t = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  CONSTRUCTOR_APPEND_ELT (v, min_field,
				  convert (TREE_TYPE (min_field),
					   integer_one_node));
	  CONSTRUCTOR_APPEND_ELT (v, max_field,
				  convert (TREE_TYPE (max_field), t));
	  template_tree = gnat_build_constructor (template_type, v);
	  template_addr = build_unary_op (ADDR_EXPR, NULL_TREE, template_tree);

	  /* For class S, we are done.  */
	  if (iklass == 1)
	    break;

	  /* Test that we really have a SB descriptor, like DEC Ada.  */
	  t = build3 (COMPONENT_REF, TREE_TYPE (klass), desc, klass, NULL);
	  u = convert (TREE_TYPE (klass), DECL_INITIAL (klass));
	  u = build_binary_op (EQ_EXPR, boolean_type_node, t, u);
	  /* If so, there is already a template in the descriptor and
	     it is located right after the POINTER field.  */
	  t = DECL_CHAIN (pointer);
	  template_tree
	    = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  /* Otherwise use the {1, LENGTH} template we build above.  */
	  template_addr = build3 (COND_EXPR, p_bounds_type, u,
				  build_unary_op (ADDR_EXPR, p_bounds_type,
						  template_tree),
				  template_addr);
	  break;

	case 4:  /* Class A */
	  /* The AFLAGS field is the 7th field in the descriptor.  */
	  t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (pointer)));
	  aflags = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  /* The DIMCT field is the 8th field in the descriptor.  */
	  t = DECL_CHAIN (t);
	  dimct = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  /* Raise CONSTRAINT_ERROR if either more than 1 dimension
	     or FL_COEFF or FL_BOUNDS not set.  192 is the mask of the
	     FL_COEFF and FL_BOUNDS bits in AFLAGS.  */
	  u = build_int_cst (TREE_TYPE (aflags), 192);
	  u = build_binary_op (TRUTH_OR_EXPR, boolean_type_node,
			       build_binary_op (NE_EXPR, boolean_type_node,
						dimct,
						convert (TREE_TYPE (dimct),
							 size_one_node)),
			       build_binary_op (NE_EXPR, boolean_type_node,
						build2 (BIT_AND_EXPR,
							TREE_TYPE (aflags),
							aflags, u),
						u));
	  /* There is already a template in the descriptor and it is
	     located at the start of block 3 (12th field).  */
	  t = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (t))));
	  template_tree
	    = build3 (COMPONENT_REF, TREE_TYPE (t), desc, t, NULL_TREE);
	  /* Guard the template with the dimension/flags check built above;
	     if it fails, raise CONSTRAINT_ERROR at run time instead.  */
	  template_tree = build3 (COND_EXPR, TREE_TYPE (t), u,
				  build_call_raise (CE_Length_Check_Failed, Empty,
						    N_Raise_Constraint_Error),
				  template_tree);
	  template_addr
	    = build_unary_op (ADDR_EXPR, p_bounds_type, template_tree);
	  break;

	case 10: /* Class NCA */
	default:
	  post_error ("unsupported descriptor type for &", gnat_subprog);
	  template_addr = integer_zero_node;
	  break;
	}

      /* Build the fat pointer in the form of a constructor.  */
      v = VEC_alloc (constructor_elt, gc, 2);
      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (gnu_type), gnu_expr32);
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (gnu_type)),
			      template_addr);

      return gnat_build_constructor (gnu_type, v);
    }

  else
    gcc_unreachable ();
}
4058
4059 /* Convert GNU_EXPR, a pointer to a VMS descriptor, to GNU_TYPE, a regular
4060 pointer or fat pointer type. GNU_EXPR_ALT_TYPE is the alternate (32-bit)
4061 pointer type of GNU_EXPR. BY_REF is true if the result is to be used by
4062 reference. GNAT_SUBPROG is the subprogram to which the VMS descriptor is
4063 passed. */
4064
4065 tree
4066 convert_vms_descriptor (tree gnu_type, tree gnu_expr, tree gnu_expr_alt_type,
4067 bool by_ref, Entity_Id gnat_subprog)
4068 {
4069 tree desc_type = TREE_TYPE (TREE_TYPE (gnu_expr));
4070 tree desc = build1 (INDIRECT_REF, desc_type, gnu_expr);
4071 tree mbo = TYPE_FIELDS (desc_type);
4072 const char *mbostr = IDENTIFIER_POINTER (DECL_NAME (mbo));
4073 tree mbmo = DECL_CHAIN (DECL_CHAIN (DECL_CHAIN (mbo)));
4074 tree real_type, is64bit, gnu_expr32, gnu_expr64;
4075
4076 if (by_ref)
4077 real_type = TREE_TYPE (gnu_type);
4078 else
4079 real_type = gnu_type;
4080
4081 /* If the field name is not MBO, it must be 32-bit and no alternate.
4082 Otherwise primary must be 64-bit and alternate 32-bit. */
4083 if (strcmp (mbostr, "MBO") != 0)
4084 {
4085 tree ret = convert_vms_descriptor32 (real_type, gnu_expr, gnat_subprog);
4086 if (by_ref)
4087 ret = build_unary_op (ADDR_EXPR, gnu_type, ret);
4088 return ret;
4089 }
4090
4091 /* Build the test for 64-bit descriptor. */
4092 mbo = build3 (COMPONENT_REF, TREE_TYPE (mbo), desc, mbo, NULL_TREE);
4093 mbmo = build3 (COMPONENT_REF, TREE_TYPE (mbmo), desc, mbmo, NULL_TREE);
4094 is64bit
4095 = build_binary_op (TRUTH_ANDIF_EXPR, boolean_type_node,
4096 build_binary_op (EQ_EXPR, boolean_type_node,
4097 convert (integer_type_node, mbo),
4098 integer_one_node),
4099 build_binary_op (EQ_EXPR, boolean_type_node,
4100 convert (integer_type_node, mbmo),
4101 integer_minus_one_node));
4102
4103 /* Build the 2 possible end results. */
4104 gnu_expr64 = convert_vms_descriptor64 (real_type, gnu_expr, gnat_subprog);
4105 if (by_ref)
4106 gnu_expr64 = build_unary_op (ADDR_EXPR, gnu_type, gnu_expr64);
4107 gnu_expr = fold_convert (gnu_expr_alt_type, gnu_expr);
4108 gnu_expr32 = convert_vms_descriptor32 (real_type, gnu_expr, gnat_subprog);
4109 if (by_ref)
4110 gnu_expr32 = build_unary_op (ADDR_EXPR, gnu_type, gnu_expr32);
4111
4112 return build3 (COND_EXPR, gnu_type, is64bit, gnu_expr64, gnu_expr32);
4113 }
4114 \f
4115 /* Build a type to be used to represent an aliased object whose nominal type
4116 is an unconstrained array. This consists of a RECORD_TYPE containing a
4117 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
4118 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
4119 an arbitrary unconstrained object. Use NAME as the name of the record.
4120 DEBUG_INFO_P is true if we need to write debug information for the type. */
4121
4122 tree
4123 build_unc_object_type (tree template_type, tree object_type, tree name,
4124 bool debug_info_p)
4125 {
4126 tree type = make_node (RECORD_TYPE);
4127 tree template_field
4128 = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
4129 NULL_TREE, NULL_TREE, 0, 1);
4130 tree array_field
4131 = create_field_decl (get_identifier ("ARRAY"), object_type, type,
4132 NULL_TREE, NULL_TREE, 0, 1);
4133
4134 TYPE_NAME (type) = name;
4135 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
4136 DECL_CHAIN (template_field) = array_field;
4137 finish_record_type (type, template_field, 0, true);
4138
4139 /* Declare it now since it will never be declared otherwise. This is
4140 necessary to ensure that its subtrees are properly marked. */
4141 create_type_decl (name, type, NULL, true, debug_info_p, Empty);
4142
4143 return type;
4144 }
4145
4146 /* Same, taking a thin or fat pointer type instead of a template type. */
4147
4148 tree
4149 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
4150 tree name, bool debug_info_p)
4151 {
4152 tree template_type;
4153
4154 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
4155
4156 template_type
4157 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
4158 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
4159 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
4160
4161 return
4162 build_unc_object_type (template_type, object_type, name, debug_info_p);
4163 }
4164 \f
/* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
   In the normal case this is just two adjustments, but we have more to
   do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE.  Both pointer and
   reference chains hanging off OLD_TYPE are transferred onto NEW_TYPE
   and cleared on OLD_TYPE, so a later call with the same OLD_TYPE is
   a no-op.  */

void
update_pointer_to (tree old_type, tree new_type)
{
  tree ptr = TYPE_POINTER_TO (old_type);
  tree ref = TYPE_REFERENCE_TO (old_type);
  tree t;

  /* If this is the main variant, process all the other variants first.  */
  if (TYPE_MAIN_VARIANT (old_type) == old_type)
    for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
      update_pointer_to (t, new_type);

  /* If no pointers and no references, we are done.  */
  if (!ptr && !ref)
    return;

  /* Merge the old type qualifiers in the new type.

     Each old variant has qualifiers for specific reasons, and the new
     designated type as well.  Each set of qualifiers represents useful
     information grabbed at some point, and merging the two simply unifies
     these inputs into the final type description.

     Consider for instance a volatile type frozen after an access to constant
     type designating it; after the designated type's freeze, we get here with
     a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
     when the access type was processed.  We will make a volatile and readonly
     designated type, because that's what it really is.

     We might also get here for a non-dummy OLD_TYPE variant with different
     qualifiers than those of NEW_TYPE, for instance in some cases of pointers
     to private record type elaboration (see the comments around the call to
     this routine in gnat_to_gnu_entity <E_Access_Type>).  We have to merge
     the qualifiers in those cases too, to avoid accidentally discarding the
     initial set, and will often end up with OLD_TYPE == NEW_TYPE then.  */
  new_type
    = build_qualified_type (new_type,
			    TYPE_QUALS (old_type) | TYPE_QUALS (new_type));

  /* If old type and new type are identical, there is nothing to do.  */
  if (old_type == new_type)
    return;

  /* Otherwise, first handle the simple case.  */
  if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
    {
      tree new_ptr, new_ref;

      /* If pointer or reference already points to new type, nothing to do.
	 This can happen as update_pointer_to can be invoked multiple times
	 on the same couple of types because of the type variants.  */
      if ((ptr && TREE_TYPE (ptr) == new_type)
	  || (ref && TREE_TYPE (ref) == new_type))
	return;

      /* Chain PTR and its variants at the end.  */
      new_ptr = TYPE_POINTER_TO (new_type);
      if (new_ptr)
	{
	  while (TYPE_NEXT_PTR_TO (new_ptr))
	    new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
	  TYPE_NEXT_PTR_TO (new_ptr) = ptr;
	}
      else
	TYPE_POINTER_TO (new_type) = ptr;

      /* Now adjust them.  */
      for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
	for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
	  {
	    TREE_TYPE (t) = new_type;
	    /* Also retarget the cached null-bounds expression of a thin
	       pointer, since it designates the old type too.  */
	    if (TYPE_NULL_BOUNDS (t))
	      TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
	  }

      /* Chain REF and its variants at the end.  */
      new_ref = TYPE_REFERENCE_TO (new_type);
      if (new_ref)
	{
	  while (TYPE_NEXT_REF_TO (new_ref))
	    new_ref = TYPE_NEXT_REF_TO (new_ref);
	  TYPE_NEXT_REF_TO (new_ref) = ref;
	}
      else
	TYPE_REFERENCE_TO (new_type) = ref;

      /* Now adjust them.  */
      for (; ref; ref = TYPE_NEXT_REF_TO (ref))
	for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
	  TREE_TYPE (t) = new_type;

      TYPE_POINTER_TO (old_type) = NULL_TREE;
      TYPE_REFERENCE_TO (old_type) = NULL_TREE;
    }

  /* Now deal with the unconstrained array case.  In this case the pointer
     is actually a record where both fields are pointers to dummy nodes.
     Turn them into pointers to the correct types using update_pointer_to.
     Likewise for the pointer to the object record (thin pointer).  */
  else
    {
      tree new_ptr = TYPE_POINTER_TO (new_type);

      gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));

      /* If PTR already points to NEW_TYPE, nothing to do.  This can happen
	 since update_pointer_to can be invoked multiple times on the same
	 couple of types because of the type variants.  */
      if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
	return;

      /* First field: pointer to the array data.  */
      update_pointer_to
	(TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
	 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));

      /* Second field: pointer to the bounds template.  */
      update_pointer_to
	(TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
	 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));

      update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
			 TYPE_OBJECT_RECORD_TYPE (new_type));

      TYPE_POINTER_TO (old_type) = NULL_TREE;
    }
}
4294 \f
/* Convert EXPR, a pointer to a constrained array, into a pointer to an
   unconstrained one.  This involves making or finding a template.  TYPE
   is the fat pointer type to convert to; its first field is the pointer
   to the array data and its second field is the pointer to the bounds
   template.  */

static tree
convert_to_fat_pointer (tree type, tree expr)
{
  tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
  tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
  tree etype = TREE_TYPE (expr);
  tree template_tree;
  VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 2);

  /* If EXPR is null, make a fat pointer that contains a null pointer to the
     array (compare_fat_pointers ensures that this is the full discriminant)
     and a valid pointer to the bounds.  This latter property is necessary
     since the compiler can hoist the load of the bounds done through it.  */
  if (integer_zerop (expr))
    {
      tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
      tree null_bounds, t;

      /* The null-bounds expression is built once per pointer-to-template
	 type and cached on it for reuse.  */
      if (TYPE_NULL_BOUNDS (ptr_template_type))
	null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
      else
	{
	  /* The template type can still be dummy at this point so we build an
	     empty constructor.  The middle-end will fill it in with zeros.  */
	  t = build_constructor (template_type, NULL);
	  TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
	  null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
	  SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
	}

      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
			      fold_convert (p_array_type, null_pointer_node));
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
      t = build_constructor (type, v);
      /* Do not set TREE_CONSTANT so as to force T to static memory.  */
      TREE_CONSTANT (t) = 0;
      TREE_STATIC (t) = 1;

      return t;
    }

  /* If EXPR is a thin pointer, make template and data from the record.  */
  if (TYPE_IS_THIN_POINTER_P (etype))
    {
      tree field = TYPE_FIELDS (TREE_TYPE (etype));

      expr = gnat_protect_expr (expr);
      if (TREE_CODE (expr) == ADDR_EXPR)
	expr = TREE_OPERAND (expr, 0);
      else
	{
	  /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
	     the thin pointer value has been shifted so we first need to shift
	     it back to get the template address.  */
	  if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
	    expr
	      = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
				 fold_build1 (NEGATE_EXPR, sizetype,
					      byte_position
					      (DECL_CHAIN (field))));
	  expr = build1 (INDIRECT_REF, TREE_TYPE (etype), expr);
	}

      /* The first field of the record is the template, the second the
	 array data itself.  */
      template_tree = build_component_ref (expr, NULL_TREE, field, false);
      expr = build_unary_op (ADDR_EXPR, NULL_TREE,
			     build_component_ref (expr, NULL_TREE,
						  DECL_CHAIN (field), false));
    }

  /* Otherwise, build the constructor for the template.  */
  else
    template_tree = build_template (template_type, TREE_TYPE (etype), expr);

  /* The final result is a constructor for the fat pointer.

     If EXPR is an argument of a foreign convention subprogram, the type it
     points to is directly the component type.  In this case, the expression
     type may not match the corresponding FIELD_DECL type at this point, so we
     call "convert" here to fix that up if necessary.  This type consistency is
     required, for instance because it ensures that possible later folding of
     COMPONENT_REFs against this constructor always yields something of the
     same type as the initial reference.

     Note that the call to "build_template" above is still fine because it
     will only refer to the provided TEMPLATE_TYPE in this case.  */
  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
			  convert (p_array_type, expr));
  CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
			  build_unary_op (ADDR_EXPR, NULL_TREE,
					  template_tree));
  return gnat_build_constructor (type, v);
}
4390 \f
/* Create an expression whose value is that of EXPR,
   converted to type TYPE.  The TREE_TYPE of the value
   is always TYPE.  This function implements all reasonable
   conversions; callers should filter out those that are
   not permitted by the language being compiled.  */

tree
convert (tree type, tree expr)
{
  tree etype = TREE_TYPE (expr);
  enum tree_code ecode = TREE_CODE (etype);
  enum tree_code code = TREE_CODE (type);

  /* If the expression is already of the right type, we are done.  */
  if (etype == type)
    return expr;

  /* If both input and output have padding and are of variable size, do this
     as an unchecked conversion.  Likewise if one is a mere variant of the
     other, so we avoid a pointless unpad/repad sequence.  */
  else if (code == RECORD_TYPE && ecode == RECORD_TYPE
	   && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
	   && (!TREE_CONSTANT (TYPE_SIZE (type))
	       || !TREE_CONSTANT (TYPE_SIZE (etype))
	       || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
	       || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
		  == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
    /* Fall through to the unchecked conversion at the bottom.  */
    ;

  /* If the output type has padding, convert to the inner type and make a
     constructor to build the record, unless a variable size is involved.  */
  else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
    {
      VEC(constructor_elt,gc) *v;

      /* If we previously converted from another type and our type is
	 of variable size, remove the conversion to avoid the need for
	 variable-sized temporaries.  Likewise for a conversion between
	 original and packable version.  */
      if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
	  && (!TREE_CONSTANT (TYPE_SIZE (type))
	      || (ecode == RECORD_TYPE
		  && TYPE_NAME (etype)
		     == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
	expr = TREE_OPERAND (expr, 0);

      /* If we are just removing the padding from expr, convert the original
	 object if we have variable size in order to avoid the need for some
	 variable-sized temporaries.  Likewise if the padding is a variant
	 of the other, so we avoid a pointless unpad/repad sequence.  */
      if (TREE_CODE (expr) == COMPONENT_REF
	  && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
	  && (!TREE_CONSTANT (TYPE_SIZE (type))
	      || TYPE_MAIN_VARIANT (type)
		 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
	      || (ecode == RECORD_TYPE
		  && TYPE_NAME (etype)
		     == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
	return convert (type, TREE_OPERAND (expr, 0));

      /* If the inner type is of self-referential size and the expression type
	 is a record, do this as an unchecked conversion.  But first pad the
	 expression if possible to have the same size on both sides.  */
      if (ecode == RECORD_TYPE
	  && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
	{
	  if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
	    expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
					    false, false, false, true),
			    expr);
	  return unchecked_convert (type, expr, false);
	}

      /* If we are converting between array types with variable size, do the
	 final conversion as an unchecked conversion, again to avoid the need
	 for some variable-sized temporaries.  If valid, this conversion is
	 very likely purely technical and without real effects.  */
      if (ecode == ARRAY_TYPE
	  && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
	  && !TREE_CONSTANT (TYPE_SIZE (etype))
	  && !TREE_CONSTANT (TYPE_SIZE (type)))
	return unchecked_convert (type,
				  convert (TREE_TYPE (TYPE_FIELDS (type)),
					   expr),
				  false);

      /* Otherwise wrap the converted inner object in a one-field record
	 of the padded type.  */
      v = VEC_alloc (constructor_elt, gc, 1);
      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
			      convert (TREE_TYPE (TYPE_FIELDS (type)), expr));
      return gnat_build_constructor (type, v);
    }

  /* If the input type has padding, remove it and convert to the output type.
     The conditions ordering is arranged to ensure that the output type is not
     a padding type here, as it is not clear whether the conversion would
     always be correct if this was to happen.  */
  else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
    {
      tree unpadded;

      /* If we have just converted to this padded type, just get the
	 inner expression.  */
      if (TREE_CODE (expr) == CONSTRUCTOR
	  && !VEC_empty (constructor_elt, CONSTRUCTOR_ELTS (expr))
	  && VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0).index
	     == TYPE_FIELDS (etype))
	unpadded
	  = VEC_index (constructor_elt, CONSTRUCTOR_ELTS (expr), 0).value;

      /* Otherwise, build an explicit component reference.  */
      else
	unpadded
	  = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);

      return convert (type, unpadded);
    }

  /* If the input is a biased type, adjust first: add back the bias (the
     minimum value of the type) to recover the actual value.  */
  if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
    return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
				       fold_convert (TREE_TYPE (etype),
						     expr),
				       TYPE_MIN_VALUE (etype)));

  /* If the input is a justified modular type, we need to extract the actual
     object before converting it to any other type with the exceptions of an
     unconstrained array or of a mere type variant.  It is useful to avoid the
     extraction and conversion in the type variant case because it could end
     up replacing a VAR_DECL expr by a constructor and we might be about to
     take the address of the result.  */
  if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
      && code != UNCONSTRAINED_ARRAY_TYPE
      && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
    return convert (type, build_component_ref (expr, NULL_TREE,
					       TYPE_FIELDS (etype), false));

  /* If converting to a type that contains a template, convert to the data
     type and then build the template.  */
  if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
    {
      tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
      VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 2);

      /* If the source already has a template, get a reference to the
	 associated array only, as we are going to rebuild a template
	 for the target type anyway.  */
      expr = maybe_unconstrained_array (expr);

      CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
			      build_template (TREE_TYPE (TYPE_FIELDS (type)),
					      obj_type, NULL_TREE));
      CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
			      convert (obj_type, expr));
      return gnat_build_constructor (type, v);
    }

  /* There are some cases of expressions that we process specially.  */
  switch (TREE_CODE (expr))
    {
    case ERROR_MARK:
      return expr;

    case NULL_EXPR:
      /* Just set its type here.  For TRANSFORM_EXPR, we will do the actual
	 conversion in gnat_expand_expr.  NULL_EXPR does not represent
	 an actual value, so no conversion is needed.  */
      expr = copy_node (expr);
      TREE_TYPE (expr) = type;
      return expr;

    case STRING_CST:
      /* If we are converting a STRING_CST to another constrained array type,
	 just make a new one in the proper type.  */
      if (code == ecode && AGGREGATE_TYPE_P (etype)
	  && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
	       && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  return expr;
	}
      break;

    case VECTOR_CST:
      /* If we are converting a VECTOR_CST to a mere variant type, just make
	 a new one in the proper type.  */
      if (code == ecode && gnat_types_compatible_p (type, etype))
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  return expr;
	}

      /* ... fall through ... (harmless for a VECTOR_CST: none of the
	 CONSTRUCTOR-specific conditions below can match since ECODE is
	 VECTOR_TYPE here, so we end up at the break).  */

    case CONSTRUCTOR:
      /* If we are converting a CONSTRUCTOR to a mere variant type, just make
	 a new one in the proper type.  */
      if (code == ecode && gnat_types_compatible_p (type, etype))
	{
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  CONSTRUCTOR_ELTS (expr)
	    = VEC_copy (constructor_elt, gc, CONSTRUCTOR_ELTS (expr));
	  return expr;
	}

      /* Likewise for a conversion between original and packable version, or
	 conversion between types of the same size and with the same list of
	 fields, but we have to work harder to preserve type consistency.  */
      if (code == ecode
	  && code == RECORD_TYPE
	  && (TYPE_NAME (type) == TYPE_NAME (etype)
	      || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))

	{
	  VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
	  unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
	  VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, len);
	  tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
	  unsigned HOST_WIDE_INT idx;
	  tree index, value;

	  /* Whether we need to clear TREE_CONSTANT et al. on the output
	     constructor when we convert in place.  */
	  bool clear_constant = false;

	  FOR_EACH_CONSTRUCTOR_ELT(e, idx, index, value)
	    {
	      /* We expect only simple constructors.  */
	      if (!SAME_FIELD_P (index, efield))
		break;
	      /* The field must be the same.  */
	      if (!SAME_FIELD_P (efield, field))
		break;
	      constructor_elt elt = {field, convert (TREE_TYPE (field), value)};
	      VEC_quick_push (constructor_elt, v, elt);

	      /* If packing has made this field a bitfield and the input
		 value couldn't be emitted statically any more, we need to
		 clear TREE_CONSTANT on our output.  */
	      if (!clear_constant
		  && TREE_CONSTANT (expr)
		  && !CONSTRUCTOR_BITFIELD_P (efield)
		  && CONSTRUCTOR_BITFIELD_P (field)
		  && !initializer_constant_valid_for_bitfield_p (value))
		clear_constant = true;

	      efield = DECL_CHAIN (efield);
	      field = DECL_CHAIN (field);
	    }

	  /* If we have been able to match and convert all the input fields
	     to their output type, convert in place now.  We'll fallback to a
	     view conversion downstream otherwise.  */
	  if (idx == len)
	    {
	      expr = copy_node (expr);
	      TREE_TYPE (expr) = type;
	      CONSTRUCTOR_ELTS (expr) = v;
	      if (clear_constant)
		TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
	      return expr;
	    }
	}

      /* Likewise for a conversion between array type and vector type with a
	 compatible representative array.  */
      else if (code == VECTOR_TYPE
	       && ecode == ARRAY_TYPE
	       && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
					   etype))
	{
	  VEC(constructor_elt,gc) *e = CONSTRUCTOR_ELTS (expr);
	  unsigned HOST_WIDE_INT len = VEC_length (constructor_elt, e);
	  VEC(constructor_elt,gc) *v;
	  unsigned HOST_WIDE_INT ix;
	  tree value;

	  /* Build a VECTOR_CST from a *constant* array constructor.  */
	  if (TREE_CONSTANT (expr))
	    {
	      bool constant_p = true;

	      /* Iterate through elements and check if all constructor
		 elements are *_CSTs.  */
	      FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
		if (!CONSTANT_CLASS_P (value))
		  {
		    constant_p = false;
		    break;
		  }

	      if (constant_p)
		return build_vector_from_ctor (type,
					       CONSTRUCTOR_ELTS (expr));
	    }

	  /* Otherwise, build a regular vector constructor.  */
	  v = VEC_alloc (constructor_elt, gc, len);
	  FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
	    {
	      constructor_elt elt = {NULL_TREE, value};
	      VEC_quick_push (constructor_elt, v, elt);
	    }
	  expr = copy_node (expr);
	  TREE_TYPE (expr) = type;
	  CONSTRUCTOR_ELTS (expr) = v;
	  return expr;
	}
      break;

    case UNCONSTRAINED_ARRAY_REF:
      /* First retrieve the underlying array.  */
      expr = maybe_unconstrained_array (expr);
      etype = TREE_TYPE (expr);
      ecode = TREE_CODE (etype);
      break;

    case VIEW_CONVERT_EXPR:
      {
	/* GCC 4.x is very sensitive to type consistency overall, and view
	   conversions thus are very frequent.  Even though just "convert"ing
	   the inner operand to the output type is fine in most cases, it
	   might expose unexpected input/output type mismatches in special
	   circumstances so we avoid such recursive calls when we can.  */
	tree op0 = TREE_OPERAND (expr, 0);

	/* If we are converting back to the original type, we can just
	   lift the input conversion.  This is a common occurrence with
	   switches back-and-forth amongst type variants.  */
	if (type == TREE_TYPE (op0))
	  return op0;

	/* Otherwise, if we're converting between two aggregate or vector
	   types, we might be allowed to substitute the VIEW_CONVERT_EXPR
	   target type in place or to just convert the inner expression.  */
	if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
	    || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
	  {
	    /* If we are converting between mere variants, we can just
	       substitute the VIEW_CONVERT_EXPR in place.  */
	    if (gnat_types_compatible_p (type, etype))
	      return build1 (VIEW_CONVERT_EXPR, type, op0);

	    /* Otherwise, we may just bypass the input view conversion unless
	       one of the types is a fat pointer,  which is handled by
	       specialized code below which relies on exact type matching.  */
	    else if (!TYPE_IS_FAT_POINTER_P (type)
		     && !TYPE_IS_FAT_POINTER_P (etype))
	      return convert (type, op0);
	  }

	break;
      }

    default:
      break;
    }

  /* Check for converting to a pointer to an unconstrained array.  */
  if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
    return convert_to_fat_pointer (type, expr);

  /* If we are converting between two aggregate or vector types that are mere
     variants, just make a VIEW_CONVERT_EXPR.  Likewise when we are converting
     to a vector type from its representative array type.  */
  else if ((code == ecode
	    && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
	    && gnat_types_compatible_p (type, etype))
	   || (code == VECTOR_TYPE
	       && ecode == ARRAY_TYPE
	       && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
					   etype)))
    return build1 (VIEW_CONVERT_EXPR, type, expr);

  /* If we are converting between tagged types, try to upcast properly.
     Walk up the chain of _parent fields looking for the target type.  */
  else if (ecode == RECORD_TYPE && code == RECORD_TYPE
	   && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
    {
      tree child_etype = etype;
      do {
	tree field = TYPE_FIELDS (child_etype);
	if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
	  return build_component_ref (expr, NULL_TREE, field, false);
	child_etype = TREE_TYPE (field);
      } while (TREE_CODE (child_etype) == RECORD_TYPE);
    }

  /* If we are converting from a smaller form of record type back to it, just
     make a VIEW_CONVERT_EXPR.  But first pad the expression to have the same
     size on both sides.  */
  else if (ecode == RECORD_TYPE && code == RECORD_TYPE
	   && smaller_form_type_p (etype, type))
    {
      expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
				      false, false, false, true),
		      expr);
      return build1 (VIEW_CONVERT_EXPR, type, expr);
    }

  /* In all other cases of related types, make a NOP_EXPR.  */
  else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
    return fold_convert (type, expr);

  switch (code)
    {
    case VOID_TYPE:
      return fold_build1 (CONVERT_EXPR, type, expr);

    case INTEGER_TYPE:
      if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
	  && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
	      || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
	return unchecked_convert (type, expr, false);
      else if (TYPE_BIASED_REPRESENTATION_P (type))
	/* Subtract the bias (the minimum value of the type) to build the
	   biased representation of the value.  */
	return fold_convert (type,
			     fold_build2 (MINUS_EXPR, TREE_TYPE (type),
					  convert (TREE_TYPE (type), expr),
					  TYPE_MIN_VALUE (type)));

      /* ... fall through ... */

    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      /* If we are converting an additive expression to an integer type
	 with lower precision, be wary of the optimization that can be
	 applied by convert_to_integer.  There are 2 problematic cases:
	   - if the first operand was originally of a biased type,
	     because we could be recursively called to convert it
	     to an intermediate type and thus rematerialize the
	     additive operator endlessly,
	   - if the expression contains a placeholder, because an
	     intermediate conversion that changes the sign could
	     be inserted and thus introduce an artificial overflow
	     at compile time when the placeholder is substituted.  */
      if (code == INTEGER_TYPE
	  && ecode == INTEGER_TYPE
	  && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
	  && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
	{
	  tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);

	  if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
	       && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
	      || CONTAINS_PLACEHOLDER_P (expr))
	    return build1 (NOP_EXPR, type, expr);
	}

      return fold (convert_to_integer (type, expr));

    case POINTER_TYPE:
    case REFERENCE_TYPE:
      /* If converting between two thin pointers, adjust if needed to account
	 for differing offsets from the base pointer, depending on whether
	 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type.  */
      if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
	{
	  tree etype_pos
	    = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)) != NULL_TREE
	      ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
	      : size_zero_node;
	  tree type_pos
	    = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != NULL_TREE
	      ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
	      : size_zero_node;
	  tree byte_diff = size_diffop (type_pos, etype_pos);

	  expr = build1 (NOP_EXPR, type, expr);
	  if (integer_zerop (byte_diff))
	    return expr;

	  return build_binary_op (POINTER_PLUS_EXPR, type, expr,
				  fold_convert (sizetype, byte_diff));
	}

      /* If converting fat pointer to normal or thin pointer, get the pointer
	 to the array and then convert it.  */
      if (TYPE_IS_FAT_POINTER_P (etype))
	expr
	  = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);

      return fold (convert_to_pointer (type, expr));

    case REAL_TYPE:
      return fold (convert_to_real (type, expr));

    case RECORD_TYPE:
      /* For a scalar converted to a justified modular type, wrap it in a
	 one-field constructor of that type.  */
      if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
	{
	  VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 1);

	  CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
				  convert (TREE_TYPE (TYPE_FIELDS (type)),
					   expr));
	  return gnat_build_constructor (type, v);
	}

      /* ... fall through ... */

    case ARRAY_TYPE:
      /* In these cases, assume the front-end has validated the conversion.
	 If the conversion is valid, it will be a bit-wise conversion, so
	 it can be viewed as an unchecked conversion.  */
      return unchecked_convert (type, expr, false);

    case UNION_TYPE:
      /* This is either a conversion between a tagged type and some
	 subtype, which we have to mark as a UNION_TYPE because of
	 overlapping fields or a conversion of an Unchecked_Union.  */
      return unchecked_convert (type, expr, false);

    case UNCONSTRAINED_ARRAY_TYPE:
      /* If the input is a VECTOR_TYPE, convert to the representative
	 array type first.  */
      if (ecode == VECTOR_TYPE)
	{
	  expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
	  etype = TREE_TYPE (expr);
	  ecode = TREE_CODE (etype);
	}

      /* If EXPR is a constrained array, take its address, convert it to a
	 fat pointer, and then dereference it.  Likewise if EXPR is a
	 record containing both a template and a constrained array.
	 Note that a record representing a justified modular type
	 always represents a packed constrained array.  */
      if (ecode == ARRAY_TYPE
	  || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
	  || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
	  || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
	return
	  build_unary_op
	    (INDIRECT_REF, NULL_TREE,
	     convert_to_fat_pointer (TREE_TYPE (type),
				     build_unary_op (ADDR_EXPR,
						     NULL_TREE, expr)));

      /* Do something very similar for converting one unconstrained
	 array to another.  */
      else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
	return
	  build_unary_op (INDIRECT_REF, NULL_TREE,
			  convert (TREE_TYPE (type),
				   build_unary_op (ADDR_EXPR,
						   NULL_TREE, expr)));
      else
	gcc_unreachable ();

    case COMPLEX_TYPE:
      return fold (convert_to_complex (type, expr));

    default:
      gcc_unreachable ();
    }
}
4945
/* Create an expression whose value is that of EXPR converted to the common
   index type, which is sizetype.  EXPR is supposed to be in the base type
   of the GNAT index type.  Calling it is equivalent to doing

     convert (sizetype, expr)

   but we try to distribute the type conversion with the knowledge that EXPR
   cannot overflow in its type.  This is a best-effort approach and we fall
   back to the above expression as soon as difficulties are encountered.

   This is necessary to overcome issues that arise when the GNAT base index
   type and the GCC common index type (sizetype) don't have the same size,
   which is quite frequent on 64-bit architectures.  In this case, and if
   the GNAT base index type is signed but the iteration type of the loop has
   been forced to unsigned, the loop scalar evolution engine cannot compute
   a simple evolution for the general induction variables associated with the
   array indices, because it will preserve the wrap-around semantics in the
   unsigned type of their "inner" part.  As a result, many loop optimizations
   are blocked.

   The solution is to use a special (basic) induction variable that is at
   least as large as sizetype, and to express the aforementioned general
   induction variables in terms of this induction variable, eliminating
   the problematic intermediate truncation to the GNAT base index type.
   This is possible as long as the original expression doesn't overflow
   and if the middle-end hasn't introduced artificial overflows in the
   course of the various simplification it can make to the expression.  */

tree
convert_to_index_type (tree expr)
{
  enum tree_code code = TREE_CODE (expr);
  tree type = TREE_TYPE (expr);

  /* If the type is unsigned, overflow is allowed so we cannot be sure that
     EXPR doesn't overflow.  Keep it simple if optimization is disabled.  */
  if (TYPE_UNSIGNED (type) || !optimize)
    return convert (sizetype, expr);

  switch (code)
    {
    case VAR_DECL:
      /* The main effect of the function: replace a loop parameter with its
	 associated special induction variable.  */
      if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
	expr = DECL_INDUCTION_VAR (expr);
      break;

    CASE_CONVERT:
      {
	tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
	/* Bail out as soon as we suspect some sort of type frobbing.  */
	if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
	    || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
	  break;
      }

      /* ... fall through ...  (CODE below is the original conversion
	 code, so the operation is rebuilt unchanged in sizetype.)  */

    case NON_LVALUE_EXPR:
      /* Distribute the conversion into the single operand.  */
      return fold_build1 (code, sizetype,
			  convert_to_index_type (TREE_OPERAND (expr, 0)));

    case PLUS_EXPR:
    case MINUS_EXPR:
    case MULT_EXPR:
      /* Distribute the conversion into both operands of the arithmetic
	 operation, relying on the no-overflow premise stated above.  */
      return fold_build2 (code, sizetype,
			  convert_to_index_type (TREE_OPERAND (expr, 0)),
			  convert_to_index_type (TREE_OPERAND (expr, 1)));

    case COMPOUND_EXPR:
      /* Only the value (second) operand needs converting.  */
      return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
			  convert_to_index_type (TREE_OPERAND (expr, 1)));

    case COND_EXPR:
      /* Distribute the conversion into both arms; the condition is kept.  */
      return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
			  convert_to_index_type (TREE_OPERAND (expr, 1)),
			  convert_to_index_type (TREE_OPERAND (expr, 2)));

    default:
      break;
    }

  /* Fallback: plain conversion of the (possibly replaced) expression.  */
  return convert (sizetype, expr);
}
5031 \f
5032 /* Remove all conversions that are done in EXP. This includes converting
5033 from a padded type or to a justified modular type. If TRUE_ADDRESS
5034 is true, always return the address of the containing object even if
5035 the address is not bit-aligned. */
5036
5037 tree
5038 remove_conversions (tree exp, bool true_address)
5039 {
5040 switch (TREE_CODE (exp))
5041 {
5042 case CONSTRUCTOR:
5043 if (true_address
5044 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
5045 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
5046 return
5047 remove_conversions (VEC_index (constructor_elt,
5048 CONSTRUCTOR_ELTS (exp), 0).value,
5049 true);
5050 break;
5051
5052 case COMPONENT_REF:
5053 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
5054 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
5055 break;
5056
5057 CASE_CONVERT:
5058 case VIEW_CONVERT_EXPR:
5059 case NON_LVALUE_EXPR:
5060 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
5061
5062 default:
5063 break;
5064 }
5065
5066 return exp;
5067 }
5068 \f
/* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
   refers to the underlying array.  If it has TYPE_CONTAINS_TEMPLATE_P,
   likewise return an expression pointing to the underlying array.
   Otherwise return EXP unchanged.  */

tree
maybe_unconstrained_array (tree exp)
{
  enum tree_code code = TREE_CODE (exp);
  tree type = TREE_TYPE (exp);

  switch (TREE_CODE (type))
    {
    case UNCONSTRAINED_ARRAY_TYPE:
      if (code == UNCONSTRAINED_ARRAY_REF)
	{
	  /* Remember the access flags of the original reference so they
	     can be carried over to the rebuilt dereference below.  */
	  const bool read_only = TREE_READONLY (exp);
	  const bool no_trap = TREE_THIS_NOTRAP (exp);

	  /* EXP becomes the operand of the reference, i.e. the fat (or
	     thin) pointer, and TYPE its pointer type.  */
	  exp = TREE_OPERAND (exp, 0);
	  type = TREE_TYPE (exp);

	  if (TREE_CODE (exp) == COND_EXPR)
	    {
	      /* Distribute the dereference of the array pointer field
		 into both arms of the conditional expression.  */
	      tree op1
		= build_unary_op (INDIRECT_REF, NULL_TREE,
				  build_component_ref (TREE_OPERAND (exp, 1),
						       NULL_TREE,
						       TYPE_FIELDS (type),
						       false));
	      tree op2
		= build_unary_op (INDIRECT_REF, NULL_TREE,
				  build_component_ref (TREE_OPERAND (exp, 2),
						       NULL_TREE,
						       TYPE_FIELDS (type),
						       false));

	      exp = build3 (COND_EXPR,
			    TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
			    TREE_OPERAND (exp, 0), op1, op2);
	    }
	  else
	    {
	      /* Dereference the array pointer field directly and restore
		 the flags of the original reference on the result.  */
	      exp = build_unary_op (INDIRECT_REF, NULL_TREE,
				    build_component_ref (exp, NULL_TREE,
							 TYPE_FIELDS (type),
							 false));
	      TREE_READONLY (exp) = read_only;
	      TREE_THIS_NOTRAP (exp) = no_trap;
	    }
	}

      /* A NULL_EXPR carries no value; just rebuild it with the type of
	 the underlying array.  */
      else if (code == NULL_EXPR)
	exp = build1 (NULL_EXPR,
		      TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
		      TREE_OPERAND (exp, 0));
      break;

    case RECORD_TYPE:
      /* If this is a padded type and it contains a template, convert to the
	 unpadded type first.  */
      if (TYPE_PADDING_P (type)
	  && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
	  && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
	{
	  exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
	  type = TREE_TYPE (exp);
	}

      if (TYPE_CONTAINS_TEMPLATE_P (type))
	{
	  /* The array is the second field, after the template.  */
	  exp = build_component_ref (exp, NULL_TREE,
				     DECL_CHAIN (TYPE_FIELDS (type)),
				     false);
	  type = TREE_TYPE (exp);

	  /* If the array type is padded, convert to the unpadded type.  */
	  if (TYPE_IS_PADDING_P (type))
	    exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
	}
      break;

    default:
      break;
    }

  return exp;
}
5156 \f
5157 /* Return true if EXPR is an expression that can be folded as an operand
5158 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
5159
5160 static bool
5161 can_fold_for_view_convert_p (tree expr)
5162 {
5163 tree t1, t2;
5164
5165 /* The folder will fold NOP_EXPRs between integral types with the same
5166 precision (in the middle-end's sense). We cannot allow it if the
5167 types don't have the same precision in the Ada sense as well. */
5168 if (TREE_CODE (expr) != NOP_EXPR)
5169 return true;
5170
5171 t1 = TREE_TYPE (expr);
5172 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
5173
5174 /* Defer to the folder for non-integral conversions. */
5175 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
5176 return true;
5177
5178 /* Only fold conversions that preserve both precisions. */
5179 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
5180 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
5181 return true;
5182
5183 return false;
5184 }
5185
5186 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
5187 If NOTRUNC_P is true, truncation operations should be suppressed.
5188
5189 Special care is required with (source or target) integral types whose
5190 precision is not equal to their size, to make sure we fetch or assign
5191 the value bits whose location might depend on the endianness, e.g.
5192
5193 Rmsize : constant := 8;
5194 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
5195
5196 type Bit_Array is array (1 .. Rmsize) of Boolean;
5197 pragma Pack (Bit_Array);
5198
5199 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
5200
5201 Value : Int := 2#1000_0001#;
5202 Vbits : Bit_Array := To_Bit_Array (Value);
5203
5204 we expect the 8 bits at Vbits'Address to always contain Value, while
5205 their original location depends on the endianness, at Value'Address
5206 on a little-endian architecture but not on a big-endian one. */
5207
5208 tree
5209 unchecked_convert (tree type, tree expr, bool notrunc_p)
5210 {
5211 tree etype = TREE_TYPE (expr);
5212 enum tree_code ecode = TREE_CODE (etype);
5213 enum tree_code code = TREE_CODE (type);
5214 int c;
5215
5216 /* If the expression is already of the right type, we are done. */
5217 if (etype == type)
5218 return expr;
5219
5220 /* If both types types are integral just do a normal conversion.
5221 Likewise for a conversion to an unconstrained array. */
5222 if ((((INTEGRAL_TYPE_P (type)
5223 && !(code == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (type)))
5224 || (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
5225 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
5226 && ((INTEGRAL_TYPE_P (etype)
5227 && !(ecode == INTEGER_TYPE && TYPE_VAX_FLOATING_POINT_P (etype)))
5228 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
5229 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
5230 || code == UNCONSTRAINED_ARRAY_TYPE)
5231 {
5232 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
5233 {
5234 tree ntype = copy_type (etype);
5235 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
5236 TYPE_MAIN_VARIANT (ntype) = ntype;
5237 expr = build1 (NOP_EXPR, ntype, expr);
5238 }
5239
5240 if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
5241 {
5242 tree rtype = copy_type (type);
5243 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
5244 TYPE_MAIN_VARIANT (rtype) = rtype;
5245 expr = convert (rtype, expr);
5246 expr = build1 (NOP_EXPR, type, expr);
5247 }
5248 else
5249 expr = convert (type, expr);
5250 }
5251
5252 /* If we are converting to an integral type whose precision is not equal
5253 to its size, first unchecked convert to a record type that contains an
5254 field of the given precision. Then extract the field. */
5255 else if (INTEGRAL_TYPE_P (type)
5256 && TYPE_RM_SIZE (type)
5257 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
5258 GET_MODE_BITSIZE (TYPE_MODE (type))))
5259 {
5260 tree rec_type = make_node (RECORD_TYPE);
5261 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
5262 tree field_type, field;
5263
5264 if (TYPE_UNSIGNED (type))
5265 field_type = make_unsigned_type (prec);
5266 else
5267 field_type = make_signed_type (prec);
5268 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));
5269
5270 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
5271 NULL_TREE, NULL_TREE, 1, 0);
5272
5273 TYPE_FIELDS (rec_type) = field;
5274 layout_type (rec_type);
5275
5276 expr = unchecked_convert (rec_type, expr, notrunc_p);
5277 expr = build_component_ref (expr, NULL_TREE, field, false);
5278 expr = fold_build1 (NOP_EXPR, type, expr);
5279 }
5280
5281 /* Similarly if we are converting from an integral type whose precision is
5282 not equal to its size, first copy into a field of the given precision
5283 and unchecked convert the record type. */
5284 else if (INTEGRAL_TYPE_P (etype)
5285 && TYPE_RM_SIZE (etype)
5286 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
5287 GET_MODE_BITSIZE (TYPE_MODE (etype))))
5288 {
5289 tree rec_type = make_node (RECORD_TYPE);
5290 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
5291 VEC(constructor_elt,gc) *v = VEC_alloc (constructor_elt, gc, 1);
5292 tree field_type, field;
5293
5294 if (TYPE_UNSIGNED (etype))
5295 field_type = make_unsigned_type (prec);
5296 else
5297 field_type = make_signed_type (prec);
5298 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));
5299
5300 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
5301 NULL_TREE, NULL_TREE, 1, 0);
5302
5303 TYPE_FIELDS (rec_type) = field;
5304 layout_type (rec_type);
5305
5306 expr = fold_build1 (NOP_EXPR, field_type, expr);
5307 CONSTRUCTOR_APPEND_ELT (v, field, expr);
5308 expr = gnat_build_constructor (rec_type, v);
5309 expr = unchecked_convert (type, expr, notrunc_p);
5310 }
5311
5312 /* If we are converting from a scalar type to a type with a different size,
5313 we need to pad to have the same size on both sides.
5314
5315 ??? We cannot do it unconditionally because unchecked conversions are
5316 used liberally by the front-end to implement polymorphism, e.g. in:
5317
5318 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
5319 return p___size__4 (p__object!(S191s.all));
5320
5321 so we skip all expressions that are references. */
5322 else if (!REFERENCE_CLASS_P (expr)
5323 && !AGGREGATE_TYPE_P (etype)
5324 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
5325 && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
5326 {
5327 if (c < 0)
5328 {
5329 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
5330 false, false, false, true),
5331 expr);
5332 expr = unchecked_convert (type, expr, notrunc_p);
5333 }
5334 else
5335 {
5336 tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
5337 false, false, false, true);
5338 expr = unchecked_convert (rec_type, expr, notrunc_p);
5339 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (rec_type),
5340 false);
5341 }
5342 }
5343
5344 /* We have a special case when we are converting between two unconstrained
5345 array types. In that case, take the address, convert the fat pointer
5346 types, and dereference. */
5347 else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
5348 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
5349 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
5350 build_unary_op (ADDR_EXPR, NULL_TREE,
5351 expr)));
5352
5353 /* Another special case is when we are converting to a vector type from its
5354 representative array type; this a regular conversion. */
5355 else if (code == VECTOR_TYPE
5356 && ecode == ARRAY_TYPE
5357 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
5358 etype))
5359 expr = convert (type, expr);
5360
5361 else
5362 {
5363 expr = maybe_unconstrained_array (expr);
5364 etype = TREE_TYPE (expr);
5365 ecode = TREE_CODE (etype);
5366 if (can_fold_for_view_convert_p (expr))
5367 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
5368 else
5369 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
5370 }
5371
5372 /* If the result is an integral type whose precision is not equal to its
5373 size, sign- or zero-extend the result. We need not do this if the input
5374 is an integral type of the same precision and signedness or if the output
5375 is a biased type or if both the input and output are unsigned. */
5376 if (!notrunc_p
5377 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
5378 && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
5379 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
5380 GET_MODE_BITSIZE (TYPE_MODE (type)))
5381 && !(INTEGRAL_TYPE_P (etype)
5382 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
5383 && operand_equal_p (TYPE_RM_SIZE (type),
5384 (TYPE_RM_SIZE (etype) != 0
5385 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
5386 0))
5387 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
5388 {
5389 tree base_type
5390 = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
5391 tree shift_expr
5392 = convert (base_type,
5393 size_binop (MINUS_EXPR,
5394 bitsize_int
5395 (GET_MODE_BITSIZE (TYPE_MODE (type))),
5396 TYPE_RM_SIZE (type)));
5397 expr
5398 = convert (type,
5399 build_binary_op (RSHIFT_EXPR, base_type,
5400 build_binary_op (LSHIFT_EXPR, base_type,
5401 convert (base_type, expr),
5402 shift_expr),
5403 shift_expr));
5404 }
5405
5406 /* An unchecked conversion should never raise Constraint_Error. The code
5407 below assumes that GCC's conversion routines overflow the same way that
5408 the underlying hardware does. This is probably true. In the rare case
5409 when it is false, we can rely on the fact that such conversions are
5410 erroneous anyway. */
5411 if (TREE_CODE (expr) == INTEGER_CST)
5412 TREE_OVERFLOW (expr) = 0;
5413
5414 /* If the sizes of the types differ and this is an VIEW_CONVERT_EXPR,
5415 show no longer constant. */
5416 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
5417 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
5418 OEP_ONLY_CONST))
5419 TREE_CONSTANT (expr) = 0;
5420
5421 return expr;
5422 }
5423 \f
5424 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
5425 the latter being a record type as predicated by Is_Record_Type. */
5426
5427 enum tree_code
5428 tree_code_for_record_type (Entity_Id gnat_type)
5429 {
5430 Node_Id component_list, component;
5431
5432 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5433 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
5434 if (!Is_Unchecked_Union (gnat_type))
5435 return RECORD_TYPE;
5436
5437 gnat_type = Implementation_Base_Type (gnat_type);
5438 component_list
5439 = Component_List (Type_Definition (Declaration_Node (gnat_type)));
5440
5441 for (component = First_Non_Pragma (Component_Items (component_list));
5442 Present (component);
5443 component = Next_Non_Pragma (component))
5444 if (Ekind (Defining_Entity (component)) == E_Component)
5445 return RECORD_TYPE;
5446
5447 return UNION_TYPE;
5448 }
5449
5450 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5451 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5452 according to the presence of an alignment clause on the type or, if it
5453 is an array, on the component type. */
5454
5455 bool
5456 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5457 {
5458 gnat_type = Underlying_Type (gnat_type);
5459
5460 *align_clause = Present (Alignment_Clause (gnat_type));
5461
5462 if (Is_Array_Type (gnat_type))
5463 {
5464 gnat_type = Underlying_Type (Component_Type (gnat_type));
5465 if (Present (Alignment_Clause (gnat_type)))
5466 *align_clause = true;
5467 }
5468
5469 if (!Is_Floating_Point_Type (gnat_type))
5470 return false;
5471
5472 if (UI_To_Int (Esize (gnat_type)) != 64)
5473 return false;
5474
5475 return true;
5476 }
5477
5478 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5479 size is greater or equal to 64 bits, or an array of such a type. Set
5480 ALIGN_CLAUSE according to the presence of an alignment clause on the
5481 type or, if it is an array, on the component type. */
5482
5483 bool
5484 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5485 {
5486 gnat_type = Underlying_Type (gnat_type);
5487
5488 *align_clause = Present (Alignment_Clause (gnat_type));
5489
5490 if (Is_Array_Type (gnat_type))
5491 {
5492 gnat_type = Underlying_Type (Component_Type (gnat_type));
5493 if (Present (Alignment_Clause (gnat_type)))
5494 *align_clause = true;
5495 }
5496
5497 if (!Is_Scalar_Type (gnat_type))
5498 return false;
5499
5500 if (UI_To_Int (Esize (gnat_type)) < 64)
5501 return false;
5502
5503 return true;
5504 }
5505
5506 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
5507 component of an aggregate type. */
5508
5509 bool
5510 type_for_nonaliased_component_p (tree gnu_type)
5511 {
5512 /* If the type is passed by reference, we may have pointers to the
5513 component so it cannot be made non-aliased. */
5514 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5515 return false;
5516
5517 /* We used to say that any component of aggregate type is aliased
5518 because the front-end may take 'Reference of it. The front-end
5519 has been enhanced in the meantime so as to use a renaming instead
5520 in most cases, but the back-end can probably take the address of
5521 such a component too so we go for the conservative stance.
5522
5523 For instance, we might need the address of any array type, even
5524 if normally passed by copy, to construct a fat pointer if the
5525 component is used as an actual for an unconstrained formal.
5526
5527 Likewise for record types: even if a specific record subtype is
5528 passed by copy, the parent type might be passed by ref (e.g. if
5529 it's of variable size) and we might take the address of a child
5530 component to pass to a parent formal. We have no way to check
5531 for such conditions here. */
5532 if (AGGREGATE_TYPE_P (gnu_type))
5533 return false;
5534
5535 return true;
5536 }
5537
5538 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
5539
5540 bool
5541 smaller_form_type_p (tree type, tree orig_type)
5542 {
5543 tree size, osize;
5544
5545 /* We're not interested in variants here. */
5546 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5547 return false;
5548
5549 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5550 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5551 return false;
5552
5553 size = TYPE_SIZE (type);
5554 osize = TYPE_SIZE (orig_type);
5555
5556 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5557 return false;
5558
5559 return tree_int_cst_lt (size, osize) != 0;
5560 }
5561
/* Perform final processing on global variables.  */

/* Dummy VAR_DECL, created on demand below, used to anchor in the
   types-used-by-variables hash table the types that were declared as
   used at the global level.  */
static GTY (()) tree dummy_global;

/* Final processing hook for the whole compilation unit: register the
   globally-used types, emit debug info for global type declarations,
   run the cgraph finalization, then emit debug info for the remaining
   global declarations.  */
void
gnat_write_global_declarations (void)
{
  unsigned int i;
  tree iter;

  /* If we have declared types as used at the global level, insert them in
     the global hash table.  We use a dummy variable for this purpose.  */
  if (!VEC_empty (tree, types_used_by_cur_var_decl))
    {
      struct varpool_node *node;
      char *label;

      /* Build a unique private name so the dummy cannot clash with any
	 real symbol of the unit.  */
      ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
      dummy_global
	= build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
		      void_type_node);
      /* Marking it static and already-written keeps it from being
	 actually emitted, while force_output keeps it in the varpool.  */
      TREE_STATIC (dummy_global) = 1;
      TREE_ASM_WRITTEN (dummy_global) = 1;
      node = varpool_node (dummy_global);
      node->symbol.force_output = 1;

      /* Drain the vector, attaching every recorded type to the dummy.  */
      while (!VEC_empty (tree, types_used_by_cur_var_decl))
	{
	  tree t = VEC_pop (tree, types_used_by_cur_var_decl);
	  types_used_by_var_decl_insert (t, dummy_global);
	}
    }

  /* Output debug information for all global type declarations first.  This
     ensures that global types whose compilation hasn't been finalized yet,
     for example pointers to Taft amendment types, have their compilation
     finalized in the right context.  */
  FOR_EACH_VEC_ELT (tree, global_decls, i, iter)
    if (TREE_CODE (iter) == TYPE_DECL)
      debug_hooks->global_decl (iter);

  /* Proceed to optimize and emit assembly.  */
  finalize_compilation_unit ();

  /* After cgraph has had a chance to emit everything that's going to
     be emitted, output debug information for the rest of globals.  */
  if (!seen_error ())
    {
      timevar_push (TV_SYMOUT);
      FOR_EACH_VEC_ELT (tree, global_decls, i, iter)
	if (TREE_CODE (iter) != TYPE_DECL)
	  debug_hooks->global_decl (iter);
      timevar_pop (TV_SYMOUT);
    }
}
5617
5618 /* ************************************************************************
5619 * * GCC builtins support *
5620 * ************************************************************************ */
5621
5622 /* The general scheme is fairly simple:
5623
5624 For each builtin function/type to be declared, gnat_install_builtins calls
5625 internal facilities which eventually get to gnat_push_decl, which in turn
5626 tracks the so declared builtin function decls in the 'builtin_decls' global
5627 datastructure. When an Intrinsic subprogram declaration is processed, we
5628 search this global datastructure to retrieve the associated BUILT_IN DECL
5629 node. */
5630
5631 /* Search the chain of currently available builtin declarations for a node
5632 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5633 found, if any, or NULL_TREE otherwise. */
5634 tree
5635 builtin_decl_for (tree name)
5636 {
5637 unsigned i;
5638 tree decl;
5639
5640 FOR_EACH_VEC_ELT (tree, builtin_decls, i, decl)
5641 if (DECL_NAME (decl) == name)
5642 return decl;
5643
5644 return NULL_TREE;
5645 }
5646
5647 /* The code below eventually exposes gnat_install_builtins, which declares
5648 the builtin types and functions we might need, either internally or as
5649 user accessible facilities.
5650
5651 ??? This is a first implementation shot, still in rough shape. It is
5652 heavily inspired from the "C" family implementation, with chunks copied
5653 verbatim from there.
5654
5655 Two obvious TODO candidates are
5656 o Use a more efficient name/decl mapping scheme
5657 o Devise a middle-end infrastructure to avoid having to copy
5658 pieces between front-ends. */
5659
5660 /* ----------------------------------------------------------------------- *
5661 * BUILTIN ELEMENTARY TYPES *
5662 * ----------------------------------------------------------------------- */
5663
/* Standard data types to be used in builtin argument declarations.  */

enum c_tree_index
{
    CTI_SIGNED_SIZE_TYPE, /* For format checking only.  */
    CTI_STRING_TYPE,
    CTI_CONST_STRING_TYPE,

    CTI_MAX
};

/* Trees indexed by c_tree_index, filled in by
   install_builtin_elementary_types below.  */
static tree c_global_trees[CTI_MAX];

#define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
#define string_type_node c_global_trees[CTI_STRING_TYPE]
#define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]

/* ??? In addition to some attribute handlers, we currently don't support
   a (small) number of builtin-types, which in turn inhibits support for a
   number of builtin functions.  The missing types are mapped to
   void_type_node as placeholders.  */
#define wint_type_node void_type_node
#define intmax_type_node void_type_node
#define uintmax_type_node void_type_node
5687
5688 /* Build the void_list_node (void_type_node having been created). */
5689
5690 static tree
5691 build_void_list_node (void)
5692 {
5693 tree t = build_tree_list (NULL_TREE, void_type_node);
5694 return t;
5695 }
5696
5697 /* Used to help initialize the builtin-types.def table. When a type of
5698 the correct size doesn't exist, use error_mark_node instead of NULL.
5699 The later results in segfaults even when a decl using the type doesn't
5700 get invoked. */
5701
5702 static tree
5703 builtin_type_for_size (int size, bool unsignedp)
5704 {
5705 tree type = gnat_type_for_size (size, unsignedp);
5706 return type ? type : error_mark_node;
5707 }
5708
5709 /* Build/push the elementary type decls that builtin functions/types
5710 will need. */
5711
5712 static void
5713 install_builtin_elementary_types (void)
5714 {
5715 signed_size_type_node = gnat_signed_type (size_type_node);
5716 pid_type_node = integer_type_node;
5717 void_list_node = build_void_list_node ();
5718
5719 string_type_node = build_pointer_type (char_type_node);
5720 const_string_type_node
5721 = build_pointer_type (build_qualified_type
5722 (char_type_node, TYPE_QUAL_CONST));
5723 }
5724
5725 /* ----------------------------------------------------------------------- *
5726 * BUILTIN FUNCTION TYPES *
5727 * ----------------------------------------------------------------------- */
5728
/* Now, builtin function types per se.  */

/* Enumerate every builtin function type by expanding builtin-types.def
   with each DEF_* macro yielding just the enumerator NAME; BT_LAST
   closes the enumeration and sizes the builtin_types array below.  */
enum c_builtin_type
{
#define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
#define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
#define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
#define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
#define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
#define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
#define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
#define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6) NAME,
#define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7) NAME,
#define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
#define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
#define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
#define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
#define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
/* NOTE(review): the last parameter is spelled ARG6 rather than ARG5;
   harmless here since only NAME is expanded, but confirm against the
   c-family copy of this table.  */
#define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG6) \
  NAME,
#define DEF_POINTER_TYPE(NAME, TYPE) NAME,
#include "builtin-types.def"
#undef DEF_PRIMITIVE_TYPE
#undef DEF_FUNCTION_TYPE_0
#undef DEF_FUNCTION_TYPE_1
#undef DEF_FUNCTION_TYPE_2
#undef DEF_FUNCTION_TYPE_3
#undef DEF_FUNCTION_TYPE_4
#undef DEF_FUNCTION_TYPE_5
#undef DEF_FUNCTION_TYPE_6
#undef DEF_FUNCTION_TYPE_7
#undef DEF_FUNCTION_TYPE_VAR_0
#undef DEF_FUNCTION_TYPE_VAR_1
#undef DEF_FUNCTION_TYPE_VAR_2
#undef DEF_FUNCTION_TYPE_VAR_3
#undef DEF_FUNCTION_TYPE_VAR_4
#undef DEF_FUNCTION_TYPE_VAR_5
#undef DEF_POINTER_TYPE
  BT_LAST
};

typedef enum c_builtin_type builtin_type;

/* A temporary array used in communication with def_fn_type.  */
static GTY(()) tree builtin_types[(int) BT_LAST + 1];
5774
5775 /* A helper function for install_builtin_types. Build function type
5776 for DEF with return type RET and N arguments. If VAR is true, then the
5777 function should be variadic after those N arguments.
5778
5779 Takes special care not to ICE if any of the types involved are
5780 error_mark_node, which indicates that said type is not in fact available
5781 (see builtin_type_for_size). In which case the function type as a whole
5782 should be error_mark_node. */
5783
5784 static void
5785 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
5786 {
5787 tree t;
5788 tree *args = XALLOCAVEC (tree, n);
5789 va_list list;
5790 int i;
5791
5792 va_start (list, n);
5793 for (i = 0; i < n; ++i)
5794 {
5795 builtin_type a = (builtin_type) va_arg (list, int);
5796 t = builtin_types[a];
5797 if (t == error_mark_node)
5798 goto egress;
5799 args[i] = t;
5800 }
5801
5802 t = builtin_types[ret];
5803 if (t == error_mark_node)
5804 goto egress;
5805 if (var)
5806 t = build_varargs_function_type_array (t, n, args);
5807 else
5808 t = build_function_type_array (t, n, args);
5809
5810 egress:
5811 builtin_types[def] = t;
5812 va_end (list);
5813 }
5814
5815 /* Build the builtin function types and install them in the builtin_types
5816 array for later use in builtin function decls. */
5817
5818 static void
5819 install_builtin_function_types (void)
5820 {
5821 tree va_list_ref_type_node;
5822 tree va_list_arg_type_node;
5823
5824 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
5825 {
5826 va_list_arg_type_node = va_list_ref_type_node =
5827 build_pointer_type (TREE_TYPE (va_list_type_node));
5828 }
5829 else
5830 {
5831 va_list_arg_type_node = va_list_type_node;
5832 va_list_ref_type_node = build_reference_type (va_list_type_node);
5833 }
5834
5835 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5836 builtin_types[ENUM] = VALUE;
5837 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5838 def_fn_type (ENUM, RETURN, 0, 0);
5839 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5840 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5841 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5842 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5843 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5844 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5845 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5846 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5847 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5848 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5849 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5850 ARG6) \
5851 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5852 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5853 ARG6, ARG7) \
5854 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5855 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5856 def_fn_type (ENUM, RETURN, 1, 0);
5857 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5858 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5859 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5860 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5861 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5862 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5863 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5864 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5865 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5866 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5867 #define DEF_POINTER_TYPE(ENUM, TYPE) \
5868 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5869
5870 #include "builtin-types.def"
5871
5872 #undef DEF_PRIMITIVE_TYPE
5873 #undef DEF_FUNCTION_TYPE_1
5874 #undef DEF_FUNCTION_TYPE_2
5875 #undef DEF_FUNCTION_TYPE_3
5876 #undef DEF_FUNCTION_TYPE_4
5877 #undef DEF_FUNCTION_TYPE_5
5878 #undef DEF_FUNCTION_TYPE_6
5879 #undef DEF_FUNCTION_TYPE_VAR_0
5880 #undef DEF_FUNCTION_TYPE_VAR_1
5881 #undef DEF_FUNCTION_TYPE_VAR_2
5882 #undef DEF_FUNCTION_TYPE_VAR_3
5883 #undef DEF_FUNCTION_TYPE_VAR_4
5884 #undef DEF_FUNCTION_TYPE_VAR_5
5885 #undef DEF_POINTER_TYPE
5886 builtin_types[(int) BT_LAST] = NULL_TREE;
5887 }
5888
5889 /* ----------------------------------------------------------------------- *
5890 * BUILTIN ATTRIBUTES *
5891 * ----------------------------------------------------------------------- */
5892
/* Enumerate the built-in attribute trees by expanding builtin-attrs.def
   with each DEF_ATTR_* macro yielding just the enumerator; ATTR_LAST
   sizes the built_in_attributes array below.  */
enum built_in_attribute
{
#define DEF_ATTR_NULL_TREE(ENUM) ENUM,
#define DEF_ATTR_INT(ENUM, VALUE) ENUM,
#define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
#define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_STRING
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
  ATTR_LAST
};

/* Trees for each built_in_attribute enumerator, built by
   install_builtin_attributes below.  */
static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
5910
/* Fill in the built_in_attributes array by expanding builtin-attrs.def a
   second time, this time building the tree for each entry: NULL_TREE,
   integer constant, string, identifier, or a tree list chaining other
   already-built entries.  */
static void
install_builtin_attributes (void)
{
  /* Fill in the built_in_attributes array.  */
#define DEF_ATTR_NULL_TREE(ENUM) \
  built_in_attributes[(int) ENUM] = NULL_TREE;
#define DEF_ATTR_INT(ENUM, VALUE) \
  built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
#define DEF_ATTR_STRING(ENUM, VALUE) \
  built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
#define DEF_ATTR_IDENT(ENUM, STRING) \
  built_in_attributes[(int) ENUM] = get_identifier (STRING);
#define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN)	\
  built_in_attributes[(int) ENUM]			\
    = tree_cons (built_in_attributes[(int) PURPOSE],	\
		 built_in_attributes[(int) VALUE],	\
		 built_in_attributes[(int) CHAIN]);
#include "builtin-attrs.def"
#undef DEF_ATTR_NULL_TREE
#undef DEF_ATTR_INT
#undef DEF_ATTR_STRING
#undef DEF_ATTR_IDENT
#undef DEF_ATTR_TREE_LIST
}
5935
5936 /* Handle a "const" attribute; arguments as in
5937 struct attribute_spec.handler. */
5938
5939 static tree
5940 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5941 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5942 bool *no_add_attrs)
5943 {
5944 if (TREE_CODE (*node) == FUNCTION_DECL)
5945 TREE_READONLY (*node) = 1;
5946 else
5947 *no_add_attrs = true;
5948
5949 return NULL_TREE;
5950 }
5951
5952 /* Handle a "nothrow" attribute; arguments as in
5953 struct attribute_spec.handler. */
5954
5955 static tree
5956 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
5957 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5958 bool *no_add_attrs)
5959 {
5960 if (TREE_CODE (*node) == FUNCTION_DECL)
5961 TREE_NOTHROW (*node) = 1;
5962 else
5963 *no_add_attrs = true;
5964
5965 return NULL_TREE;
5966 }
5967
5968 /* Handle a "pure" attribute; arguments as in
5969 struct attribute_spec.handler. */
5970
5971 static tree
5972 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5973 int ARG_UNUSED (flags), bool *no_add_attrs)
5974 {
5975 if (TREE_CODE (*node) == FUNCTION_DECL)
5976 DECL_PURE_P (*node) = 1;
5977 /* ??? TODO: Support types. */
5978 else
5979 {
5980 warning (OPT_Wattributes, "%qs attribute ignored",
5981 IDENTIFIER_POINTER (name));
5982 *no_add_attrs = true;
5983 }
5984
5985 return NULL_TREE;
5986 }
5987
5988 /* Handle a "no vops" attribute; arguments as in
5989 struct attribute_spec.handler. */
5990
5991 static tree
5992 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
5993 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5994 bool *ARG_UNUSED (no_add_attrs))
5995 {
5996 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
5997 DECL_IS_NOVOPS (*node) = 1;
5998 return NULL_TREE;
5999 }
6000
6001 /* Helper for nonnull attribute handling; fetch the operand number
6002 from the attribute argument list. */
6003
6004 static bool
6005 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
6006 {
6007 /* Verify the arg number is a constant. */
6008 if (TREE_CODE (arg_num_expr) != INTEGER_CST
6009 || TREE_INT_CST_HIGH (arg_num_expr) != 0)
6010 return false;
6011
6012 *valp = TREE_INT_CST_LOW (arg_num_expr);
6013 return true;
6014 }
6015
/* Handle the "nonnull" attribute.  With no arguments, it applies to all
   pointer parameters of a prototyped function; with arguments, each one
   must name a pointer parameter by position.  */
static tree
handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
			  tree args, int ARG_UNUSED (flags),
			  bool *no_add_attrs)
{
  tree type = *node;
  unsigned HOST_WIDE_INT attr_arg_num;

  /* If no arguments are specified, all pointer arguments should be
     non-null.  Verify a full prototype is given so that the arguments
     will have the correct types when we actually check them later.  */
  if (!args)
    {
      if (!prototype_p (type))
	{
	  error ("nonnull attribute without arguments on a non-prototype");
	  *no_add_attrs = true;
	}
      return NULL_TREE;
    }

  /* Argument list specified.  Verify that each argument number references
     a pointer argument.
     NOTE(review): attr_arg_num is never incremented in this loop, so the
     diagnostics below always report "argument 1" — confirm against the
     c-family copy of this handler.  */
  for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
    {
      unsigned HOST_WIDE_INT arg_num = 0, ck_num;

      /* The attribute operand must be a small integer constant.  */
      if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
	{
	  error ("nonnull argument has invalid operand number (argument %lu)",
		 (unsigned long) attr_arg_num);
	  *no_add_attrs = true;
	  return NULL_TREE;
	}

      /* With a prototype at hand, walk to the ARG_NUM-th parameter type
	 and check that it exists and is a pointer.  */
      if (prototype_p (type))
	{
	  function_args_iterator iter;
	  tree argument;

	  function_args_iter_init (&iter, type);
	  for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
	    {
	      argument = function_args_iter_cond (&iter);
	      if (!argument || ck_num == arg_num)
		break;
	    }

	  /* Reaching the end of the list (or the trailing void) means
	     ARG_NUM is out of range.  */
	  if (!argument
	      || TREE_CODE (argument) == VOID_TYPE)
	    {
	      error ("nonnull argument with out-of-range operand number "
		     "(argument %lu, operand %lu)",
		     (unsigned long) attr_arg_num, (unsigned long) arg_num);
	      *no_add_attrs = true;
	      return NULL_TREE;
	    }

	  if (TREE_CODE (argument) != POINTER_TYPE)
	    {
	      error ("nonnull argument references non-pointer operand "
		     "(argument %lu, operand %lu)",
		     (unsigned long) attr_arg_num, (unsigned long) arg_num);
	      *no_add_attrs = true;
	      return NULL_TREE;
	    }
	}
    }

  return NULL_TREE;
}
6088
6089 /* Handle a "sentinel" attribute. */
6090
6091 static tree
6092 handle_sentinel_attribute (tree *node, tree name, tree args,
6093 int ARG_UNUSED (flags), bool *no_add_attrs)
6094 {
6095 if (!prototype_p (*node))
6096 {
6097 warning (OPT_Wattributes,
6098 "%qs attribute requires prototypes with named arguments",
6099 IDENTIFIER_POINTER (name));
6100 *no_add_attrs = true;
6101 }
6102 else
6103 {
6104 if (!stdarg_p (*node))
6105 {
6106 warning (OPT_Wattributes,
6107 "%qs attribute only applies to variadic functions",
6108 IDENTIFIER_POINTER (name));
6109 *no_add_attrs = true;
6110 }
6111 }
6112
6113 if (args)
6114 {
6115 tree position = TREE_VALUE (args);
6116
6117 if (TREE_CODE (position) != INTEGER_CST)
6118 {
6119 warning (0, "requested position is not an integer constant");
6120 *no_add_attrs = true;
6121 }
6122 else
6123 {
6124 if (tree_int_cst_lt (position, integer_zero_node))
6125 {
6126 warning (0, "requested position is less than zero");
6127 *no_add_attrs = true;
6128 }
6129 }
6130 }
6131
6132 return NULL_TREE;
6133 }
6134
/* Handle a "noreturn" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
			   int ARG_UNUSED (flags), bool *no_add_attrs)
{
  tree type = TREE_TYPE (*node);

  /* See FIXME comment in c_common_attribute_table.  */
  if (TREE_CODE (*node) == FUNCTION_DECL)
    /* TREE_THIS_VOLATILE on a FUNCTION_DECL is how the middle-end
       records "does not return".  */
    TREE_THIS_VOLATILE (*node) = 1;
  else if (TREE_CODE (type) == POINTER_TYPE
	   && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
    /* For a pointer to function, rebuild the pointed-to function type
       as a volatile (i.e. noreturn) variant, preserving readonly.  */
    TREE_TYPE (*node)
      = build_pointer_type
	(build_type_variant (TREE_TYPE (type),
			     TYPE_READONLY (TREE_TYPE (type)), 1));
  else
    {
      warning (OPT_Wattributes, "%qs attribute ignored",
	       IDENTIFIER_POINTER (name));
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
6162
/* Handle a "leaf" attribute; arguments as in
   struct attribute_spec.handler.  */

static tree
handle_leaf_attribute (tree *node, tree name,
		       tree ARG_UNUSED (args),
		       int ARG_UNUSED (flags), bool *no_add_attrs)
{
  if (TREE_CODE (*node) != FUNCTION_DECL)
    {
      warning (OPT_Wattributes, "%qE attribute ignored", name);
      *no_add_attrs = true;
    }
  /* NOTE(review): deliberately not chained with "else": a non-public
     non-function receives both warnings.  This mirrors the structure
     of the c-family handler — confirm if the double warning is
     intended.  */
  if (!TREE_PUBLIC (*node))
    {
      /* "leaf" only matters for externally visible functions, since
	 calls to local functions cannot re-enter via the callee.  */
      warning (OPT_Wattributes, "%qE attribute has no effect", name);
      *no_add_attrs = true;
    }

  return NULL_TREE;
}
6184
6185 /* Handle a "malloc" attribute; arguments as in
6186 struct attribute_spec.handler. */
6187
6188 static tree
6189 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6190 int ARG_UNUSED (flags), bool *no_add_attrs)
6191 {
6192 if (TREE_CODE (*node) == FUNCTION_DECL
6193 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
6194 DECL_IS_MALLOC (*node) = 1;
6195 else
6196 {
6197 warning (OPT_Wattributes, "%qs attribute ignored",
6198 IDENTIFIER_POINTER (name));
6199 *no_add_attrs = true;
6200 }
6201
6202 return NULL_TREE;
6203 }
6204
6205 /* Fake handler for attributes we don't properly support. */
6206
6207 tree
6208 fake_attribute_handler (tree * ARG_UNUSED (node),
6209 tree ARG_UNUSED (name),
6210 tree ARG_UNUSED (args),
6211 int ARG_UNUSED (flags),
6212 bool * ARG_UNUSED (no_add_attrs))
6213 {
6214 return NULL_TREE;
6215 }
6216
6217 /* Handle a "type_generic" attribute. */
6218
6219 static tree
6220 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
6221 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
6222 bool * ARG_UNUSED (no_add_attrs))
6223 {
6224 /* Ensure we have a function type. */
6225 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
6226
6227 /* Ensure we have a variadic function. */
6228 gcc_assert (!prototype_p (*node) || stdarg_p (*node));
6229
6230 return NULL_TREE;
6231 }
6232
/* Handle a "vector_size" attribute; arguments as in
   struct attribute_spec.handler.  The attribute is implemented by
   rewriting the type into a vector type rather than by keeping the
   attribute itself, hence *no_add_attrs is always set.  */

static tree
handle_vector_size_attribute (tree *node, tree name, tree args,
			      int ARG_UNUSED (flags),
			      bool *no_add_attrs)
{
  unsigned HOST_WIDE_INT vecsize, nunits;
  enum machine_mode orig_mode;
  tree type = *node, new_type, size;

  /* Never keep the attribute; on success the type itself is replaced.  */
  *no_add_attrs = true;

  size = TREE_VALUE (args);

  /* The requested size must be a constant that fits in a HWI.  */
  if (!host_integerp (size, 1))
    {
      warning (OPT_Wattributes, "%qs attribute ignored",
	       IDENTIFIER_POINTER (name));
      return NULL_TREE;
    }

  /* Get the vector size (in bytes).  */
  vecsize = tree_low_cst (size, 1);

  /* We need to provide for vector pointers, vector arrays, and
     functions returning vectors.  For example:

       __attribute__((vector_size(16))) short *foo;

     In this case, the mode is SI, but the type being modified is
     HI, so we need to look further.  */

  while (POINTER_TYPE_P (type)
	 || TREE_CODE (type) == FUNCTION_TYPE
	 || TREE_CODE (type) == ARRAY_TYPE)
    type = TREE_TYPE (type);

  /* Get the mode of the type being modified.  */
  orig_mode = TYPE_MODE (type);

  /* Only integral, floating-point and fixed-point scalars of constant
     size may be turned into vectors; booleans are excluded.  */
  if ((!INTEGRAL_TYPE_P (type)
       && !SCALAR_FLOAT_TYPE_P (type)
       && !FIXED_POINT_TYPE_P (type))
      || (!SCALAR_FLOAT_MODE_P (orig_mode)
	  && GET_MODE_CLASS (orig_mode) != MODE_INT
	  && !ALL_SCALAR_FIXED_POINT_MODE_P (orig_mode))
      || !host_integerp (TYPE_SIZE_UNIT (type), 1)
      || TREE_CODE (type) == BOOLEAN_TYPE)
    {
      error ("invalid vector type for attribute %qs",
	     IDENTIFIER_POINTER (name));
      return NULL_TREE;
    }

  if (vecsize % tree_low_cst (TYPE_SIZE_UNIT (type), 1))
    {
      error ("vector size not an integral multiple of component size");
      return NULL;
    }

  if (vecsize == 0)
    {
      error ("zero vector size");
      return NULL;
    }

  /* Calculate how many units fit in the vector.  */
  nunits = vecsize / tree_low_cst (TYPE_SIZE_UNIT (type), 1);
  /* Power-of-two check via the classic n & (n - 1) idiom.  */
  if (nunits & (nunits - 1))
    {
      error ("number of components of the vector not a power of two");
      return NULL_TREE;
    }

  new_type = build_vector_type (type, nunits);

  /* Build back pointers if needed.  */
  *node = reconstruct_complex_type (*node, new_type);

  return NULL_TREE;
}
6316
6317 /* Handle a "vector_type" attribute; arguments as in
6318 struct attribute_spec.handler. */
6319
6320 static tree
6321 handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
6322 int ARG_UNUSED (flags),
6323 bool *no_add_attrs)
6324 {
6325 /* Vector representative type and size. */
6326 tree rep_type = *node;
6327 tree rep_size = TYPE_SIZE_UNIT (rep_type);
6328 tree rep_name;
6329
6330 /* Vector size in bytes and number of units. */
6331 unsigned HOST_WIDE_INT vec_bytes, vec_units;
6332
6333 /* Vector element type and mode. */
6334 tree elem_type;
6335 enum machine_mode elem_mode;
6336
6337 *no_add_attrs = true;
6338
6339 /* Get the representative array type, possibly nested within a
6340 padding record e.g. for alignment purposes. */
6341
6342 if (TYPE_IS_PADDING_P (rep_type))
6343 rep_type = TREE_TYPE (TYPE_FIELDS (rep_type));
6344
6345 if (TREE_CODE (rep_type) != ARRAY_TYPE)
6346 {
6347 error ("attribute %qs applies to array types only",
6348 IDENTIFIER_POINTER (name));
6349 return NULL_TREE;
6350 }
6351
6352 /* Silently punt on variable sizes. We can't make vector types for them,
6353 need to ignore them on front-end generated subtypes of unconstrained
6354 bases, and this attribute is for binding implementors, not end-users, so
6355 we should never get there from legitimate explicit uses. */
6356
6357 if (!host_integerp (rep_size, 1))
6358 return NULL_TREE;
6359
6360 /* Get the element type/mode and check this is something we know
6361 how to make vectors of. */
6362
6363 elem_type = TREE_TYPE (rep_type);
6364 elem_mode = TYPE_MODE (elem_type);
6365
6366 if ((!INTEGRAL_TYPE_P (elem_type)
6367 && !SCALAR_FLOAT_TYPE_P (elem_type)
6368 && !FIXED_POINT_TYPE_P (elem_type))
6369 || (!SCALAR_FLOAT_MODE_P (elem_mode)
6370 && GET_MODE_CLASS (elem_mode) != MODE_INT
6371 && !ALL_SCALAR_FIXED_POINT_MODE_P (elem_mode))
6372 || !host_integerp (TYPE_SIZE_UNIT (elem_type), 1))
6373 {
6374 error ("invalid element type for attribute %qs",
6375 IDENTIFIER_POINTER (name));
6376 return NULL_TREE;
6377 }
6378
6379 /* Sanity check the vector size and element type consistency. */
6380
6381 vec_bytes = tree_low_cst (rep_size, 1);
6382
6383 if (vec_bytes % tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1))
6384 {
6385 error ("vector size not an integral multiple of component size");
6386 return NULL;
6387 }
6388
6389 if (vec_bytes == 0)
6390 {
6391 error ("zero vector size");
6392 return NULL;
6393 }
6394
6395 vec_units = vec_bytes / tree_low_cst (TYPE_SIZE_UNIT (elem_type), 1);
6396 if (vec_units & (vec_units - 1))
6397 {
6398 error ("number of components of the vector not a power of two");
6399 return NULL_TREE;
6400 }
6401
6402 /* Build the vector type and replace. */
6403
6404 *node = build_vector_type (elem_type, vec_units);
6405 rep_name = TYPE_NAME (rep_type);
6406 if (TREE_CODE (rep_name) == TYPE_DECL)
6407 rep_name = DECL_NAME (rep_name);
6408 TYPE_NAME (*node) = rep_name;
6409 TYPE_REPRESENTATIVE_ARRAY (*node) = rep_type;
6410
6411 return NULL_TREE;
6412 }
6413
6414 /* ----------------------------------------------------------------------- *
6415 * BUILTIN FUNCTIONS *
6416 * ----------------------------------------------------------------------- */
6417
/* Worker for DEF_BUILTIN.  Possibly define a builtin function with one or two
   names.  Does not declare a non-__builtin_ function if flag_no_builtin, or
   if nonansi_p and flag_no_nonansi_builtin.  */

static void
def_builtin_1 (enum built_in_function fncode,
	       const char *name,
	       enum built_in_class fnclass,
	       tree fntype, tree libtype,
	       bool both_p, bool fallback_p,
	       bool nonansi_p ATTRIBUTE_UNUSED,
	       tree fnattrs, bool implicit_p)
{
  tree decl;
  const char *libname;

  /* Preserve an already installed decl.  It most likely was setup in advance
     (e.g. as part of the internal builtins) for specific reasons.  */
  if (builtin_decl_explicit (fncode) != NULL_TREE)
    return;

  /* If the bare library name will be needed (as a second declaration or as
     a fallback), NAME must carry the "__builtin_" prefix so that the bare
     name can be derived from it below.  */
  gcc_assert ((!both_p && !fallback_p)
	      || !strncmp (name, "__builtin_",
			   strlen ("__builtin_")));

  /* Point past the "__builtin_" prefix.  NOTE(review): computed
     unconditionally, but only used when BOTH_P or FALLBACK_P is set, in
     which case the assertion above guarantees the prefix is present.  */
  libname = name + strlen ("__builtin_");
  decl = add_builtin_function (name, fntype, fncode, fnclass,
			       (fallback_p ? libname : NULL),
			       fnattrs);
  if (both_p)
    /* ??? This is normally further controlled by command-line options
       like -fno-builtin, but we don't have them for Ada.  */
    add_builtin_function (libname, libtype, fncode, fnclass,
			  NULL, fnattrs);

  /* Record the __builtin_ decl so builtin_decl_explicit/implicit find it.  */
  set_builtin_decl (fncode, decl, implicit_p);
}
6455
/* Consulted by the COND predicates expanded from builtins.def below.
   Kept at 0: the Ada front-end never advertises C94/C99 library support,
   so builtins gated on these flags are not installed.  */
static int flag_isoc94 = 0;
static int flag_isoc99 = 0;
6458
/* Install what the common builtins.def offers.  */

static void
install_builtin_functions (void)
{
  /* Expand every DEF_BUILTIN entry of builtins.def into a call to
     def_builtin_1, looking the function/library types and the attributes
     up in the builtin_types and built_in_attributes tables.  Entries with
     a null NAME or a false COND are skipped.  */
#define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
		    NONANSI_P, ATTRS, IMPLICIT, COND)			\
  if (NAME && COND)							\
    def_builtin_1 (ENUM, NAME, CLASS,					\
		   builtin_types[(int) TYPE],				\
		   builtin_types[(int) LIBTYPE],			\
		   BOTH_P, FALLBACK_P, NONANSI_P,			\
		   built_in_attributes[(int) ATTRS], IMPLICIT);
#include "builtins.def"
#undef DEF_BUILTIN
}
6475
/* ----------------------------------------------------------------------- *
 *                    BUILTIN FUNCTIONS INSTALLATION                        *
 * ----------------------------------------------------------------------- */
6479
/* Install the builtin functions we might need.  This is the single entry
   point for builtin setup in the Ada front-end; the call order below is
   significant.  */

void
gnat_install_builtins (void)
{
  /* Build the type and attribute tables that the installation routines
     below index into.  */
  install_builtin_elementary_types ();
  install_builtin_function_types ();
  install_builtin_attributes ();

  /* Install builtins used by generic middle-end pieces first.  Some of these
     know about internal specificities and control attributes accordingly, for
     instance __builtin_alloca vs no-throw and -fstack-check.  We will ignore
     the generic definition from builtins.def.  */
  build_common_builtin_nodes ();

  /* Now, install the target specific builtins, such as the AltiVec family on
     ppc, and the common set as exposed by builtins.def.  */
  targetm.init_builtins ();
  install_builtin_functions ();
}
6500
6501 #include "gt-ada-utils.h"
6502 #include "gtype-ada.h"