/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                                U T I L S                                 *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2014, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software;  you can  redistribute it  and/or modify it under *
 * terms of the  GNU General Public License as published  by the Free Soft- *
 * ware  Foundation;  either version 3,  or (at your option) any later ver- *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY;  without even the  implied warranty of MERCHANTABILITY *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for more details.  You should have received a copy of the GNU General    *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed  by the GNAT team at  New York University. *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "stringpool.h"
#include "stor-layout.h"
#include "attribs.h"
#include "varasm.h"
#include "flags.h"
#include "toplev.h"
#include "diagnostic-core.h"
#include "output.h"
#include "ggc.h"
#include "debug.h"
#include "convert.h"
#include "target.h"
#include "common/common-target.h"
#include "langhooks.h"
#include "cgraph.h"
#include "diagnostic.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-inline.h"
#include "tree-iterator.h"

#include "ada.h"
#include "types.h"
#include "atree.h"
#include "elists.h"
#include "namet.h"
#include "nlists.h"
#include "stringt.h"
#include "uintp.h"
#include "fe.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"

/* If nonzero, pretend we are allocating at global level.  */
int force_global;

/* The default alignment of "double" floating-point types, i.e. floating
   point types whose size is equal to 64 bits, or 0 if this alignment is
   not specifically capped.  */
int double_float_alignment;

/* The default alignment of "double" or larger scalar types, i.e. scalar
   types whose size is greater or equal to 64 bits, or 0 if this alignment
   is not specifically capped.  */
int double_scalar_alignment;

/* True if floating-point arithmetic may use wider intermediate results.  */
bool fp_arith_may_widen = true;

/* Tree nodes for the various types and decls we create.  */
tree gnat_std_decls[(int) ADT_LAST];

/* Functions to call for each of the possible raise reasons.  */
tree gnat_raise_decls[(int) LAST_REASON_CODE + 1];

/* Likewise, but with extra info for each of the possible raise reasons.  */
tree gnat_raise_decls_ext[(int) LAST_REASON_CODE + 1];

/* Forward declarations for handlers of attributes.  */
static tree handle_const_attribute (tree *, tree, tree, int, bool *);
static tree handle_nothrow_attribute (tree *, tree, tree, int, bool *);
static tree handle_pure_attribute (tree *, tree, tree, int, bool *);
static tree handle_novops_attribute (tree *, tree, tree, int, bool *);
static tree handle_nonnull_attribute (tree *, tree, tree, int, bool *);
static tree handle_sentinel_attribute (tree *, tree, tree, int, bool *);
static tree handle_noreturn_attribute (tree *, tree, tree, int, bool *);
static tree handle_leaf_attribute (tree *, tree, tree, int, bool *);
static tree handle_always_inline_attribute (tree *, tree, tree, int, bool *);
static tree handle_malloc_attribute (tree *, tree, tree, int, bool *);
static tree handle_type_generic_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_size_attribute (tree *, tree, tree, int, bool *);
static tree handle_vector_type_attribute (tree *, tree, tree, int, bool *);

/* Fake handler for attributes we don't properly support, typically because
   they'd require dragging a lot of the common-c front-end circuitry.  */
static tree fake_attribute_handler (tree *, tree, tree, int, bool *);

/* Table of machine-independent internal attributes for Ada.  We support
   this minimal set of attributes to accommodate the needs of builtins.  */
const struct attribute_spec gnat_internal_attribute_table[] =
{
  /* { name, min_len, max_len, decl_req, type_req, fn_type_req, handler,
       affects_type_identity } */
  { "const",        0, 0,  true,  false, false, handle_const_attribute,
    false },
  { "nothrow",      0, 0,  true,  false, false, handle_nothrow_attribute,
    false },
  { "pure",         0, 0,  true,  false, false, handle_pure_attribute,
    false },
  { "no vops",      0, 0,  true,  false, false, handle_novops_attribute,
    false },
  { "nonnull",      0, -1, false, true,  true,  handle_nonnull_attribute,
    false },
  { "sentinel",     0, 1,  false, true,  true,  handle_sentinel_attribute,
    false },
  { "noreturn",     0, 0,  true,  false, false, handle_noreturn_attribute,
    false },
  { "leaf",         0, 0,  true,  false, false, handle_leaf_attribute,
    false },
  { "always_inline",0, 0,  true,  false, false, handle_always_inline_attribute,
    false },
  { "malloc",       0, 0,  true,  false, false, handle_malloc_attribute,
    false },
  { "type generic", 0, 0,  false, true,  true,  handle_type_generic_attribute,
    false },

  { "vector_size",  1, 1,  false, true,  false, handle_vector_size_attribute,
    false },
  { "vector_type",  0, 0,  false, true,  false, handle_vector_type_attribute,
    false },
  { "may_alias",    0, 0,  false, true,  false, NULL, false },

  /* ??? format and format_arg are heavy and not supported, which actually
     prevents support for stdio builtins, which we however declare as part
     of the common builtins.def contents.  */
  { "format",     3, 3,  false, true,  true,  fake_attribute_handler, false },
  { "format_arg", 1, 1,  false, true,  true,  fake_attribute_handler, false },

  { NULL,         0, 0,  false, false, false, NULL, false }
};

/* Associates a GNAT tree node to a GCC tree node.  It is used in
   `save_gnu_tree', `get_gnu_tree' and `present_gnu_tree'.  See documentation
   of `save_gnu_tree' for more info.  */
static GTY((length ("max_gnat_nodes"))) tree *associate_gnat_to_gnu;

#define GET_GNU_TREE(GNAT_ENTITY) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id]

#define SET_GNU_TREE(GNAT_ENTITY,VAL) \
  associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_GNU_TREE(GNAT_ENTITY) \
  (associate_gnat_to_gnu[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)

/* Associates a GNAT entity to a GCC tree node used as a dummy, if any.  */
static GTY((length ("max_gnat_nodes"))) tree *dummy_node_table;

#define GET_DUMMY_NODE(GNAT_ENTITY) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id]

#define SET_DUMMY_NODE(GNAT_ENTITY,VAL) \
  dummy_node_table[(GNAT_ENTITY) - First_Node_Id] = (VAL)

#define PRESENT_DUMMY_NODE(GNAT_ENTITY) \
  (dummy_node_table[(GNAT_ENTITY) - First_Node_Id] != NULL_TREE)
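
/* For illustration: both tables above are flat arrays indexed by the offset
   of a Node_Id from First_Node_Id, so a lookup is a single array access.
   A typical usage pattern in gigi, using the accessors defined further
   below (the entity and decl here are placeholders), is:

       save_gnu_tree (gnat_entity, gnu_decl, false);
       ...
       if (present_gnu_tree (gnat_entity))
	 gnu_decl = get_gnu_tree (gnat_entity);  */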

/* This variable keeps a table for types for each precision so that we only
   allocate each of them once.  Signed and unsigned types are kept separate.

   Note that these types are only used when fold-const requests something
   special.  Perhaps we should NOT share these types; we'll see how it
   goes later.  */
static GTY(()) tree signed_and_unsigned_types[2 * MAX_BITS_PER_WORD + 1][2];

/* Likewise for float types, but record these by mode.  */
static GTY(()) tree float_types[NUM_MACHINE_MODES];

/* For each binding contour we allocate a binding_level structure to indicate
   the binding depth.  */

struct GTY((chain_next ("%h.chain"))) gnat_binding_level {
  /* The binding level containing this one (the enclosing binding level).  */
  struct gnat_binding_level *chain;
  /* The BLOCK node for this level.  */
  tree block;
  /* If nonzero, the setjmp buffer that needs to be updated for any
     variable-sized definition within this context.  */
  tree jmpbuf_decl;
};
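
/* For illustration, the functions below drive a binding level through this
   life cycle (a simplified sketch):

       gnat_pushlevel ();                 enter a scope, pushing a BLOCK
       gnat_pushdecl (decl, gnat_node);   record declarations in that scope
       gnat_poplevel ();                  exit the scope, attaching or
                                          merging the BLOCK into its parent

   gnat_zaplevel is used instead of gnat_poplevel when the BLOCK is to be
   discarded altogether.  */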

/* The binding level currently in effect.  */
static GTY(()) struct gnat_binding_level *current_binding_level;

/* A chain of gnat_binding_level structures awaiting reuse.  */
static GTY((deletable)) struct gnat_binding_level *free_binding_level;

/* The context to be used for global declarations.  */
static GTY(()) tree global_context;

/* An array of global declarations.  */
static GTY(()) vec<tree, va_gc> *global_decls;

/* An array of builtin function declarations.  */
static GTY(()) vec<tree, va_gc> *builtin_decls;

/* An array of global renaming pointers.  */
static GTY(()) vec<tree, va_gc> *global_renaming_pointers;

/* A chain of unused BLOCK nodes.  */
static GTY((deletable)) tree free_block_chain;

static int pad_type_hash_marked_p (const void *p);
static hashval_t pad_type_hash_hash (const void *p);
static int pad_type_hash_eq (const void *p1, const void *p2);

/* A hash table of padded types.  It is modelled on the generic type
   hash table in tree.c, which must thus be used as a reference.  */
struct GTY(()) pad_type_hash {
  unsigned long hash;
  tree type;
};

static GTY ((if_marked ("pad_type_hash_marked_p"),
             param_is (struct pad_type_hash)))
  htab_t pad_type_hash_table;

static tree merge_sizes (tree, tree, tree, bool, bool);
static tree compute_related_constant (tree, tree);
static tree split_plus (tree, tree *);
static tree float_type_for_precision (int, enum machine_mode);
static tree convert_to_fat_pointer (tree, tree);
static unsigned int scale_by_factor_of (tree, unsigned int);
static bool potential_alignment_gap (tree, tree, tree);

/* A linked list used as a queue to defer the initialization of the
   DECL_CONTEXT attribute of ..._DECL nodes and of the TYPE_CONTEXT attribute
   of ..._TYPE nodes.  */
struct deferred_decl_context_node
{
  tree decl;             /* The ..._DECL node to work on.  */
  Entity_Id gnat_scope;  /* The corresponding entity's Scope attribute.  */
  int force_global;      /* force_global value when pushing DECL.  */
  vec<tree, va_heap, vl_ptr> types;  /* A list of ..._TYPE nodes to propagate
                                        the context to.  */
  struct deferred_decl_context_node *next;  /* The next queue item.  */
};

static struct deferred_decl_context_node *deferred_decl_context_queue = NULL;

/* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling to
   feed it with the elaboration of GNAT_SCOPE.  */
static struct deferred_decl_context_node *
add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global);

/* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling to
   feed it with the DECL_CONTEXT computed as part of N as soon as it is
   computed.  */
static void add_deferred_type_context (struct deferred_decl_context_node *n,
                                       tree type);
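
/* For illustration, the deferral machinery above is used along these lines
   (a simplified sketch; the actual caller is gnat_pushdecl below):

       n = add_deferred_decl_context (decl, gnat_scope, force_global);
       add_deferred_type_context (n, some_type);
       ...
   and later, once GNAT_SCOPE has been elaborated, the queue entry is
   processed so that DECL_CONTEXT and TYPE_CONTEXT are finally set.  */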

/* Initialize data structures of the utils.c module.  */

void
init_gnat_utils (void)
{
  /* Initialize the association of GNAT nodes to GCC trees.  */
  associate_gnat_to_gnu = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);

  /* Initialize the association of GNAT nodes to GCC trees as dummies.  */
  dummy_node_table = ggc_cleared_vec_alloc<tree> (max_gnat_nodes);

  /* Initialize the hash table of padded types.  */
  pad_type_hash_table
    = htab_create_ggc (512, pad_type_hash_hash, pad_type_hash_eq, 0);
}

/* Destroy data structures of the utils.c module.  */

void
destroy_gnat_utils (void)
{
  /* Destroy the association of GNAT nodes to GCC trees.  */
  ggc_free (associate_gnat_to_gnu);
  associate_gnat_to_gnu = NULL;

  /* Destroy the association of GNAT nodes to GCC trees as dummies.  */
  ggc_free (dummy_node_table);
  dummy_node_table = NULL;

  /* Destroy the hash table of padded types.  */
  htab_delete (pad_type_hash_table);
  pad_type_hash_table = NULL;

  /* Invalidate the global renaming pointers.  */
  invalidate_global_renaming_pointers ();
}

/* GNAT_ENTITY is a GNAT tree node for an entity.  Associate GNU_DECL, a GCC
   tree node, with GNAT_ENTITY.  If GNU_DECL is not a ..._DECL node, abort.
   If NO_CHECK is true, the latter check is suppressed.

   If GNU_DECL is zero, reset a previous association.  */

void
save_gnu_tree (Entity_Id gnat_entity, tree gnu_decl, bool no_check)
{
  /* Check that GNAT_ENTITY is not already defined and that it is being set
     to something which is a decl.  If that is not the case, this usually
     means GNAT_ENTITY is defined twice, but occasionally is due to some
     Gigi problem.  */
  gcc_assert (!(gnu_decl
                && (PRESENT_GNU_TREE (gnat_entity)
                    || (!no_check && !DECL_P (gnu_decl)))));

  SET_GNU_TREE (gnat_entity, gnu_decl);
}

/* GNAT_ENTITY is a GNAT tree node for an entity.  Return the GCC tree node
   that was associated with it.  If there is no such tree node, abort.

   In some cases, such as delayed elaboration or expressions that need to
   be elaborated only once, GNAT_ENTITY is really not an entity.  */

tree
get_gnu_tree (Entity_Id gnat_entity)
{
  gcc_assert (PRESENT_GNU_TREE (gnat_entity));
  return GET_GNU_TREE (gnat_entity);
}

/* Return nonzero if a GCC tree has been associated with GNAT_ENTITY.  */

bool
present_gnu_tree (Entity_Id gnat_entity)
{
  return PRESENT_GNU_TREE (gnat_entity);
}

/* Make a dummy type corresponding to GNAT_TYPE.  */

tree
make_dummy_type (Entity_Id gnat_type)
{
  Entity_Id gnat_equiv = Gigi_Equivalent_Type (Underlying_Type (gnat_type));
  tree gnu_type;

  /* If there was no equivalent type (can only happen when just annotating
     types) or underlying type, go back to the original type.  */
  if (No (gnat_equiv))
    gnat_equiv = gnat_type;

  /* If there is already a dummy type, use it.  Otherwise, make one.  */
  if (PRESENT_DUMMY_NODE (gnat_equiv))
    return GET_DUMMY_NODE (gnat_equiv);

  /* If this is a record, make a RECORD_TYPE or UNION_TYPE; else make
     an ENUMERAL_TYPE.  */
  gnu_type = make_node (Is_Record_Type (gnat_equiv)
                        ? tree_code_for_record_type (gnat_equiv)
                        : ENUMERAL_TYPE);
  TYPE_NAME (gnu_type) = get_entity_name (gnat_type);
  TYPE_DUMMY_P (gnu_type) = 1;
  TYPE_STUB_DECL (gnu_type)
    = create_type_stub_decl (TYPE_NAME (gnu_type), gnu_type);
  if (Is_By_Reference_Type (gnat_equiv))
    TYPE_BY_REFERENCE_P (gnu_type) = 1;

  SET_DUMMY_NODE (gnat_equiv, gnu_type);

  return gnu_type;
}

/* Return the dummy type that was made for GNAT_TYPE, if any.  */

tree
get_dummy_type (Entity_Id gnat_type)
{
  return GET_DUMMY_NODE (gnat_type);
}

/* Build dummy fat and thin pointer types whose designated type is specified
   by GNAT_DESIG_TYPE/GNU_DESIG_TYPE and attach them to the latter.  */

void
build_dummy_unc_pointer_types (Entity_Id gnat_desig_type, tree gnu_desig_type)
{
  tree gnu_template_type, gnu_ptr_template, gnu_array_type, gnu_ptr_array;
  tree gnu_fat_type, fields, gnu_object_type;

  gnu_template_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_template_type) = create_concat_name (gnat_desig_type, "XUB");
  TYPE_DUMMY_P (gnu_template_type) = 1;
  gnu_ptr_template = build_pointer_type (gnu_template_type);

  gnu_array_type = make_node (ENUMERAL_TYPE);
  TYPE_NAME (gnu_array_type) = create_concat_name (gnat_desig_type, "XUA");
  TYPE_DUMMY_P (gnu_array_type) = 1;
  gnu_ptr_array = build_pointer_type (gnu_array_type);

  gnu_fat_type = make_node (RECORD_TYPE);
  /* Build a stub DECL to trigger the special processing for fat pointer types
     in gnat_pushdecl.  */
  TYPE_NAME (gnu_fat_type)
    = create_type_stub_decl (create_concat_name (gnat_desig_type, "XUP"),
                             gnu_fat_type);
  fields = create_field_decl (get_identifier ("P_ARRAY"), gnu_ptr_array,
                              gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  DECL_CHAIN (fields)
    = create_field_decl (get_identifier ("P_BOUNDS"), gnu_ptr_template,
                         gnu_fat_type, NULL_TREE, NULL_TREE, 0, 0);
  finish_fat_pointer_type (gnu_fat_type, fields);
  SET_TYPE_UNCONSTRAINED_ARRAY (gnu_fat_type, gnu_desig_type);
  /* Suppress debug info until after the type is completed.  */
  TYPE_DECL_SUPPRESS_DEBUG (TYPE_STUB_DECL (gnu_fat_type)) = 1;

  gnu_object_type = make_node (RECORD_TYPE);
  TYPE_NAME (gnu_object_type) = create_concat_name (gnat_desig_type, "XUT");
  TYPE_DUMMY_P (gnu_object_type) = 1;

  TYPE_POINTER_TO (gnu_desig_type) = gnu_fat_type;
  TYPE_OBJECT_RECORD_TYPE (gnu_desig_type) = gnu_object_type;
}
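
/* For illustration: the XUP fat pointer built above is a two-field record,
   conceptually

       struct fat_pointer
       {
	 array_data *p_array;        data component
	 bounds_template *p_bounds;  bounds component
       };

   where the XUB/XUA/XUP/XUT suffixes follow the GNAT debug encodings (see
   exp_dbug.ads); the field type names used here are only placeholders.  */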

/* Return true if we are in the global binding level.  */

bool
global_bindings_p (void)
{
  return force_global || current_function_decl == NULL_TREE;
}

/* Enter a new binding level.  */

void
gnat_pushlevel (void)
{
  struct gnat_binding_level *newlevel = NULL;

  /* Reuse a struct for this binding level, if there is one.  */
  if (free_binding_level)
    {
      newlevel = free_binding_level;
      free_binding_level = free_binding_level->chain;
    }
  else
    newlevel = ggc_alloc<gnat_binding_level> ();

  /* Use a free BLOCK, if any; otherwise, allocate one.  */
  if (free_block_chain)
    {
      newlevel->block = free_block_chain;
      free_block_chain = BLOCK_CHAIN (free_block_chain);
      BLOCK_CHAIN (newlevel->block) = NULL_TREE;
    }
  else
    newlevel->block = make_node (BLOCK);

  /* Point the BLOCK we just made to its parent.  */
  if (current_binding_level)
    BLOCK_SUPERCONTEXT (newlevel->block) = current_binding_level->block;

  BLOCK_VARS (newlevel->block) = NULL_TREE;
  BLOCK_SUBBLOCKS (newlevel->block) = NULL_TREE;
  TREE_USED (newlevel->block) = 1;

  /* Add this level to the front of the chain (stack) of active levels.  */
  newlevel->chain = current_binding_level;
  newlevel->jmpbuf_decl = NULL_TREE;
  current_binding_level = newlevel;
}

/* Set SUPERCONTEXT of the BLOCK for the current binding level to FNDECL
   and point FNDECL to this BLOCK.  */

void
set_current_block_context (tree fndecl)
{
  BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
  DECL_INITIAL (fndecl) = current_binding_level->block;
  set_block_for_group (current_binding_level->block);
}

/* Set the jmpbuf_decl for the current binding level to DECL.  */

void
set_block_jmpbuf_decl (tree decl)
{
  current_binding_level->jmpbuf_decl = decl;
}

/* Get the jmpbuf_decl, if any, for the current binding level.  */

tree
get_block_jmpbuf_decl (void)
{
  return current_binding_level->jmpbuf_decl;
}

/* Exit a binding level.  Set any BLOCK into the current code group.  */

void
gnat_poplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_VARS (block) = nreverse (BLOCK_VARS (block));
  BLOCK_SUBBLOCKS (block) = blocks_nreverse (BLOCK_SUBBLOCKS (block));

  /* If this is a function-level BLOCK, don't do anything.  Otherwise, if
     there are no variables, free the block and merge its subblocks into
     those of its parent block.  Otherwise, add it to the list of its
     parent's subblocks.  */
  if (TREE_CODE (BLOCK_SUPERCONTEXT (block)) == FUNCTION_DECL)
    ;
  else if (BLOCK_VARS (block) == NULL_TREE)
    {
      BLOCK_SUBBLOCKS (level->chain->block)
        = block_chainon (BLOCK_SUBBLOCKS (block),
                         BLOCK_SUBBLOCKS (level->chain->block));
      BLOCK_CHAIN (block) = free_block_chain;
      free_block_chain = block;
    }
  else
    {
      BLOCK_CHAIN (block) = BLOCK_SUBBLOCKS (level->chain->block);
      BLOCK_SUBBLOCKS (level->chain->block) = block;
      TREE_USED (block) = 1;
      set_block_for_group (block);
    }

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}

/* Exit a binding level and discard the associated BLOCK.  */

void
gnat_zaplevel (void)
{
  struct gnat_binding_level *level = current_binding_level;
  tree block = level->block;

  BLOCK_CHAIN (block) = free_block_chain;
  free_block_chain = block;

  /* Free this binding structure.  */
  current_binding_level = level->chain;
  level->chain = free_binding_level;
  free_binding_level = level;
}

/* Set the context of TYPE and its parallel types (if any) to CONTEXT.  */

static void
gnat_set_type_context (tree type, tree context)
{
  tree decl = TYPE_STUB_DECL (type);

  TYPE_CONTEXT (type) = context;

  while (decl && DECL_PARALLEL_TYPE (decl))
    {
      tree parallel_type = DECL_PARALLEL_TYPE (decl);

      /* Give a context to the parallel types and their stub decl, if any.
         Some parallel types seem to be present in multiple parallel type
         chains, so don't mess with their context if they already have one.  */
      if (TYPE_CONTEXT (parallel_type) == NULL_TREE)
        {
          if (TYPE_STUB_DECL (parallel_type) != NULL_TREE)
            DECL_CONTEXT (TYPE_STUB_DECL (parallel_type)) = context;
          TYPE_CONTEXT (parallel_type) = context;
        }

      decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
    }
}

/* Return the innermost scope, starting at GNAT_NODE, that we are interested
   in for debug info purposes, or Empty if there is no such scope.  If not
   NULL, set IS_SUBPROGRAM to whether the returned entity is a subprogram.  */

static Entity_Id
get_debug_scope (Node_Id gnat_node, bool *is_subprogram)
{
  Entity_Id gnat_entity;

  if (is_subprogram)
    *is_subprogram = false;

  if (Nkind (gnat_node) == N_Defining_Identifier)
    gnat_entity = Scope (gnat_node);
  else
    return Empty;

  while (Present (gnat_entity))
    {
      switch (Ekind (gnat_entity))
        {
        case E_Function:
        case E_Procedure:
          if (Present (Protected_Body_Subprogram (gnat_entity)))
            gnat_entity = Protected_Body_Subprogram (gnat_entity);

          /* If the scope is a subprogram, then just rely on
             current_function_decl, so that we don't have to defer
             anything.  This is needed because other places rely on the
             validity of the DECL_CONTEXT attribute of FUNCTION_DECL nodes.  */
          if (is_subprogram)
            *is_subprogram = true;
          return gnat_entity;

        case E_Record_Type:
        case E_Record_Subtype:
          return gnat_entity;

        default:
          /* By default, we are not interested in this particular scope: go to
             the outer one.  */
          break;
        }
      gnat_entity = Scope (gnat_entity);
    }
  return Empty;
}

/* If N is NULL, set TYPE's context to CONTEXT.  Defer this to the processing
   of N otherwise.  */

static void
defer_or_set_type_context (tree type,
                           tree context,
                           struct deferred_decl_context_node *n)
{
  if (n)
    add_deferred_type_context (n, type);
  else
    gnat_set_type_context (type, context);
}

/* Return global_context.  Create it if needed, first.  */

static tree
get_global_context (void)
{
  if (!global_context)
    global_context = build_translation_unit_decl (NULL_TREE);
  return global_context;
}

/* Record DECL as belonging to the current lexical scope and use GNAT_NODE
   for location information and flag propagation.  */

void
gnat_pushdecl (tree decl, Node_Id gnat_node)
{
  tree context = NULL_TREE;
  struct deferred_decl_context_node *deferred_decl_context = NULL;

  /* If explicitly asked to make DECL global or if it's an imported nested
     object, short-circuit the regular Scope-based context computation.  */
  if (!((TREE_PUBLIC (decl) && DECL_EXTERNAL (decl)) || force_global == 1))
    {
      /* Rely on the GNAT scope, or fall back to current_function_decl if
         the GNAT scope reached the global scope, if it reached a subprogram,
         or if the declaration is a subprogram or a variable (for these we
         skip intermediate context types because the subprogram body
         elaboration machinery and the inliner both expect a subprogram
         context).

         Falling back to current_function_decl is necessary for implicit
         subprograms created by gigi, such as the elaboration subprograms.  */
      bool context_is_subprogram = false;
      const Entity_Id gnat_scope
        = get_debug_scope (gnat_node, &context_is_subprogram);

      if (Present (gnat_scope)
          && !context_is_subprogram
          && TREE_CODE (decl) != FUNCTION_DECL
          && TREE_CODE (decl) != VAR_DECL)
        /* Always assume the scope has not been elaborated, thus defer the
           context propagation to the time its elaboration will be
           available.  */
        deferred_decl_context
          = add_deferred_decl_context (decl, gnat_scope, force_global);

      /* External declarations (when force_global > 0) may not be in a
         local context.  */
      else if (current_function_decl != NULL_TREE && force_global == 0)
        context = current_function_decl;
    }

  /* If either we are forced to be in global mode or if both the GNAT scope and
     the current_function_decl did not help determining the context, use the
     global scope.  */
  if (!deferred_decl_context && context == NULL_TREE)
    context = get_global_context ();

  /* Functions imported in another function are not really nested.
     For really nested functions mark them initially as needing
     a static chain for uses of that flag before unnesting;
     lower_nested_functions will then recompute it.  */
  if (TREE_CODE (decl) == FUNCTION_DECL
      && !TREE_PUBLIC (decl)
      && context != NULL_TREE
      && (TREE_CODE (context) == FUNCTION_DECL
          || decl_function_context (context) != NULL_TREE))
    DECL_STATIC_CHAIN (decl) = 1;

  if (!deferred_decl_context)
    DECL_CONTEXT (decl) = context;

  TREE_NO_WARNING (decl) = (No (gnat_node) || Warnings_Off (gnat_node));

  /* Set the location of DECL and emit a declaration for it.  */
  if (Present (gnat_node) && !renaming_from_generic_instantiation_p (gnat_node))
    Sloc_to_locus (Sloc (gnat_node), &DECL_SOURCE_LOCATION (decl));

  add_decl_expr (decl, gnat_node);

  /* Put the declaration on the list.  The list of declarations is in reverse
     order.  The list will be reversed later.  Put global declarations in the
     globals list and local ones in the current block.  But skip TYPE_DECLs
     for UNCONSTRAINED_ARRAY_TYPE in both cases, as they will cause trouble
     with the debugger and aren't needed anyway.  */
  if (!(TREE_CODE (decl) == TYPE_DECL
        && TREE_CODE (TREE_TYPE (decl)) == UNCONSTRAINED_ARRAY_TYPE))
    {
      if (DECL_EXTERNAL (decl))
        {
          if (TREE_CODE (decl) == FUNCTION_DECL && DECL_BUILT_IN (decl))
            vec_safe_push (builtin_decls, decl);
        }
      else if (global_bindings_p ())
        vec_safe_push (global_decls, decl);
      else
        {
          DECL_CHAIN (decl) = BLOCK_VARS (current_binding_level->block);
          BLOCK_VARS (current_binding_level->block) = decl;
        }
    }

  /* For the declaration of a type, set its name if it either is not already
     set or if the previous type name was not derived from a source name.
     We'd rather have the type named with a real name and all the pointer
     types to the same object have the same POINTER_TYPE node.  Code in the
     equivalent function of c-decl.c makes a copy of the type node here, but
     that may cause us trouble with incomplete types.  We make an exception
     for fat pointer types because the compiler automatically builds them
     for unconstrained array types and the debugger uses them to represent
     both these and pointers to these.  */
  if (TREE_CODE (decl) == TYPE_DECL && DECL_NAME (decl))
    {
      tree t = TREE_TYPE (decl);

      if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
        {
          /* Array and pointer types aren't "tagged" types so we force the
             type to be associated with its typedef in the DWARF back-end,
             in order to make sure that the latter is always preserved.  */
          if (!DECL_ARTIFICIAL (decl)
              && (TREE_CODE (t) == ARRAY_TYPE
                  || TREE_CODE (t) == POINTER_TYPE))
            {
              tree tt = build_distinct_type_copy (t);
              if (TREE_CODE (t) == POINTER_TYPE)
                TYPE_NEXT_PTR_TO (t) = tt;
              TYPE_NAME (tt) = DECL_NAME (decl);
              defer_or_set_type_context (tt,
                                         DECL_CONTEXT (decl),
                                         deferred_decl_context);
              TYPE_STUB_DECL (tt) = TYPE_STUB_DECL (t);
              DECL_ORIGINAL_TYPE (decl) = tt;
            }
        }
      else if (TYPE_IS_FAT_POINTER_P (t))
        {
          /* We need a variant for the placeholder machinery to work.  */
          tree tt = build_variant_type_copy (t);
          TYPE_NAME (tt) = decl;
          defer_or_set_type_context (tt,
                                     DECL_CONTEXT (decl),
                                     deferred_decl_context);
          TREE_USED (tt) = TREE_USED (t);
          TREE_TYPE (decl) = tt;
          if (DECL_ORIGINAL_TYPE (TYPE_NAME (t)))
            DECL_ORIGINAL_TYPE (decl) = DECL_ORIGINAL_TYPE (TYPE_NAME (t));
          else
            DECL_ORIGINAL_TYPE (decl) = t;
          DECL_ARTIFICIAL (decl) = 0;
          t = NULL_TREE;
        }
      else if (DECL_ARTIFICIAL (TYPE_NAME (t)) && !DECL_ARTIFICIAL (decl))
        ;
      else
        t = NULL_TREE;

      /* Propagate the name to all the anonymous variants.  This is needed
         for the type qualifiers machinery to work properly.  Also propagate
         the context to them.  Note that the context will be propagated to all
         parallel types too thanks to gnat_set_type_context.  */
      if (t)
        for (t = TYPE_MAIN_VARIANT (t); t; t = TYPE_NEXT_VARIANT (t))
          if (!(TYPE_NAME (t) && TREE_CODE (TYPE_NAME (t)) == TYPE_DECL))
            {
              TYPE_NAME (t) = decl;
              defer_or_set_type_context (t,
                                         DECL_CONTEXT (decl),
                                         deferred_decl_context);
            }
    }
}

/* Create a record type that contains a SIZE bytes long field of TYPE with a
   starting bit position so that it is aligned to ALIGN bits, and leaving at
   least ROOM bytes free before the field.  BASE_ALIGN is the alignment the
   record is guaranteed to get.  GNAT_NODE is used for the position of the
   associated TYPE_DECL.  */

tree
make_aligning_type (tree type, unsigned int align, tree size,
                    unsigned int base_align, int room, Node_Id gnat_node)
{
  /* We will be crafting a record type with one field at a position set to be
     the next multiple of ALIGN past record'address + room bytes.  We use a
     record placeholder to express record'address.  */
  tree record_type = make_node (RECORD_TYPE);
  tree record = build0 (PLACEHOLDER_EXPR, record_type);

  tree record_addr_st
    = convert (sizetype, build_unary_op (ADDR_EXPR, NULL_TREE, record));

  /* The diagram below summarizes the shape of what we manipulate:

                    <--------- pos ---------->
                {  +------------+-------------+-----------------+
      record  =>{  |############|     ...     | field (type)    |
                {  +------------+-------------+-----------------+
                   |<-- room -->|<- voffset ->|<---- size ----->|
                   o            o
                   |            |
                   record_addr  vblock_addr

     Every length is in sizetype bytes there, except "pos" which has to be
     set as a bit position in the GCC tree for the record.  */
  tree room_st = size_int (room);
  tree vblock_addr_st = size_binop (PLUS_EXPR, record_addr_st, room_st);
  tree voffset_st, pos, field;

  tree name = TYPE_IDENTIFIER (type);

  name = concat_name (name, "ALIGN");
  TYPE_NAME (record_type) = name;

  /* Compute VOFFSET and then POS.  The next byte position multiple of some
     alignment after some address is obtained by "and"ing the alignment minus
     1 with the two's complement of the address.  */
  voffset_st = size_binop (BIT_AND_EXPR,
                           fold_build1 (NEGATE_EXPR, sizetype, vblock_addr_st),
                           size_int ((align / BITS_PER_UNIT) - 1));
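
  /* For instance, with ALIGN = 32 bits (4 bytes) and a VBLOCK_ADDR whose
     value is congruent to 1 modulo 4, the negation is congruent to 3
     modulo 4, and "and"ing it with 4 - 1 = 3 yields VOFFSET = 3, which is
     indeed the distance to the next multiple of 4.  The numbers here are
     only an illustration of the formula above.  */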

  /* POS = (ROOM + VOFFSET) * BITS_PER_UNIT, in bitsizetype.  */
  pos = size_binop (MULT_EXPR,
                    convert (bitsizetype,
                             size_binop (PLUS_EXPR, room_st, voffset_st)),
                    bitsize_unit_node);

  /* Craft the GCC record representation.  We exceptionally do everything
     manually here because 1) our generic circuitry is not quite ready to
     handle the complex position/size expressions we are setting up, 2) we
     have a strong simplifying factor at hand: we know the maximum possible
     value of voffset, and 3) we have to set/reset at least the sizes in
     accordance with this maximum value anyway, as we need them to convey
     what should be "alloc"ated for this type.

     Use -1 as the 'addressable' indication for the field to prevent the
     creation of a bitfield.  We don't need one, it would have damaging
     consequences on the alignment computation, and create_field_decl would
     make one without this special argument, for instance because of the
     complex position expression.  */
  field = create_field_decl (get_identifier ("F"), type, record_type, size,
                             pos, 1, -1);
  TYPE_FIELDS (record_type) = field;

  TYPE_ALIGN (record_type) = base_align;
  TYPE_USER_ALIGN (record_type) = 1;

  TYPE_SIZE (record_type)
    = size_binop (PLUS_EXPR,
                  size_binop (MULT_EXPR, convert (bitsizetype, size),
                              bitsize_unit_node),
                  bitsize_int (align + room * BITS_PER_UNIT));
  TYPE_SIZE_UNIT (record_type)
    = size_binop (PLUS_EXPR, size,
                  size_int (room + align / BITS_PER_UNIT));

  SET_TYPE_MODE (record_type, BLKmode);
  relate_alias_sets (record_type, type, ALIAS_SET_COPY);

  /* Declare it now since it will never be declared otherwise.  This is
     necessary to ensure that its subtrees are properly marked.  */
  create_type_decl (name, record_type, true, false, gnat_node);

  return record_type;
}

/* TYPE is a RECORD_TYPE, UNION_TYPE or QUAL_UNION_TYPE that is being used
   as the field type of a packed record if IN_RECORD is true, or as the
   component type of a packed array if IN_RECORD is false.  See if we can
   rewrite it either as a type that has a non-BLKmode, which we can pack
   tighter in the packed record case, or as a smaller type.  If so, return
   the new type.  If not, return the original type.  */

tree
make_packable_type (tree type, bool in_record)
{
  unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE (type));
  unsigned HOST_WIDE_INT new_size;
  tree new_type, old_field, field_list = NULL_TREE;
  unsigned int align;

  /* No point in doing anything if the size is zero.  */
  if (size == 0)
    return type;

  new_type = make_node (TREE_CODE (type));

  /* Copy the name and flags from the old type to that of the new.
     Note that we rely on the pointer equality created here for
     TYPE_NAME to look through conversions in various places.  */
  TYPE_NAME (new_type) = TYPE_NAME (type);
  TYPE_JUSTIFIED_MODULAR_P (new_type) = TYPE_JUSTIFIED_MODULAR_P (type);
  TYPE_CONTAINS_TEMPLATE_P (new_type) = TYPE_CONTAINS_TEMPLATE_P (type);
  if (TREE_CODE (type) == RECORD_TYPE)
    TYPE_PADDING_P (new_type) = TYPE_PADDING_P (type);

  /* If we are in a record and have a small size, set the alignment to
     try for an integral mode.  Otherwise set it to try for a smaller
     type with BLKmode.  */
  if (in_record && size <= MAX_FIXED_MODE_SIZE)
    {
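      /* For instance (illustrative numbers only), a 9-bit size yields
         ALIGN = 16 via ceil_pow2, and the masking below rounds NEW_SIZE
         up to 16, the next multiple of that alignment.  */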
      align = ceil_pow2 (size);
      TYPE_ALIGN (new_type) = align;
      new_size = (size + align - 1) & -align;
    }
  else
    {
      unsigned HOST_WIDE_INT align;

      /* Do not try to shrink the size if the RM size is not constant.  */
      if (TYPE_CONTAINS_TEMPLATE_P (type)
          || !tree_fits_uhwi_p (TYPE_ADA_SIZE (type)))
        return type;

      /* Round the RM size up to a unit boundary to get the minimal size
         for a BLKmode record.  Give up if it's already the size.  */
      new_size = tree_to_uhwi (TYPE_ADA_SIZE (type));
      new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
      if (new_size == size)
        return type;

      align = new_size & -new_size;
      TYPE_ALIGN (new_type) = MIN (TYPE_ALIGN (type), align);
    }

  TYPE_USER_ALIGN (new_type) = 1;

  /* Now copy the fields, keeping the position and size as we don't want
     to change the layout by propagating the packedness downwards.  */
  for (old_field = TYPE_FIELDS (type); old_field;
       old_field = DECL_CHAIN (old_field))
    {
      tree new_field_type = TREE_TYPE (old_field);
      tree new_field, new_size;

      if (RECORD_OR_UNION_TYPE_P (new_field_type)
          && !TYPE_FAT_POINTER_P (new_field_type)
          && tree_fits_uhwi_p (TYPE_SIZE (new_field_type)))
        new_field_type = make_packable_type (new_field_type, true);

      /* However, for the last field in a not already packed record type
         that is of an aggregate type, we need to use the RM size in the
         packable version of the record type, see finish_record_type.  */
      if (!DECL_CHAIN (old_field)
          && !TYPE_PACKED (type)
          && RECORD_OR_UNION_TYPE_P (new_field_type)
          && !TYPE_FAT_POINTER_P (new_field_type)
          && !TYPE_CONTAINS_TEMPLATE_P (new_field_type)
          && TYPE_ADA_SIZE (new_field_type))
        new_size = TYPE_ADA_SIZE (new_field_type);
      else
        new_size = DECL_SIZE (old_field);

      new_field
        = create_field_decl (DECL_NAME (old_field), new_field_type, new_type,
                             new_size, bit_position (old_field),
                             TYPE_PACKED (type),
                             !DECL_NONADDRESSABLE_P (old_field));

      DECL_INTERNAL_P (new_field) = DECL_INTERNAL_P (old_field);
      SET_DECL_ORIGINAL_FIELD_TO_FIELD (new_field, old_field);
      if (TREE_CODE (new_type) == QUAL_UNION_TYPE)
        DECL_QUALIFIER (new_field) = DECL_QUALIFIER (old_field);

      DECL_CHAIN (new_field) = field_list;
      field_list = new_field;
    }

  finish_record_type (new_type, nreverse (field_list), 2, false);
  relate_alias_sets (new_type, type, ALIAS_SET_COPY);
  if (TYPE_STUB_DECL (type))
    SET_DECL_PARALLEL_TYPE (TYPE_STUB_DECL (new_type),
                            DECL_PARALLEL_TYPE (TYPE_STUB_DECL (type)));

  /* If this is a padding record, we never want to make the size smaller
     than what was specified.  For QUAL_UNION_TYPE, also copy the size.  */
  if (TYPE_IS_PADDING_P (type) || TREE_CODE (type) == QUAL_UNION_TYPE)
    {
      TYPE_SIZE (new_type) = TYPE_SIZE (type);
      TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (type);
      new_size = size;
    }
  else
    {
      TYPE_SIZE (new_type) = bitsize_int (new_size);
      TYPE_SIZE_UNIT (new_type)
        = size_int ((new_size + BITS_PER_UNIT - 1) / BITS_PER_UNIT);
    }

  if (!TYPE_CONTAINS_TEMPLATE_P (type))
    SET_TYPE_ADA_SIZE (new_type, TYPE_ADA_SIZE (type));

  compute_record_mode (new_type);

  /* Try harder to get a packable type if necessary, for example
     in case the record itself contains a BLKmode field.  */
  if (in_record && TYPE_MODE (new_type) == BLKmode)
    SET_TYPE_MODE (new_type,
                   mode_for_size_tree (TYPE_SIZE (new_type), MODE_INT, 1));

  /* If neither the mode nor the size has shrunk, return the old type.  */
  if (TYPE_MODE (new_type) == BLKmode && new_size >= size)
    return type;

  return new_type;
}

/* Given a type TYPE, return a new type whose size is appropriate for SIZE.
   If TYPE is the best type, return it.  Otherwise, make a new type.  We
   only support new integral and pointer types.  FOR_BIASED is true if
   we are making a biased type.  */

tree
make_type_from_size (tree type, tree size_tree, bool for_biased)
{
  unsigned HOST_WIDE_INT size;
  bool biased_p;
  tree new_type;

  /* If size indicates an error, just return TYPE to avoid propagating
     the error.  Likewise if it's too large to represent.  */
  if (!size_tree || !tree_fits_uhwi_p (size_tree))
    return type;

  size = tree_to_uhwi (size_tree);

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:
    case ENUMERAL_TYPE:
    case BOOLEAN_TYPE:
      biased_p = (TREE_CODE (type) == INTEGER_TYPE
                  && TYPE_BIASED_REPRESENTATION_P (type));

      /* Integer types with precision 0 are forbidden.  */
      if (size == 0)
        size = 1;

      /* Only do something if the type isn't a packed array type and doesn't
         already have the proper size and the size isn't too large.  */
      if (TYPE_IS_PACKED_ARRAY_TYPE_P (type)
          || (TYPE_PRECISION (type) == size && biased_p == for_biased)
          || size > LONG_LONG_TYPE_SIZE)
        break;

      biased_p |= for_biased;
      if (TYPE_UNSIGNED (type) || biased_p)
        new_type = make_unsigned_type (size);
      else
        new_type = make_signed_type (size);
      TREE_TYPE (new_type) = TREE_TYPE (type) ? TREE_TYPE (type) : type;
      SET_TYPE_RM_MIN_VALUE (new_type, TYPE_MIN_VALUE (type));
      SET_TYPE_RM_MAX_VALUE (new_type, TYPE_MAX_VALUE (type));
      /* Copy the name to show that it's essentially the same type and
         not a subrange type.  */
      TYPE_NAME (new_type) = TYPE_NAME (type);
      TYPE_BIASED_REPRESENTATION_P (new_type) = biased_p;
      SET_TYPE_RM_SIZE (new_type, bitsize_int (size));
      return new_type;

    case RECORD_TYPE:
      /* Do something if this is a fat pointer, in which case we
         may need to return the thin pointer.  */
      if (TYPE_FAT_POINTER_P (type) && size < POINTER_SIZE * 2)
        {
          enum machine_mode p_mode = mode_for_size (size, MODE_INT, 0);
          if (!targetm.valid_pointer_mode (p_mode))
            p_mode = ptr_mode;
          return
            build_pointer_type_for_mode
              (TYPE_OBJECT_RECORD_TYPE (TYPE_UNCONSTRAINED_ARRAY (type)),
               p_mode, 0);
        }
      break;

    case POINTER_TYPE:
      /* Only do something if this is a thin pointer, in which case we
         may need to return the fat pointer.  */
      if (TYPE_IS_THIN_POINTER_P (type) && size >= POINTER_SIZE * 2)
        return
          build_pointer_type (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)));
      break;

    default:
      break;
    }

  return type;
}
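
/* For instance, a representation clause specifying 5 bits for a signed
   integer component would lead the INTEGER_TYPE case above to build a
   5-bit signed type with the original bounds and name copied over; the
   numbers here are only an illustration.  */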

/* See if the data pointed to by the hash table slot is marked.  */

static int
pad_type_hash_marked_p (const void *p)
{
  const_tree const type = ((const struct pad_type_hash *) p)->type;

  return ggc_marked_p (type);
}

/* Return the cached hash value.  */

static hashval_t
pad_type_hash_hash (const void *p)
{
  return ((const struct pad_type_hash *) p)->hash;
}

/* Return 1 iff the padded types are equivalent.  */

static int
pad_type_hash_eq (const void *p1, const void *p2)
{
  const struct pad_type_hash *const t1 = (const struct pad_type_hash *) p1;
  const struct pad_type_hash *const t2 = (const struct pad_type_hash *) p2;
  tree type1, type2;

  if (t1->hash != t2->hash)
    return 0;

  type1 = t1->type;
  type2 = t2->type;

  /* We consider that the padded types are equivalent if they pad the same
     type and have the same size, alignment and RM size.  Taking the mode
     into account is redundant since it is determined by the others.  */
  return
    TREE_TYPE (TYPE_FIELDS (type1)) == TREE_TYPE (TYPE_FIELDS (type2))
    && TYPE_SIZE (type1) == TYPE_SIZE (type2)
    && TYPE_ALIGN (type1) == TYPE_ALIGN (type2)
    && TYPE_ADA_SIZE (type1) == TYPE_ADA_SIZE (type2);
}

/* Look up the padded TYPE in the hash table and return its canonical version
   if it exists; otherwise, insert it into the hash table.  */

static tree
lookup_and_insert_pad_type (tree type)
{
  hashval_t hashcode;
  struct pad_type_hash in, *h;
  void **loc;

  hashcode
    = iterative_hash_object (TYPE_HASH (TREE_TYPE (TYPE_FIELDS (type))), 0);
  hashcode = iterative_hash_expr (TYPE_SIZE (type), hashcode);
  hashcode = iterative_hash_hashval_t (TYPE_ALIGN (type), hashcode);
  hashcode = iterative_hash_expr (TYPE_ADA_SIZE (type), hashcode);

  in.hash = hashcode;
  in.type = type;
  h = (struct pad_type_hash *)
      htab_find_with_hash (pad_type_hash_table, &in, hashcode);
  if (h)
    return h->type;

  h = ggc_alloc<pad_type_hash> ();
  h->hash = hashcode;
  h->type = type;
  loc = htab_find_slot_with_hash (pad_type_hash_table, h, hashcode, INSERT);
  *loc = (void *)h;
  return NULL_TREE;
}
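
/* For illustration, the intended calling pattern (as used by maybe_pad_type
   below) is:

       canonical = lookup_and_insert_pad_type (record);
       if (canonical)
	 record = canonical;    reuse the equivalent padded type
       else
	 ...                    RECORD is now the canonical version

   so each distinct (base type, size, alignment, RM size) combination is
   represented by a single RECORD_TYPE node.  */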

/* Ensure that TYPE has SIZE and ALIGN.  Make and return a new padded type
   if needed.  We have already verified that SIZE and ALIGN are large enough.
   GNAT_ENTITY is used to name the resulting record and to issue a warning.
   IS_COMPONENT_TYPE is true if this is being done for the component type of
   an array.  IS_USER_TYPE is true if the original type needs to be completed.
   DEFINITION is true if this type is being defined.  SET_RM_SIZE is true if
   the RM size of the resulting type is to be set to SIZE too.  */

tree
maybe_pad_type (tree type, tree size, unsigned int align,
                Entity_Id gnat_entity, bool is_component_type,
                bool is_user_type, bool definition, bool set_rm_size)
{
  tree orig_size = TYPE_SIZE (type);
  unsigned int orig_align = TYPE_ALIGN (type);
  tree record, field;

  /* If TYPE is a padded type, see if it agrees with any size and alignment
     we were given.  If so, return the original type.  Otherwise, strip
     off the padding, since we will either be returning the inner type
     or repadding it.  If no size or alignment is specified, use that of
     the original padded type.  */
  if (TYPE_IS_PADDING_P (type))
    {
      if ((!size
           || operand_equal_p (round_up (size, orig_align), orig_size, 0))
          && (align == 0 || align == orig_align))
        return type;

      if (!size)
        size = orig_size;
      if (align == 0)
        align = orig_align;

      type = TREE_TYPE (TYPE_FIELDS (type));
      orig_size = TYPE_SIZE (type);
      orig_align = TYPE_ALIGN (type);
    }

  /* If the size is either not being changed or is being made smaller (which
     is not done here and is only valid for bitfields anyway), show the size
     isn't changing.  Likewise, clear the alignment if it isn't being
     changed.  Then return if we aren't doing anything.  */
  if (size
      && (operand_equal_p (size, orig_size, 0)
          || (TREE_CODE (orig_size) == INTEGER_CST
              && tree_int_cst_lt (size, orig_size))))
    size = NULL_TREE;

  if (align == orig_align)
    align = 0;

  if (align == 0 && !size)
    return type;

  /* If requested, complete the original type and give it a name.  */
  if (is_user_type)
    create_type_decl (get_entity_name (gnat_entity), type,
                      !Comes_From_Source (gnat_entity),
                      !(TYPE_NAME (type)
                        && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
                        && DECL_IGNORED_P (TYPE_NAME (type))),
                      gnat_entity);

  /* We used to modify the record in place in some cases, but that could
     generate incorrect debugging information.  So make a new record
     type and name.  */
  record = make_node (RECORD_TYPE);
  TYPE_PADDING_P (record) = 1;

  if (Present (gnat_entity))
    TYPE_NAME (record) = create_concat_name (gnat_entity, "PAD");

  TYPE_ALIGN (record) = align ? align : orig_align;
  TYPE_SIZE (record) = size ? size : orig_size;
  TYPE_SIZE_UNIT (record)
    = convert (sizetype,
               size_binop (CEIL_DIV_EXPR, TYPE_SIZE (record),
                           bitsize_unit_node));

  /* If we are changing the alignment and the input type is a record with
     BLKmode and a small constant size, try to make a form that has an
     integral mode.  This might allow the padding record to also have an
     integral mode, which will be much more efficient.  There is no point
     in doing so if a size is specified unless it is also a small constant
     size and it is incorrect to do so if we cannot guarantee that the mode
     will be naturally aligned since the field must always be addressable.

     ??? This might not always be a win when done for a stand-alone object:
     since the nominal and the effective type of the object will now have
     different modes, a VIEW_CONVERT_EXPR will be required for converting
     between them and it might be hard to overcome afterwards, including
     at the RTL level when the stand-alone object is accessed as a whole.  */
  if (align != 0
      && RECORD_OR_UNION_TYPE_P (type)
      && TYPE_MODE (type) == BLKmode
      && !TYPE_BY_REFERENCE_P (type)
      && TREE_CODE (orig_size) == INTEGER_CST
      && !TREE_OVERFLOW (orig_size)
      && compare_tree_int (orig_size, MAX_FIXED_MODE_SIZE) <= 0
      && (!size
          || (TREE_CODE (size) == INTEGER_CST
              && compare_tree_int (size, MAX_FIXED_MODE_SIZE) <= 0)))
    {
      tree packable_type = make_packable_type (type, true);
      if (TYPE_MODE (packable_type) != BLKmode
          && align >= TYPE_ALIGN (packable_type))
        type = packable_type;
    }

  /* Now create the field with the original size.  */
  field = create_field_decl (get_identifier ("F"), type, record, orig_size,
                             bitsize_zero_node, 0, 1);
  DECL_INTERNAL_P (field) = 1;

  /* Do not emit debug info until after the auxiliary record is built.  */
  finish_record_type (record, field, 1, false);

  /* Set the RM size if requested.  */
  if (set_rm_size)
    {
      tree canonical_pad_type;

      SET_TYPE_ADA_SIZE (record, size ? size : orig_size);

      /* If the padded type is complete and has constant size, we canonicalize
         it by means of the hash table.  This is consistent with the language
         semantics and ensures that gigi and the middle-end have a common view
         of these padded types.  */
      if (TREE_CONSTANT (TYPE_SIZE (record))
          && (canonical_pad_type = lookup_and_insert_pad_type (record)))
        {
          record = canonical_pad_type;
          goto built;
        }
    }

  /* If debugging information is being written for the input type, write a
     record that shows what we are a subtype of and also make a variable
     that indicates our size, if still variable.  */
1359 if (TREE_CODE (orig_size) != INTEGER_CST
1360 && TYPE_NAME (record)
1361 && TYPE_NAME (type)
1362 && !(TREE_CODE (TYPE_NAME (type)) == TYPE_DECL
1363 && DECL_IGNORED_P (TYPE_NAME (type))))
1364 {
1365 tree marker = make_node (RECORD_TYPE);
1366 tree name = TYPE_IDENTIFIER (record);
1367 tree orig_name = TYPE_IDENTIFIER (type);
1368
1369 TYPE_NAME (marker) = concat_name (name, "XVS");
1370 finish_record_type (marker,
1371 create_field_decl (orig_name,
1372 build_reference_type (type),
1373 marker, NULL_TREE, NULL_TREE,
1374 0, 0),
1375 0, true);
1376
1377 add_parallel_type (record, marker);
1378
1379 if (definition && size && TREE_CODE (size) != INTEGER_CST)
1380 TYPE_SIZE_UNIT (marker)
1381 = create_var_decl (concat_name (name, "XVZ"), NULL_TREE, sizetype,
1382 TYPE_SIZE_UNIT (record), false, false, false,
1383 false, NULL, gnat_entity);
1384 }
1385
1386 rest_of_record_type_compilation (record);
1387
1388 built:
1389 /* If the size was widened explicitly, maybe give a warning. Take the
1390 original size as the maximum size of the input if there was an
1391 unconstrained record involved and round it up to the specified alignment,
1392 if one was specified. But don't do it if we are just annotating types
1393 and the type is tagged, since tagged types aren't fully laid out in this
1394 mode. */
1395 if (!size
1396 || TREE_CODE (size) == COND_EXPR
1397 || TREE_CODE (size) == MAX_EXPR
1398 || No (gnat_entity)
1399 || (type_annotate_only && Is_Tagged_Type (Etype (gnat_entity))))
1400 return record;
1401
1402 if (CONTAINS_PLACEHOLDER_P (orig_size))
1403 orig_size = max_size (orig_size, true);
1404
1405 if (align)
1406 orig_size = round_up (orig_size, align);
1407
1408 if (!operand_equal_p (size, orig_size, 0)
1409 && !(TREE_CODE (size) == INTEGER_CST
1410 && TREE_CODE (orig_size) == INTEGER_CST
1411 && (TREE_OVERFLOW (size)
1412 || TREE_OVERFLOW (orig_size)
1413 || tree_int_cst_lt (size, orig_size))))
1414 {
1415 Node_Id gnat_error_node = Empty;
1416
1417 /* For a packed array, post the message on the original array type. */
1418 if (Is_Packed_Array_Impl_Type (gnat_entity))
1419 gnat_entity = Original_Array_Type (gnat_entity);
1420
1421 if ((Ekind (gnat_entity) == E_Component
1422 || Ekind (gnat_entity) == E_Discriminant)
1423 && Present (Component_Clause (gnat_entity)))
1424 gnat_error_node = Last_Bit (Component_Clause (gnat_entity));
1425 else if (Present (Size_Clause (gnat_entity)))
1426 gnat_error_node = Expression (Size_Clause (gnat_entity));
1427
1428 /* Generate message only for entities that come from source, since
1429 if we have an entity created by expansion, the message will be
1430 generated for some other corresponding source entity. */
1431 if (Comes_From_Source (gnat_entity))
1432 {
1433 if (Present (gnat_error_node))
1434 post_error_ne_tree ("{^ }bits of & unused?",
1435 gnat_error_node, gnat_entity,
1436 size_diffop (size, orig_size));
1437 else if (is_component_type)
1438 post_error_ne_tree ("component of& padded{ by ^ bits}?",
1439 gnat_entity, gnat_entity,
1440 size_diffop (size, orig_size));
1441 }
1442 }
1443
1444 return record;
1445 }
1446 \f
1447 /* Relate the alias sets of GNU_NEW_TYPE and GNU_OLD_TYPE according to OP.
1448 If this is a multi-dimensional array type, do this recursively.
1449
1450 OP may be
1451 - ALIAS_SET_COPY: the new set is made a copy of the old one.
1452 - ALIAS_SET_SUPERSET: the new set is made a superset of the old one.
1453 - ALIAS_SET_SUBSET: the new set is made a subset of the old one. */
1454
1455 void
1456 relate_alias_sets (tree gnu_new_type, tree gnu_old_type, enum alias_set_op op)
1457 {
1458 /* Remove any padding from GNU_OLD_TYPE. It doesn't matter in the case
1459 of a one-dimensional array, since the padding has the same alias set
1460 as the field type, but if it's a multi-dimensional array, we need to
1461 see the inner types. */
1462 while (TREE_CODE (gnu_old_type) == RECORD_TYPE
1463 && (TYPE_JUSTIFIED_MODULAR_P (gnu_old_type)
1464 || TYPE_PADDING_P (gnu_old_type)))
1465 gnu_old_type = TREE_TYPE (TYPE_FIELDS (gnu_old_type));
1466
1467 /* Unconstrained array types are deemed incomplete and would thus be given
1468 alias set 0. Retrieve the underlying array type. */
1469 if (TREE_CODE (gnu_old_type) == UNCONSTRAINED_ARRAY_TYPE)
1470 gnu_old_type
1471 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_old_type))));
1472 if (TREE_CODE (gnu_new_type) == UNCONSTRAINED_ARRAY_TYPE)
1473 gnu_new_type
1474 = TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (gnu_new_type))));
1475
1476 if (TREE_CODE (gnu_new_type) == ARRAY_TYPE
1477 && TREE_CODE (TREE_TYPE (gnu_new_type)) == ARRAY_TYPE
1478 && TYPE_MULTI_ARRAY_P (TREE_TYPE (gnu_new_type)))
1479 relate_alias_sets (TREE_TYPE (gnu_new_type), TREE_TYPE (gnu_old_type), op);
1480
1481 switch (op)
1482 {
1483 case ALIAS_SET_COPY:
1484 /* The alias set shouldn't be copied between array types with different
1485 aliasing settings because this can break the aliasing relationship
1486 between the array type and its element type. */
1487 #ifndef ENABLE_CHECKING
1488 if (flag_strict_aliasing)
1489 #endif
1490 gcc_assert (!(TREE_CODE (gnu_new_type) == ARRAY_TYPE
1491 && TREE_CODE (gnu_old_type) == ARRAY_TYPE
1492 && TYPE_NONALIASED_COMPONENT (gnu_new_type)
1493 != TYPE_NONALIASED_COMPONENT (gnu_old_type)));
1494
1495 TYPE_ALIAS_SET (gnu_new_type) = get_alias_set (gnu_old_type);
1496 break;
1497
1498 case ALIAS_SET_SUBSET:
1499 case ALIAS_SET_SUPERSET:
1500 {
1501 alias_set_type old_set = get_alias_set (gnu_old_type);
1502 alias_set_type new_set = get_alias_set (gnu_new_type);
1503
1504 /* Do nothing if the alias sets conflict. This ensures that we
1505 never call record_alias_subset several times for the same pair
1506 or at all for alias set 0. */
1507 if (!alias_sets_conflict_p (old_set, new_set))
1508 {
1509 if (op == ALIAS_SET_SUBSET)
1510 record_alias_subset (old_set, new_set);
1511 else
1512 record_alias_subset (new_set, old_set);
1513 }
1514 }
1515 break;
1516
1517 default:
1518 gcc_unreachable ();
1519 }
1520
1521 record_component_aliases (gnu_new_type);
1522 }
1523 \f
1524 /* Record TYPE as a builtin type for Ada. NAME is the name of the type.
1525 ARTIFICIAL_P is true if it's a type that was generated by the compiler. */
1526
1527 void
1528 record_builtin_type (const char *name, tree type, bool artificial_p)
1529 {
1530 tree type_decl = build_decl (input_location,
1531 TYPE_DECL, get_identifier (name), type);
1532 DECL_ARTIFICIAL (type_decl) = artificial_p;
1533 TYPE_ARTIFICIAL (type) = artificial_p;
1534 gnat_pushdecl (type_decl, Empty);
1535
1536 if (debug_hooks->type_decl)
1537 debug_hooks->type_decl (type_decl, false);
1538 }
1539 \f
1540 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1541 finish constructing the record type as a fat pointer type. */
1542
1543 void
1544 finish_fat_pointer_type (tree record_type, tree field_list)
1545 {
1546 /* Make sure we can put it into a register. */
1547 if (STRICT_ALIGNMENT)
1548 TYPE_ALIGN (record_type) = MIN (BIGGEST_ALIGNMENT, 2 * POINTER_SIZE);
1549
1550 /* Show what it really is. */
1551 TYPE_FAT_POINTER_P (record_type) = 1;
1552
1553 /* Do not emit debug info for it since the types of its fields may still be
1554 incomplete at this point. */
1555 finish_record_type (record_type, field_list, 0, false);
1556
1557 /* Force type_contains_placeholder_p to return true on it. Although the
1558 PLACEHOLDER_EXPRs are referenced only indirectly, this isn't a pointer
1559 type but the representation of the unconstrained array. */
1560 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (record_type) = 2;
1561 }
1562
1563 /* Given a record type RECORD_TYPE and a list of FIELD_DECL nodes FIELD_LIST,
1564 finish constructing the record or union type. If REP_LEVEL is zero, this
1565 record has no representation clause and so will be entirely laid out here.
1566 If REP_LEVEL is one, this record has a representation clause and has been
1567 laid out already; only set the sizes and alignment. If REP_LEVEL is two,
1568 this record is derived from a parent record and thus inherits its layout;
1569 only make a pass on the fields to finalize them. DEBUG_INFO_P is true if
1570 we need to write debug information about this type. */
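/* An illustrative recap of the three modes described above, with
   hypothetical variables t and fields:

     finish_record_type (t, fields, 0, true);  -- lay T out entirely here
     finish_record_type (t, fields, 1, true);  -- rep clause: set sizes and alignment
     finish_record_type (t, fields, 2, true);  -- inherited layout: finalize fields only  */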
1571
1572 void
1573 finish_record_type (tree record_type, tree field_list, int rep_level,
1574 bool debug_info_p)
1575 {
1576 enum tree_code code = TREE_CODE (record_type);
1577 tree name = TYPE_IDENTIFIER (record_type);
1578 tree ada_size = bitsize_zero_node;
1579 tree size = bitsize_zero_node;
1580 bool had_size = TYPE_SIZE (record_type) != 0;
1581 bool had_size_unit = TYPE_SIZE_UNIT (record_type) != 0;
1582 bool had_align = TYPE_ALIGN (record_type) != 0;
1583 tree field;
1584
1585 TYPE_FIELDS (record_type) = field_list;
1586
1587 /* Always attach the TYPE_STUB_DECL for a record type. It is required to
1588 generate debug info and have a parallel type. */
1589 TYPE_STUB_DECL (record_type) = create_type_stub_decl (name, record_type);
1590
1591 /* Globally initialize the record first. If this is a rep'ed record,
1592 that just means some initializations; otherwise, lay out the record. */
1593 if (rep_level > 0)
1594 {
1595 TYPE_ALIGN (record_type) = MAX (BITS_PER_UNIT, TYPE_ALIGN (record_type));
1596
1597 if (!had_size_unit)
1598 TYPE_SIZE_UNIT (record_type) = size_zero_node;
1599
1600 if (!had_size)
1601 TYPE_SIZE (record_type) = bitsize_zero_node;
1602
1603 /* For all-repped records with a size specified, lay the QUAL_UNION_TYPE
1604 out just like a UNION_TYPE, since the size will be fixed. */
1605 else if (code == QUAL_UNION_TYPE)
1606 code = UNION_TYPE;
1607 }
1608 else
1609 {
1610 /* Ensure there isn't a size already set. One can be set in an error
1611 case where there is a rep clause but all fields have errors and
1612 no longer have a position. */
1613 TYPE_SIZE (record_type) = 0;
1614
1615 /* Ensure we use the traditional GCC layout for bitfields when we need
1616 to pack the record type or have a representation clause. The other
1617 possible layout (Microsoft C compiler), if available, would prevent
1618 efficient packing in almost all cases. */
1619 #ifdef TARGET_MS_BITFIELD_LAYOUT
1620 if (TARGET_MS_BITFIELD_LAYOUT && TYPE_PACKED (record_type))
1621 decl_attributes (&record_type,
1622 tree_cons (get_identifier ("gcc_struct"),
1623 NULL_TREE, NULL_TREE),
1624 ATTR_FLAG_TYPE_IN_PLACE);
1625 #endif
1626
1627 layout_type (record_type);
1628 }
1629
1630 /* At this point, the position and size of each field is known. It was
1631 either set before entry by a rep clause, or by laying out the type above.
1632
1633 We now run a pass over the fields (in reverse order for QUAL_UNION_TYPEs)
1634 to compute the Ada size; the GCC size and alignment (for rep'ed records
1635 that are not padding types); and the mode (for rep'ed records). We also
1636 clear the DECL_BIT_FIELD indication for the cases we know have not been
1637 handled yet, and adjust DECL_NONADDRESSABLE_P accordingly. */
1638
1639 if (code == QUAL_UNION_TYPE)
1640 field_list = nreverse (field_list);
1641
1642 for (field = field_list; field; field = DECL_CHAIN (field))
1643 {
1644 tree type = TREE_TYPE (field);
1645 tree pos = bit_position (field);
1646 tree this_size = DECL_SIZE (field);
1647 tree this_ada_size;
1648
1649 if (RECORD_OR_UNION_TYPE_P (type)
1650 && !TYPE_FAT_POINTER_P (type)
1651 && !TYPE_CONTAINS_TEMPLATE_P (type)
1652 && TYPE_ADA_SIZE (type))
1653 this_ada_size = TYPE_ADA_SIZE (type);
1654 else
1655 this_ada_size = this_size;
1656
1657 /* Clear DECL_BIT_FIELD for the cases layout_decl does not handle. */
1658 if (DECL_BIT_FIELD (field)
1659 && operand_equal_p (this_size, TYPE_SIZE (type), 0))
1660 {
1661 unsigned int align = TYPE_ALIGN (type);
1662
1663 /* In the general case, type alignment is required. */
1664 if (value_factor_p (pos, align))
1665 {
1666 /* The enclosing record type must be sufficiently aligned.
1667 Otherwise, if no alignment was specified for it and it
1668 has been laid out already, bump its alignment to the
1669 desired one if this is compatible with its size. */
1670 if (TYPE_ALIGN (record_type) >= align)
1671 {
1672 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
1673 DECL_BIT_FIELD (field) = 0;
1674 }
1675 else if (!had_align
1676 && rep_level == 0
1677 && value_factor_p (TYPE_SIZE (record_type), align))
1678 {
1679 TYPE_ALIGN (record_type) = align;
1680 DECL_ALIGN (field) = MAX (DECL_ALIGN (field), align);
1681 DECL_BIT_FIELD (field) = 0;
1682 }
1683 }
1684
1685 /* In the non-strict alignment case, only byte alignment is required. */
1686 if (!STRICT_ALIGNMENT
1687 && DECL_BIT_FIELD (field)
1688 && value_factor_p (pos, BITS_PER_UNIT))
1689 DECL_BIT_FIELD (field) = 0;
1690 }
1691
1692 /* If we still have DECL_BIT_FIELD set at this point, we know that the
1693 field is technically not addressable. Except that it can actually
1694 be addressed if it is BLKmode and happens to be properly aligned. */
1695 if (DECL_BIT_FIELD (field)
1696 && !(DECL_MODE (field) == BLKmode
1697 && value_factor_p (pos, BITS_PER_UNIT)))
1698 DECL_NONADDRESSABLE_P (field) = 1;
1699
1700 /* A type must be as aligned as its most aligned field that is not
1701 a bit-field. But this is already enforced by layout_type. */
1702 if (rep_level > 0 && !DECL_BIT_FIELD (field))
1703 TYPE_ALIGN (record_type)
1704 = MAX (TYPE_ALIGN (record_type), DECL_ALIGN (field));
1705
1706 switch (code)
1707 {
1708 case UNION_TYPE:
1709 ada_size = size_binop (MAX_EXPR, ada_size, this_ada_size);
1710 size = size_binop (MAX_EXPR, size, this_size);
1711 break;
1712
1713 case QUAL_UNION_TYPE:
1714 ada_size
1715 = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1716 this_ada_size, ada_size);
1717 size = fold_build3 (COND_EXPR, bitsizetype, DECL_QUALIFIER (field),
1718 this_size, size);
1719 break;
1720
1721 case RECORD_TYPE:
1722 /* Since we know here that all fields are sorted in order of
1723 increasing bit position, the size of the record is one
1724 higher than the ending bit of the last field processed
1725 unless we have a rep clause, since in that case we might
1726 have a field outside a QUAL_UNION_TYPE that has a higher ending
1727 position. So use a MAX in that case. Also, if this field is a
1728 QUAL_UNION_TYPE, we need to take into account the previous size in
1729 the case of empty variants. */
1730 ada_size
1731 = merge_sizes (ada_size, pos, this_ada_size,
1732 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1733 size
1734 = merge_sizes (size, pos, this_size,
1735 TREE_CODE (type) == QUAL_UNION_TYPE, rep_level > 0);
1736 break;
1737
1738 default:
1739 gcc_unreachable ();
1740 }
1741 }
1742
1743 if (code == QUAL_UNION_TYPE)
1744 nreverse (field_list);
1745
1746 if (rep_level < 2)
1747 {
1748 /* If this is a padding record, we never want to make the size smaller
1749 than what was specified in it, if any. */
1750 if (TYPE_IS_PADDING_P (record_type) && TYPE_SIZE (record_type))
1751 size = TYPE_SIZE (record_type);
1752
1753 /* Now set any of the values we've just computed that apply. */
1754 if (!TYPE_FAT_POINTER_P (record_type)
1755 && !TYPE_CONTAINS_TEMPLATE_P (record_type))
1756 SET_TYPE_ADA_SIZE (record_type, ada_size);
1757
1758 if (rep_level > 0)
1759 {
1760 tree size_unit = had_size_unit
1761 ? TYPE_SIZE_UNIT (record_type)
1762 : convert (sizetype,
1763 size_binop (CEIL_DIV_EXPR, size,
1764 bitsize_unit_node));
1765 unsigned int align = TYPE_ALIGN (record_type);
1766
1767 TYPE_SIZE (record_type) = variable_size (round_up (size, align));
1768 TYPE_SIZE_UNIT (record_type)
1769 = variable_size (round_up (size_unit, align / BITS_PER_UNIT));
1770
1771 compute_record_mode (record_type);
1772 }
1773 }
1774
1775 if (debug_info_p)
1776 rest_of_record_type_compilation (record_type);
1777 }
1778
1779 /* Append PARALLEL_TYPE on the chain of parallel types of TYPE. If
1780 PARALLEL_TYPE has no context yet, also propagate TYPE's context to
1781 PARALLEL_TYPE, or defer the propagation to the moment TYPE gets
1782 a context. */
1783
1784 void
1785 add_parallel_type (tree type, tree parallel_type)
1786 {
1787 tree decl = TYPE_STUB_DECL (type);
1788
1789 while (DECL_PARALLEL_TYPE (decl))
1790 decl = TYPE_STUB_DECL (DECL_PARALLEL_TYPE (decl));
1791
1792 SET_DECL_PARALLEL_TYPE (decl, parallel_type);
1793
1794 /* If PARALLEL_TYPE already has a context, we are done. */
1795 if (TYPE_CONTEXT (parallel_type) != NULL_TREE)
1796 return;
1797
1798 /* Otherwise, try to get one from TYPE's context. */
1799 if (TYPE_CONTEXT (type) != NULL_TREE)
1800 /* TYPE already has a context, so simply propagate it to PARALLEL_TYPE. */
1801 gnat_set_type_context (parallel_type, TYPE_CONTEXT (type));
1802
1803 /* ... otherwise TYPE has no context yet. We know it eventually will,
1804 thanks to gnat_pushdecl, and its context will then be propagated to
1805 PARALLEL_TYPE. So we have nothing to do in this case. */
1806 }
1807
1808 /* Return true if TYPE has a parallel type. */
1809
1810 static bool
1811 has_parallel_type (tree type)
1812 {
1813 tree decl = TYPE_STUB_DECL (type);
1814
1815 return DECL_PARALLEL_TYPE (decl) != NULL_TREE;
1816 }
1817
1818 /* Wrap up compilation of RECORD_TYPE, i.e. output all the debug information
1819 associated with it. It need not be invoked directly in most cases since
1820 finish_record_type takes care of doing so, but this can be necessary if
1821 a parallel type is to be attached to the record type. */
1822
1823 void
1824 rest_of_record_type_compilation (tree record_type)
1825 {
1826 bool var_size = false;
1827 tree field;
1828
1829 /* If this is a padded type, the bulk of the debug info has already been
1830 generated for the field's type. */
1831 if (TYPE_IS_PADDING_P (record_type))
1832 return;
1833
1834 /* If the type already has a parallel type (XVS type), then we're done. */
1835 if (has_parallel_type (record_type))
1836 return;
1837
1838 for (field = TYPE_FIELDS (record_type); field; field = DECL_CHAIN (field))
1839 {
1840 /* We need to make an XVE/XVU record if any field has variable size,
1841 whether or not the record does. For example, if we have a union,
1842 it may be that all fields, rounded up to the alignment, have the
1843 same size, in which case we'll use that size. But the debug
1844 output routines (except Dwarf2) won't be able to output the fields,
1845 so we need to make the special record. */
1846 if (TREE_CODE (DECL_SIZE (field)) != INTEGER_CST
1847 /* If a field has a non-constant qualifier, the record will have
1848 variable size too. */
1849 || (TREE_CODE (record_type) == QUAL_UNION_TYPE
1850 && TREE_CODE (DECL_QUALIFIER (field)) != INTEGER_CST))
1851 {
1852 var_size = true;
1853 break;
1854 }
1855 }
1856
1857 /* If this record type is of variable size, make a parallel record type that
1858 will tell the debugger how the former is laid out (see exp_dbug.ads). */
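/* For example, a variable-size record R typically gets a parallel type
   named R___XVE (or R___XVU if R is a variant part), in which each
   variable-size field is described through a pointer-typed field whose
   name carries an XVL suffix; exp_dbug.ads documents the exact
   encoding.  */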
1859 if (var_size)
1860 {
1861 tree new_record_type
1862 = make_node (TREE_CODE (record_type) == QUAL_UNION_TYPE
1863 ? UNION_TYPE : TREE_CODE (record_type));
1864 tree orig_name = TYPE_IDENTIFIER (record_type), new_name;
1865 tree last_pos = bitsize_zero_node;
1866 tree old_field, prev_old_field = NULL_TREE;
1867
1868 new_name
1869 = concat_name (orig_name, TREE_CODE (record_type) == QUAL_UNION_TYPE
1870 ? "XVU" : "XVE");
1871 TYPE_NAME (new_record_type) = new_name;
1872 TYPE_ALIGN (new_record_type) = BIGGEST_ALIGNMENT;
1873 TYPE_STUB_DECL (new_record_type)
1874 = create_type_stub_decl (new_name, new_record_type);
1875 DECL_IGNORED_P (TYPE_STUB_DECL (new_record_type))
1876 = DECL_IGNORED_P (TYPE_STUB_DECL (record_type));
1877 TYPE_SIZE (new_record_type) = size_int (TYPE_ALIGN (record_type));
1878 TYPE_SIZE_UNIT (new_record_type)
1879 = size_int (TYPE_ALIGN (record_type) / BITS_PER_UNIT);
1880
1881 /* Now scan all the fields, replacing each field with a new field
1882 corresponding to the new encoding. */
1883 for (old_field = TYPE_FIELDS (record_type); old_field;
1884 old_field = DECL_CHAIN (old_field))
1885 {
1886 tree field_type = TREE_TYPE (old_field);
1887 tree field_name = DECL_NAME (old_field);
1888 tree curpos = bit_position (old_field);
1889 tree pos, new_field;
1890 bool var = false;
1891 unsigned int align = 0;
1892
1893 /* We're going to do some pattern matching below so remove as many
1894 conversions as possible. */
1895 curpos = remove_conversions (curpos, true);
1896
1897 /* See how the position was modified from the last position.
1898
1899 There are two basic cases we support: a value was added
1900 to the last position or the last position was rounded to
1901 a boundary and then something was added. Check for the
1902 first case first. If not, see if there is any evidence
1903 of rounding. If so, round the last position and retry.
1904
1905 If this is a union, the position can be taken as zero. */
1906 if (TREE_CODE (new_record_type) == UNION_TYPE)
1907 pos = bitsize_zero_node;
1908 else
1909 pos = compute_related_constant (curpos, last_pos);
1910
1911 if (!pos
1912 && TREE_CODE (curpos) == MULT_EXPR
1913 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1)))
1914 {
1915 tree offset = TREE_OPERAND (curpos, 0);
1916 align = tree_to_uhwi (TREE_OPERAND (curpos, 1));
1917 align = scale_by_factor_of (offset, align);
1918 last_pos = round_up (last_pos, align);
1919 pos = compute_related_constant (curpos, last_pos);
1920 }
1921 else if (!pos
1922 && TREE_CODE (curpos) == PLUS_EXPR
1923 && tree_fits_uhwi_p (TREE_OPERAND (curpos, 1))
1924 && TREE_CODE (TREE_OPERAND (curpos, 0)) == MULT_EXPR
1925 && tree_fits_uhwi_p
1926 (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1)))
1927 {
1928 tree offset = TREE_OPERAND (TREE_OPERAND (curpos, 0), 0);
1929 unsigned HOST_WIDE_INT addend
1930 = tree_to_uhwi (TREE_OPERAND (curpos, 1));
1931 align
1932 = tree_to_uhwi (TREE_OPERAND (TREE_OPERAND (curpos, 0), 1));
1933 align = scale_by_factor_of (offset, align);
1934 align = MIN (align, addend & -addend);
1935 last_pos = round_up (last_pos, align);
1936 pos = compute_related_constant (curpos, last_pos);
1937 }
1938 else if (potential_alignment_gap (prev_old_field, old_field, pos))
1939 {
1940 align = TYPE_ALIGN (field_type);
1941 last_pos = round_up (last_pos, align);
1942 pos = compute_related_constant (curpos, last_pos);
1943 }
1944
1945 /* If we can't compute a position, set it to zero.
1946
1947 ??? We really should abort here, but it's too much work
1948 to get this correct for all cases. */
1949 if (!pos)
1950 pos = bitsize_zero_node;
1951
1952 /* See if this type is variable-sized and make a pointer type
1953 and indicate the indirection if so. Beware that the debug
1954 back-end may adjust the position computed above according
1955 to the alignment of the field type, i.e. the pointer type
1956 in this case, if we don't preventively counter that. */
1957 if (TREE_CODE (DECL_SIZE (old_field)) != INTEGER_CST)
1958 {
1959 field_type = build_pointer_type (field_type);
1960 if (align != 0 && TYPE_ALIGN (field_type) > align)
1961 {
1962 field_type = copy_node (field_type);
1963 TYPE_ALIGN (field_type) = align;
1964 }
1965 var = true;
1966 }
1967
1968 /* Make a new field name, if necessary. */
1969 if (var || align != 0)
1970 {
1971 char suffix[16];
1972
1973 if (align != 0)
1974 sprintf (suffix, "XV%c%u", var ? 'L' : 'A',
1975 align / BITS_PER_UNIT);
1976 else
1977 strcpy (suffix, "XVL");
1978
1979 field_name = concat_name (field_name, suffix);
1980 }
1981
1982 new_field
1983 = create_field_decl (field_name, field_type, new_record_type,
1984 DECL_SIZE (old_field), pos, 0, 0);
1985 DECL_CHAIN (new_field) = TYPE_FIELDS (new_record_type);
1986 TYPE_FIELDS (new_record_type) = new_field;
1987
1988 /* If old_field is a QUAL_UNION_TYPE, take its size as being
1989 zero. The only time it's not the last field of the record
1990 is when there are other components at fixed positions after
1991 it (meaning there was a rep clause for every field) and we
1992 want to be able to encode them. */
1993 last_pos = size_binop (PLUS_EXPR, bit_position (old_field),
1994 (TREE_CODE (TREE_TYPE (old_field))
1995 == QUAL_UNION_TYPE)
1996 ? bitsize_zero_node
1997 : DECL_SIZE (old_field));
1998 prev_old_field = old_field;
1999 }
2000
2001 TYPE_FIELDS (new_record_type) = nreverse (TYPE_FIELDS (new_record_type));
2002
2003 add_parallel_type (record_type, new_record_type);
2004 }
2005 }
2006
2007 /* Utility function of above to merge LAST_SIZE, the previous size of a record,
2008 with FIRST_BIT and SIZE that describe a field. SPECIAL is true if this
2009 represents a QUAL_UNION_TYPE in which case we must look for COND_EXPRs and
2010 replace a value of zero with the old size. If HAS_REP is true, we take the
2011 MAX of the end position of this field with LAST_SIZE. In all other cases,
2012 we use FIRST_BIT plus SIZE. Return an expression for the size. */
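/* For instance, with illustrative numbers: given LAST_SIZE = 128,
   FIRST_BIT = 64 and SIZE = 32, the result is 64 + 32 = 96 bits if
   HAS_REP is false, but MAX (128, 96) = 128 bits if HAS_REP is true,
   since a rep clause may have placed an earlier field beyond this
   one.  */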
2013
2014 static tree
2015 merge_sizes (tree last_size, tree first_bit, tree size, bool special,
2016 bool has_rep)
2017 {
2018 tree type = TREE_TYPE (last_size);
2019 tree new_size;
2020
2021 if (!special || TREE_CODE (size) != COND_EXPR)
2022 {
2023 new_size = size_binop (PLUS_EXPR, first_bit, size);
2024 if (has_rep)
2025 new_size = size_binop (MAX_EXPR, last_size, new_size);
2026 }
2027
2028 else
2029 new_size = fold_build3 (COND_EXPR, type, TREE_OPERAND (size, 0),
2030 integer_zerop (TREE_OPERAND (size, 1))
2031 ? last_size : merge_sizes (last_size, first_bit,
2032 TREE_OPERAND (size, 1),
2033 1, has_rep),
2034 integer_zerop (TREE_OPERAND (size, 2))
2035 ? last_size : merge_sizes (last_size, first_bit,
2036 TREE_OPERAND (size, 2),
2037 1, has_rep));
2038
2039 /* We don't need any NON_LVALUE_EXPRs, and they can confuse us (especially
2040 when fed through substitute_in_expr) into thinking that a constant
2041 size is not constant. */
2042 while (TREE_CODE (new_size) == NON_LVALUE_EXPR)
2043 new_size = TREE_OPERAND (new_size, 0);
2044
2045 return new_size;
2046 }
2047
2048 /* Utility function of above to see if OP0 and OP1, both of bitsizetype, are
2049 related by the addition of a constant. Return that constant if so. */
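/* For instance, if OP0 is N + 24 and OP1 is N + 8 for some variable N,
   the constant 16 is returned; if the variable parts do not match, zero
   is returned instead (illustrative values).  */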
2050
2051 static tree
2052 compute_related_constant (tree op0, tree op1)
2053 {
2054 tree op0_var, op1_var;
2055 tree op0_con = split_plus (op0, &op0_var);
2056 tree op1_con = split_plus (op1, &op1_var);
2057 tree result = size_binop (MINUS_EXPR, op0_con, op1_con);
2058
2059 if (operand_equal_p (op0_var, op1_var, 0))
2060 return result;
2061 else if (operand_equal_p (op0, size_binop (PLUS_EXPR, op1_var, result), 0))
2062 return result;
2063 else
2064 return 0;
2065 }
2066
2067 /* Utility function of above to split a tree OP which may be a sum, into a
2068 constant part, which is returned, and a variable part, which is stored
2069 in *PVAR. *PVAR may be bitsize_zero_node. All operations must be of
2070 bitsizetype. */
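/* For instance, if IN is (N + 8) - 4 for some variable N, the constant
   part 4 is returned and *PVAR is set to an expression equivalent to N,
   both in bitsizetype (illustrative values).  */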
2071
2072 static tree
2073 split_plus (tree in, tree *pvar)
2074 {
2075 /* Strip conversions in order to ease the tree traversal and maximize the
2076 potential for constant or plus/minus discovery. We need to be careful
2077 to always return and set *pvar to bitsizetype trees, but it's worth
2078 the effort. */
2079 in = remove_conversions (in, false);
2080
2081 *pvar = convert (bitsizetype, in);
2082
2083 if (TREE_CODE (in) == INTEGER_CST)
2084 {
2085 *pvar = bitsize_zero_node;
2086 return convert (bitsizetype, in);
2087 }
2088 else if (TREE_CODE (in) == PLUS_EXPR || TREE_CODE (in) == MINUS_EXPR)
2089 {
2090 tree lhs_var, rhs_var;
2091 tree lhs_con = split_plus (TREE_OPERAND (in, 0), &lhs_var);
2092 tree rhs_con = split_plus (TREE_OPERAND (in, 1), &rhs_var);
2093
2094 if (lhs_var == TREE_OPERAND (in, 0)
2095 && rhs_var == TREE_OPERAND (in, 1))
2096 return bitsize_zero_node;
2097
2098 *pvar = size_binop (TREE_CODE (in), lhs_var, rhs_var);
2099 return size_binop (TREE_CODE (in), lhs_con, rhs_con);
2100 }
2101 else
2102 return bitsize_zero_node;
2103 }
2104 \f
2105 /* Return a FUNCTION_TYPE node. RETURN_TYPE is the type returned by the
2106 subprogram. If it is VOID_TYPE, then we are dealing with a procedure,
2107 otherwise we are dealing with a function. PARAM_DECL_LIST is a list of
2108 PARM_DECL nodes that are the subprogram parameters. CICO_LIST is the
2109 copy-in/copy-out list to be stored into the TYPE_CICO_LIST field.
2110 RETURN_UNCONSTRAINED_P is true if the function returns an unconstrained
2111 object. RETURN_BY_DIRECT_REF_P is true if the function returns by direct
2112 reference. RETURN_BY_INVISI_REF_P is true if the function returns by
2113 invisible reference. */
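/* A simplified sketch of a call for a function with scalar parameters
   returning a constrained scalar result, with hypothetical variables:

     create_subprog_type (gnu_return_type, gnu_param_list, NULL_TREE,
                          false, false, false);  */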
2114
2115 tree
2116 create_subprog_type (tree return_type, tree param_decl_list, tree cico_list,
2117 bool return_unconstrained_p, bool return_by_direct_ref_p,
2118 bool return_by_invisi_ref_p)
2119 {
2120 /* A list of the data type nodes of the subprogram formal parameters.
2121 This list is generated by traversing the input list of PARM_DECL
2122 nodes. */
2123 vec<tree, va_gc> *param_type_list = NULL;
2124 tree t, type;
2125
2126 for (t = param_decl_list; t; t = DECL_CHAIN (t))
2127 vec_safe_push (param_type_list, TREE_TYPE (t));
2128
2129 type = build_function_type_vec (return_type, param_type_list);
2130
2131 /* TYPE may have been shared since GCC hashes types. If it has a different
2132 CICO_LIST, make a copy. Likewise for the various flags. */
2133 if (!fntype_same_flags_p (type, cico_list, return_unconstrained_p,
2134 return_by_direct_ref_p, return_by_invisi_ref_p))
2135 {
2136 type = copy_type (type);
2137 TYPE_CI_CO_LIST (type) = cico_list;
2138 TYPE_RETURN_UNCONSTRAINED_P (type) = return_unconstrained_p;
2139 TYPE_RETURN_BY_DIRECT_REF_P (type) = return_by_direct_ref_p;
2140 TREE_ADDRESSABLE (type) = return_by_invisi_ref_p;
2141 }
2142
2143 return type;
2144 }
2145 \f
2146 /* Return a copy of TYPE but safe to modify in any way. */
2147
2148 tree
2149 copy_type (tree type)
2150 {
2151 tree new_type = copy_node (type);
2152
2153 /* Unshare the language-specific data. */
2154 if (TYPE_LANG_SPECIFIC (type))
2155 {
2156 TYPE_LANG_SPECIFIC (new_type) = NULL;
2157 SET_TYPE_LANG_SPECIFIC (new_type, GET_TYPE_LANG_SPECIFIC (type));
2158 }
2159
2160 /* And the contents of the language-specific slot if needed. */
2161 if ((INTEGRAL_TYPE_P (type) || TREE_CODE (type) == REAL_TYPE)
2162 && TYPE_RM_VALUES (type))
2163 {
2164 TYPE_RM_VALUES (new_type) = NULL_TREE;
2165 SET_TYPE_RM_SIZE (new_type, TYPE_RM_SIZE (type));
2166 SET_TYPE_RM_MIN_VALUE (new_type, TYPE_RM_MIN_VALUE (type));
2167 SET_TYPE_RM_MAX_VALUE (new_type, TYPE_RM_MAX_VALUE (type));
2168 }
2169
2170 /* copy_node clears this field instead of copying it, because it is
2171 aliased with TREE_CHAIN. */
2172 TYPE_STUB_DECL (new_type) = TYPE_STUB_DECL (type);
2173
2174 TYPE_POINTER_TO (new_type) = 0;
2175 TYPE_REFERENCE_TO (new_type) = 0;
2176 TYPE_MAIN_VARIANT (new_type) = new_type;
2177 TYPE_NEXT_VARIANT (new_type) = 0;
2178
2179 return new_type;
2180 }
2181 \f
2182 /* Return a subtype of sizetype with range MIN to MAX and whose
2183 TYPE_INDEX_TYPE is INDEX. GNAT_NODE is used for the position
2184 of the associated TYPE_DECL. */
2185
2186 tree
2187 create_index_type (tree min, tree max, tree index, Node_Id gnat_node)
2188 {
2189 /* First build a type for the desired range. */
2190 tree type = build_nonshared_range_type (sizetype, min, max);
2191
2192 /* Then set the index type. */
2193 SET_TYPE_INDEX_TYPE (type, index);
2194 create_type_decl (NULL_TREE, type, true, false, gnat_node);
2195
2196 return type;
2197 }
2198
2199 /* Return a subtype of TYPE with range MIN to MAX. If TYPE is NULL,
2200 sizetype is used. */
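/* For instance, the hypothetical call
     create_range_type (NULL_TREE, size_int (1), size_int (10))
   yields a subtype of sizetype whose RM bounds are 1 .. 10 while its
   base range remains that of sizetype.  */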
2201
2202 tree
2203 create_range_type (tree type, tree min, tree max)
2204 {
2205 tree range_type;
2206
2207 if (type == NULL_TREE)
2208 type = sizetype;
2209
2210 /* First build a type with the base range. */
2211 range_type = build_nonshared_range_type (type, TYPE_MIN_VALUE (type),
2212 TYPE_MAX_VALUE (type));
2213
2214 /* Then set the actual range. */
2215 SET_TYPE_RM_MIN_VALUE (range_type, min);
2216 SET_TYPE_RM_MAX_VALUE (range_type, max);
2217
2218 return range_type;
2219 }
2220 \f
2221 /* Return a TYPE_DECL node suitable for the TYPE_STUB_DECL field of a type.
2222 TYPE_NAME gives the name of the type and TYPE is a ..._TYPE node giving
2223 its data type. */
2224
2225 tree
2226 create_type_stub_decl (tree type_name, tree type)
2227 {
2228 /* Using a named TYPE_DECL ensures that a type name marker is emitted in
2229 STABS while setting DECL_ARTIFICIAL ensures that no DW_TAG_typedef is
2230 emitted in DWARF. */
2231 tree type_decl = build_decl (input_location, TYPE_DECL, type_name, type);
2232 DECL_ARTIFICIAL (type_decl) = 1;
2233 TYPE_ARTIFICIAL (type) = 1;
2234 return type_decl;
2235 }
2236
2237 /* Return a TYPE_DECL node. TYPE_NAME gives the name of the type and TYPE
2238 is a ..._TYPE node giving its data type. ARTIFICIAL_P is true if this
2239 is a declaration that was generated by the compiler. DEBUG_INFO_P is
2240 true if we need to write debug information about this type. GNAT_NODE
2241 is used for the position of the decl. */
2242
2243 tree
2244 create_type_decl (tree type_name, tree type, bool artificial_p,
2245 bool debug_info_p, Node_Id gnat_node)
2246 {
2247 enum tree_code code = TREE_CODE (type);
2248 bool named = TYPE_NAME (type) && TREE_CODE (TYPE_NAME (type)) == TYPE_DECL;
2249 tree type_decl;
2250
2251 /* Only the builtin TYPE_STUB_DECL should be used for dummy types. */
2252 gcc_assert (!TYPE_IS_DUMMY_P (type));
2253
2254 /* If the type hasn't been named yet, we're naming it; preserve an existing
2255 TYPE_STUB_DECL that has been attached to it for some purpose. */
2256 if (!named && TYPE_STUB_DECL (type))
2257 {
2258 type_decl = TYPE_STUB_DECL (type);
2259 DECL_NAME (type_decl) = type_name;
2260 }
2261 else
2262 type_decl = build_decl (input_location, TYPE_DECL, type_name, type);
2263
2264 DECL_ARTIFICIAL (type_decl) = artificial_p;
2265 TYPE_ARTIFICIAL (type) = artificial_p;
2266
2267 /* Add this decl to the current binding level. */
2268 gnat_pushdecl (type_decl, gnat_node);
2269
2270 /* If we're naming the type, equate the TYPE_STUB_DECL to the name.
2271 This causes the name to be also viewed as a "tag" by the debug
2272 back-end, with the advantage that no DW_TAG_typedef is emitted
2273 for artificial "tagged" types in DWARF. */
2274 if (!named)
2275 TYPE_STUB_DECL (type) = type_decl;
2276
2277 /* Do not generate debug info for UNCONSTRAINED_ARRAY_TYPE that the
2278 back-end doesn't support, and for others if we don't need to. */
2279 if (code == UNCONSTRAINED_ARRAY_TYPE || !debug_info_p)
2280 DECL_IGNORED_P (type_decl) = 1;
2281
2282 return type_decl;
2283 }
2284 \f
2285 /* Return a VAR_DECL or CONST_DECL node.
2286
2287 VAR_NAME gives the name of the variable. ASM_NAME is its assembler name
2288 (if provided). TYPE is its data type (a GCC ..._TYPE node). VAR_INIT is
2289 the GCC tree for an optional initial expression; NULL_TREE if none.
2290
2291 CONST_FLAG is true if this variable is constant, in which case we might
2292 return a CONST_DECL node unless CONST_DECL_ALLOWED_P is false.
2293
2294 PUBLIC_FLAG is true if this is for a reference to a public entity or for a
2295 definition to be made visible outside of the current compilation unit, for
2296 instance variable definitions in a package specification.
2297
2298 EXTERN_FLAG is true when processing an external variable declaration (as
2299 opposed to a definition: no storage is to be allocated for the variable).
2300
2301 STATIC_FLAG is only relevant when not at top level. In that case
2302 it indicates whether to always allocate storage to the variable.
2303
2304 GNAT_NODE is used for the position of the decl. */
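/* A simplified sketch with hypothetical variables: a library-level Ada
   constant of a scalar type with a static initializer might reach here as

     create_var_decl_1 (gnu_name, NULL_TREE, gnu_type, gnu_init,
                        true, true, false, false, true, NULL, gnat_node);

   and yield a CONST_DECL, since the initializer is constant and the type
   is not an aggregate.  */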
2305
2306 tree
2307 create_var_decl_1 (tree var_name, tree asm_name, tree type, tree var_init,
2308 bool const_flag, bool public_flag, bool extern_flag,
2309 bool static_flag, bool const_decl_allowed_p,
2310 struct attrib *attr_list, Node_Id gnat_node)
2311 {
2312 /* Whether the initializer is a constant initializer. At the global level
2313 or for an external object or an object to be allocated in static memory,
2314 we check that it is a valid constant expression for use in initializing
2315 a static variable; otherwise, we only check that it is constant. */
2316 bool init_const
2317 = (var_init != 0
2318 && gnat_types_compatible_p (type, TREE_TYPE (var_init))
2319 && (global_bindings_p () || extern_flag || static_flag
2320 ? initializer_constant_valid_p (var_init, TREE_TYPE (var_init)) != 0
2321 : TREE_CONSTANT (var_init)));
2322
2323 /* Whether we will make TREE_CONSTANT the DECL we produce here, in which
2324 case the initializer may be used in lieu of the DECL node (as done in
2325 Identifier_to_gnu). This is useful to prevent the need of elaboration
2326 code when an identifier for which such a decl is made is in turn used as
2327 an initializer. We used to rely on CONST vs VAR_DECL for this purpose,
2328 but extra constraints apply to this choice (see below) and are not
2329 relevant to the distinction we wish to make. */
2330 bool constant_p = const_flag && init_const;
2331
2332 /* The actual DECL node. CONST_DECL was initially intended for enumerals
2333 and may be used for scalars in general but not for aggregates. */
2334 tree var_decl
2335 = build_decl (input_location,
2336 (constant_p && const_decl_allowed_p
2337 && !AGGREGATE_TYPE_P (type)) ? CONST_DECL : VAR_DECL,
2338 var_name, type);
2339
2340 /* If this is external, throw away any initializations (they will be done
2341 elsewhere) unless this is a constant for which we would like to remain
2342 able to get the initializer. If we are defining a global here, leave a
2343 constant initialization and save any variable elaborations for the
2344 elaboration routine. If we are just annotating types, throw away the
2345 initialization if it isn't a constant. */
2346 if ((extern_flag && !constant_p)
2347 || (type_annotate_only && var_init && !TREE_CONSTANT (var_init)))
2348 var_init = NULL_TREE;
2349
2350 /* At the global level, an initializer requiring code to be generated
2351 produces elaboration statements. Check that such statements are allowed,
2352 that is, not violating a No_Elaboration_Code restriction. */
2353 if (global_bindings_p () && var_init != 0 && !init_const)
2354 Check_Elaboration_Code_Allowed (gnat_node);
2355
2356 DECL_INITIAL (var_decl) = var_init;
2357 TREE_READONLY (var_decl) = const_flag;
2358 DECL_EXTERNAL (var_decl) = extern_flag;
2359 TREE_PUBLIC (var_decl) = public_flag || extern_flag;
2360 TREE_CONSTANT (var_decl) = constant_p;
2361 TREE_THIS_VOLATILE (var_decl) = TREE_SIDE_EFFECTS (var_decl)
2362 = TYPE_VOLATILE (type);
2363
2364 /* Ada doesn't feature Fortran-like COMMON variables so we shouldn't
2365 try to fiddle with DECL_COMMON. However, on platforms that don't
2366 support global BSS sections, uninitialized global variables would
2367 go in DATA instead, thus increasing the size of the executable. */
2368 if (!flag_no_common
2369 && TREE_CODE (var_decl) == VAR_DECL
2370 && TREE_PUBLIC (var_decl)
2371 && !have_global_bss_p ())
2372 DECL_COMMON (var_decl) = 1;
2373
2374 /* At the global binding level, we need to allocate static storage for the
2375 variable if it isn't external. Otherwise, we allocate automatic storage
2376 unless requested not to. */
2377 TREE_STATIC (var_decl)
2378 = !extern_flag && (static_flag || global_bindings_p ());
2379
2380 /* For an external constant whose initializer is not absolute, do not emit
2381 debug info. In DWARF this would mean a global relocation in a read-only
2382 section which runs afoul of the PE-COFF run-time relocation mechanism. */
2383 if (extern_flag
2384 && constant_p
2385 && var_init
2386 && initializer_constant_valid_p (var_init, TREE_TYPE (var_init))
2387 != null_pointer_node)
2388 DECL_IGNORED_P (var_decl) = 1;
2389
2390 if (TREE_SIDE_EFFECTS (var_decl))
2391 TREE_ADDRESSABLE (var_decl) = 1;
2392
2393 /* ??? Some attributes cannot be applied to CONST_DECLs. */
2394 if (TREE_CODE (var_decl) == VAR_DECL)
2395 process_attributes (&var_decl, &attr_list, true, gnat_node);
2396
2397 /* Add this decl to the current binding level. */
2398 gnat_pushdecl (var_decl, gnat_node);
2399
2400 if (TREE_CODE (var_decl) == VAR_DECL)
2401 {
2402 if (asm_name)
2403 SET_DECL_ASSEMBLER_NAME (var_decl, asm_name);
2404
2405 if (global_bindings_p ())
2406 rest_of_decl_compilation (var_decl, true, 0);
2407 }
2408
2409 return var_decl;
2410 }
2411 \f
2412 /* Return true if TYPE, an aggregate type, contains (or is) an array. */
2413
2414 static bool
2415 aggregate_type_contains_array_p (tree type)
2416 {
2417 switch (TREE_CODE (type))
2418 {
2419 case RECORD_TYPE:
2420 case UNION_TYPE:
2421 case QUAL_UNION_TYPE:
2422 {
2423 tree field;
2424 for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
2425 if (AGGREGATE_TYPE_P (TREE_TYPE (field))
2426 && aggregate_type_contains_array_p (TREE_TYPE (field)))
2427 return true;
2428 return false;
2429 }
2430
2431 case ARRAY_TYPE:
2432 return true;
2433
2434 default:
2435 gcc_unreachable ();
2436 }
2437 }
2438
2439 /* Return a FIELD_DECL node. FIELD_NAME is the field's name, FIELD_TYPE is
2440 its type and RECORD_TYPE is the type of the enclosing record. If SIZE is
2441 nonzero, it is the specified size of the field. If POS is nonzero, it is
2442 the bit position. PACKED is 1 if the enclosing record is packed, -1 if it
2443 has Component_Alignment of Storage_Unit. If ADDRESSABLE is nonzero, it
2444 means we are allowed to take the address of the field; if it is negative,
2445 we should not make a bitfield, which is used by make_aligning_type. */
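/* A hypothetical call, for illustration: a field of a packed record with
   no explicit size or position could be created as

     create_field_decl (get_identifier ("f"), gnu_type, gnu_record_type,
                        NULL_TREE, NULL_TREE, 1, 0);

   and may preventively be made a bitfield, as explained below.  */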
2446
2447 tree
2448 create_field_decl (tree field_name, tree field_type, tree record_type,
2449 tree size, tree pos, int packed, int addressable)
2450 {
2451 tree field_decl = build_decl (input_location,
2452 FIELD_DECL, field_name, field_type);
2453
2454 DECL_CONTEXT (field_decl) = record_type;
2455 TREE_READONLY (field_decl) = TYPE_READONLY (field_type);
2456
2457 /* If FIELD_TYPE is BLKmode, we must ensure this is aligned to at least a
2458 byte boundary since GCC cannot handle less-aligned BLKmode bitfields.
2459 Likewise for an aggregate without specified position that contains an
2460 array, because in this case slices of variable length of this array
2461 must be handled by GCC and variable-sized objects need to be aligned
2462 to at least a byte boundary. */
2463 if (packed && (TYPE_MODE (field_type) == BLKmode
2464 || (!pos
2465 && AGGREGATE_TYPE_P (field_type)
2466 && aggregate_type_contains_array_p (field_type))))
2467 DECL_ALIGN (field_decl) = BITS_PER_UNIT;
2468
2469 /* If a size is specified, use it. Otherwise, if the record type is packed,
2470 compute a size to use, which may differ from the object's natural size.
2471 We always set a size in this case to trigger the checks for bitfield
2472 creation below, which is typically required when no position has been
2473 specified. */
2474 if (size)
2475 size = convert (bitsizetype, size);
2476 else if (packed == 1)
2477 {
2478 size = rm_size (field_type);
2479 if (TYPE_MODE (field_type) == BLKmode)
2480 size = round_up (size, BITS_PER_UNIT);
2481 }
2482
2483 /* If ADDRESSABLE allows it, we make a bitfield when a size is specified,
2484 for two reasons: first, if the size differs from the natural size;
2485 second, if the alignment is insufficient. There are a number of
2486 ways the latter can be true.
2487
2488 We never make a bitfield if the type of the field has a nonconstant size,
2489 because no such entity requiring bitfield operations should reach here.
2490
2491 We do *preventively* make a bitfield when there might be the need for it
2492 but we don't have all the necessary information to decide, as is the case
2493 of a field with no specified position in a packed record.
2494
2495 We also don't look at STRICT_ALIGNMENT here, and rely on later processing
2496 in layout_decl or finish_record_type to clear the bit_field indication if
2497 it is in fact not needed. */
2498 if (addressable >= 0
2499 && size
2500 && TREE_CODE (size) == INTEGER_CST
2501 && TREE_CODE (TYPE_SIZE (field_type)) == INTEGER_CST
2502 && (!tree_int_cst_equal (size, TYPE_SIZE (field_type))
2503 || (pos && !value_factor_p (pos, TYPE_ALIGN (field_type)))
2504 || packed
2505 || (TYPE_ALIGN (record_type) != 0
2506 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))))
2507 {
2508 DECL_BIT_FIELD (field_decl) = 1;
2509 DECL_SIZE (field_decl) = size;
2510 if (!packed && !pos)
2511 {
2512 if (TYPE_ALIGN (record_type) != 0
2513 && TYPE_ALIGN (record_type) < TYPE_ALIGN (field_type))
2514 DECL_ALIGN (field_decl) = TYPE_ALIGN (record_type);
2515 else
2516 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
2517 }
2518 }
2519
2520 DECL_PACKED (field_decl) = pos ? DECL_BIT_FIELD (field_decl) : packed;
2521
2522 /* Bump the alignment if need be, either for bitfield/packing purposes or
2523 to satisfy the type requirements if no such consideration applies. When
2524 we get the alignment from the type, indicate if this is from an explicit
2525 user request, which prevents stor-layout from lowering it later on. */
2526 {
2527 unsigned int bit_align
2528 = (DECL_BIT_FIELD (field_decl) ? 1
2529 : packed && TYPE_MODE (field_type) != BLKmode ? BITS_PER_UNIT : 0);
2530
2531 if (bit_align > DECL_ALIGN (field_decl))
2532 DECL_ALIGN (field_decl) = bit_align;
2533 else if (!bit_align && TYPE_ALIGN (field_type) > DECL_ALIGN (field_decl))
2534 {
2535 DECL_ALIGN (field_decl) = TYPE_ALIGN (field_type);
2536 DECL_USER_ALIGN (field_decl) = TYPE_USER_ALIGN (field_type);
2537 }
2538 }
2539
2540 if (pos)
2541 {
2542 /* We need to pass in the alignment the DECL is known to have.
2543 This is the lowest-order bit set in POS, but no more than
2544 the alignment of the record, if one is specified. Note
2545 that an alignment of 0 is taken as infinite. */
2546 unsigned int known_align;
2547
2548 if (tree_fits_uhwi_p (pos))
2549 known_align = tree_to_uhwi (pos) & - tree_to_uhwi (pos);
2550 else
2551 known_align = BITS_PER_UNIT;
2552
2553 if (TYPE_ALIGN (record_type)
2554 && (known_align == 0 || known_align > TYPE_ALIGN (record_type)))
2555 known_align = TYPE_ALIGN (record_type);
2556
2557 layout_decl (field_decl, known_align);
2558 SET_DECL_OFFSET_ALIGN (field_decl,
2559 tree_fits_uhwi_p (pos) ? BIGGEST_ALIGNMENT
2560 : BITS_PER_UNIT);
2561 pos_from_bit (&DECL_FIELD_OFFSET (field_decl),
2562 &DECL_FIELD_BIT_OFFSET (field_decl),
2563 DECL_OFFSET_ALIGN (field_decl), pos);
2564 }
2565
2566 /* In addition to what our caller says, claim the field is addressable if we
2567 know that its type is not suitable.
2568
2569 The field may also be "technically" nonaddressable, meaning that even if
2570 we attempt to take the field's address we will actually get the address
2571 of a copy. This is the case for true bitfields, but the DECL_BIT_FIELD
2572 value we have at this point is not accurate enough, so we don't account
2573 for this here and let finish_record_type decide. */
2574 if (!addressable && !type_for_nonaliased_component_p (field_type))
2575 addressable = 1;
2576
2577 DECL_NONADDRESSABLE_P (field_decl) = !addressable;
2578
2579 return field_decl;
2580 }
2581 \f
2582 /* Return a PARM_DECL node. PARAM_NAME is the name of the parameter and
2583 PARAM_TYPE is its type. READONLY is true if the parameter is readonly
2584 (either an In parameter or an address of a pass-by-ref parameter). */
2585
2586 tree
2587 create_param_decl (tree param_name, tree param_type, bool readonly)
2588 {
2589 tree param_decl = build_decl (input_location,
2590 PARM_DECL, param_name, param_type);
2591
2592 /* Honor TARGET_PROMOTE_PROTOTYPES like the C compiler, as not doing so
2593 can lead to various ABI violations. */
2594 if (targetm.calls.promote_prototypes (NULL_TREE)
2595 && INTEGRAL_TYPE_P (param_type)
2596 && TYPE_PRECISION (param_type) < TYPE_PRECISION (integer_type_node))
2597 {
2598 /* We have to be careful about biased types here. Make a subtype
2599 of integer_type_node with the proper biasing. */
2600 if (TREE_CODE (param_type) == INTEGER_TYPE
2601 && TYPE_BIASED_REPRESENTATION_P (param_type))
2602 {
2603 tree subtype
2604 = make_unsigned_type (TYPE_PRECISION (integer_type_node));
2605 TREE_TYPE (subtype) = integer_type_node;
2606 TYPE_BIASED_REPRESENTATION_P (subtype) = 1;
2607 SET_TYPE_RM_MIN_VALUE (subtype, TYPE_MIN_VALUE (param_type));
2608 SET_TYPE_RM_MAX_VALUE (subtype, TYPE_MAX_VALUE (param_type));
2609 param_type = subtype;
2610 }
2611 else
2612 param_type = integer_type_node;
2613 }
2614
2615 DECL_ARG_TYPE (param_decl) = param_type;
2616 TREE_READONLY (param_decl) = readonly;
2617 return param_decl;
2618 }
2619 \f
2620 /* Process the attributes in ATTR_LIST for NODE, which is either a DECL or
2621 a TYPE. If IN_PLACE is true, the tree pointed to by NODE should not be
2622 changed. GNAT_NODE is used for the position of error messages. */
2623
2624 void
2625 process_attributes (tree *node, struct attrib **attr_list, bool in_place,
2626 Node_Id gnat_node)
2627 {
2628 struct attrib *attr;
2629
2630 for (attr = *attr_list; attr; attr = attr->next)
2631 switch (attr->type)
2632 {
2633 case ATTR_MACHINE_ATTRIBUTE:
2634 Sloc_to_locus (Sloc (gnat_node), &input_location);
2635 decl_attributes (node, tree_cons (attr->name, attr->args, NULL_TREE),
2636 in_place ? ATTR_FLAG_TYPE_IN_PLACE : 0);
2637 break;
2638
2639 case ATTR_LINK_ALIAS:
2640 if (!DECL_EXTERNAL (*node))
2641 {
2642 TREE_STATIC (*node) = 1;
2643 assemble_alias (*node, attr->name);
2644 }
2645 break;
2646
2647 case ATTR_WEAK_EXTERNAL:
2648 if (SUPPORTS_WEAK)
2649 declare_weak (*node);
2650 else
2651 post_error ("?weak declarations not supported on this target",
2652 attr->error_point);
2653 break;
2654
2655 case ATTR_LINK_SECTION:
2656 if (targetm_common.have_named_sections)
2657 {
2658 set_decl_section_name (*node, IDENTIFIER_POINTER (attr->name));
2659 DECL_COMMON (*node) = 0;
2660 }
2661 else
2662 post_error ("?section attributes are not supported for this target",
2663 attr->error_point);
2664 break;
2665
2666 case ATTR_LINK_CONSTRUCTOR:
2667 DECL_STATIC_CONSTRUCTOR (*node) = 1;
2668 TREE_USED (*node) = 1;
2669 break;
2670
2671 case ATTR_LINK_DESTRUCTOR:
2672 DECL_STATIC_DESTRUCTOR (*node) = 1;
2673 TREE_USED (*node) = 1;
2674 break;
2675
2676 case ATTR_THREAD_LOCAL_STORAGE:
2677 set_decl_tls_model (*node, decl_default_tls_model (*node));
2678 DECL_COMMON (*node) = 0;
2679 break;
2680 }
2681
2682 *attr_list = NULL;
2683 }
2684 \f
2685 /* Record DECL as a global renaming pointer. */
2686
2687 void
2688 record_global_renaming_pointer (tree decl)
2689 {
2690 gcc_assert (!DECL_LOOP_PARM_P (decl) && DECL_RENAMED_OBJECT (decl));
2691 vec_safe_push (global_renaming_pointers, decl);
2692 }
2693
2694 /* Invalidate the global renaming pointers that are not constant, lest their
2695 renamed object contain SAVE_EXPRs tied to an elaboration routine. Note
2696 that we should not blindly invalidate everything here because of the need
2697 to propagate constant values through renaming. */
2698
2699 void
2700 invalidate_global_renaming_pointers (void)
2701 {
2702 unsigned int i;
2703 tree iter;
2704
2705 if (global_renaming_pointers == NULL)
2706 return;
2707
2708 FOR_EACH_VEC_ELT (*global_renaming_pointers, i, iter)
2709 if (!TREE_CONSTANT (DECL_RENAMED_OBJECT (iter)))
2710 SET_DECL_RENAMED_OBJECT (iter, NULL_TREE);
2711
2712 vec_free (global_renaming_pointers);
2713 }
2714
2715 /* Return true if VALUE is known to be a multiple of FACTOR, which must be
2716 a power of 2. */
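/* For instance, value_factor_p (bitsize_int (24), 8) is true; so is any
   MULT_EXPR one of whose operands is known to be a multiple of FACTOR.  */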
2717
2718 bool
2719 value_factor_p (tree value, HOST_WIDE_INT factor)
2720 {
2721 if (tree_fits_uhwi_p (value))
2722 return tree_to_uhwi (value) % factor == 0;
2723
2724 if (TREE_CODE (value) == MULT_EXPR)
2725 return (value_factor_p (TREE_OPERAND (value, 0), factor)
2726 || value_factor_p (TREE_OPERAND (value, 1), factor));
2727
2728 return false;
2729 }
2730
2731 /* Return whether GNAT_NODE is a defining identifier for a renaming that comes
2732 from the parameter association for the instantiation of a generic. We do
2733 not want to emit source location for them: the code generated for their
2734 initialization is likely to disturb debugging. */
2735
2736 bool
2737 renaming_from_generic_instantiation_p (Node_Id gnat_node)
2738 {
2739 if (Nkind (gnat_node) != N_Defining_Identifier
2740 || !IN (Ekind (gnat_node), Object_Kind)
2741 || Comes_From_Source (gnat_node)
2742 || !Present (Renamed_Object (gnat_node)))
2743 return false;
2744
2745 /* Get the object declaration of the renamed object, if any and if the
2746 renamed object is a mere identifier. */
2747 gnat_node = Renamed_Object (gnat_node);
2748 if (Nkind (gnat_node) != N_Identifier)
2749 return false;
2750
2751 gnat_node = Entity (gnat_node);
2752 if (!Present (Parent (gnat_node)))
2753 return false;
2754
2755 gnat_node = Parent (gnat_node);
2756 return
2757 (Present (gnat_node)
2758 && Nkind (gnat_node) == N_Object_Declaration
2759 && Present (Corresponding_Generic_Association (gnat_node)));
2760 }
2761
2762 /* Defer the initialization of DECL's DECL_CONTEXT attribute, scheduling it
2763 to be filled in once GNAT_SCOPE is elaborated. */
2764
2765 static struct deferred_decl_context_node *
2766 add_deferred_decl_context (tree decl, Entity_Id gnat_scope, int force_global)
2767 {
2768 struct deferred_decl_context_node *new_node;
2769
2770 new_node
2771 = (struct deferred_decl_context_node * ) xmalloc (sizeof (*new_node));
2772 new_node->decl = decl;
2773 new_node->gnat_scope = gnat_scope;
2774 new_node->force_global = force_global;
2775 new_node->types.create (1);
2776 new_node->next = deferred_decl_context_queue;
2777 deferred_decl_context_queue = new_node;
2778 return new_node;
2779 }
2780
2781 /* Defer the initialization of TYPE's TYPE_CONTEXT attribute, scheduling it
2782 to be filled in with the DECL_CONTEXT computed as part of N as soon as
2783 that one is computed. */
2784
2785 static void
2786 add_deferred_type_context (struct deferred_decl_context_node *n, tree type)
2787 {
2788 n->types.safe_push (type);
2789 }
2790
2791 /* Get the GENERIC node corresponding to GNAT_SCOPE, if available. Return
2792 NULL_TREE if it is not available. */
2793
2794 static tree
2795 compute_deferred_decl_context (Entity_Id gnat_scope)
2796 {
2797 tree context;
2798
2799 if (present_gnu_tree (gnat_scope))
2800 context = get_gnu_tree (gnat_scope);
2801 else
2802 return NULL_TREE;
2803
2804 if (TREE_CODE (context) == TYPE_DECL)
2805 {
2806 const tree context_type = TREE_TYPE (context);
2807
2808 /* Skip dummy types: only the final ones can appear in the context
2809 chain. */
2810 if (TYPE_DUMMY_P (context_type))
2811 return NULL_TREE;
2812
2813 /* ..._TYPE nodes are more useful than TYPE_DECL nodes in the context
2814 chain. */
2815 else
2816 context = context_type;
2817 }
2818
2819 return context;
2820 }
2821
2822 /* Try to process all deferred nodes in the queue. Keep in the queue the ones
2823 that cannot be processed yet and remove the others. If FORCE is true,
2824 force the processing of all nodes and use the global context when nodes
2825 don't have a GNU translation. */
2826
2827 void
2828 process_deferred_decl_context (bool force)
2829 {
2830 struct deferred_decl_context_node **it = &deferred_decl_context_queue;
2831 struct deferred_decl_context_node *node;
2832
2833 while (*it != NULL)
2834 {
2835 bool processed = false;
2836 tree context = NULL_TREE;
2837 Entity_Id gnat_scope;
2838
2839 node = *it;
2840
2841 /* If FORCE, get the innermost elaborated scope. Otherwise, just try to
2842 get the first scope. */
2843 gnat_scope = node->gnat_scope;
2844 while (Present (gnat_scope))
2845 {
2846 context = compute_deferred_decl_context (gnat_scope);
2847 if (!force || context != NULL_TREE)
2848 break;
2849 gnat_scope = get_debug_scope (gnat_scope, NULL);
2850 }
2851
2852 /* Imported declarations must not be in a local context (i.e. not inside
2853 a function). */
2854 if (context != NULL_TREE && node->force_global > 0)
2855 {
2856 tree ctx = context;
2857
2858 while (ctx != NULL_TREE)
2859 {
2860 gcc_assert (TREE_CODE (ctx) != FUNCTION_DECL);
2861 ctx = (DECL_P (ctx))
2862 ? DECL_CONTEXT (ctx)
2863 : TYPE_CONTEXT (ctx);
2864 }
2865 }
2866
2867 /* If FORCE, we want to get rid of all nodes in the queue: in case there
2868 was no elaborated scope, use the global context. */
2869 if (force && context == NULL_TREE)
2870 context = get_global_context ();
2871
2872 if (context != NULL_TREE)
2873 {
2874 tree t;
2875 int i;
2876
2877 DECL_CONTEXT (node->decl) = context;
2878
2879 /* Propagate it to the TYPE_CONTEXT attributes of the requested
2880 ..._TYPE nodes. */
2881 FOR_EACH_VEC_ELT (node->types, i, t)
2882 {
2883 gnat_set_type_context (t, context);
2884 }
2885 processed = true;
2886 }
2887
2888 /* If this node has been successfully processed, remove it from the
2889 queue. Then move to the next node. */
2890 if (processed)
2891 {
2892 *it = node->next;
2893 node->types.release ();
2894 free (node);
2895 }
2896 else
2897 it = &node->next;
2898 }
2899 }
2900
2901
2902 /* Return VALUE scaled by the biggest power-of-2 factor of EXPR. */
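/* For instance, if EXPR is X & -8, the mask has 3 trailing zero bits, so
   scale_by_factor_of (EXPR, 4) returns 4 * 8 = 32 (illustrative
   values).  */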
2903
2904 static unsigned int
2905 scale_by_factor_of (tree expr, unsigned int value)
2906 {
2907 expr = remove_conversions (expr, true);
2908
2909 /* An expression which is a bitwise AND with a mask has a power-of-2 factor
2910 corresponding to the number of trailing zeros of the mask. */
2911 if (TREE_CODE (expr) == BIT_AND_EXPR
2912 && TREE_CODE (TREE_OPERAND (expr, 1)) == INTEGER_CST)
2913 {
2914 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (TREE_OPERAND (expr, 1));
2915 unsigned int i = 0;
2916
2917 while ((mask & 1) == 0 && i < HOST_BITS_PER_WIDE_INT)
2918 {
2919 mask >>= 1;
2920 value *= 2;
2921 i++;
2922 }
2923 }
2924
2925 return value;
2926 }
2927
2928 /* Given two consecutive field decls PREV_FIELD and CURR_FIELD, return true
2929 unless we can prove these two fields are laid out in such a way that no gap
2930 exists between the end of PREV_FIELD and the beginning of CURR_FIELD. OFFSET
2931 is the distance in bits between the end of PREV_FIELD and the starting
2932 position of CURR_FIELD. It is ignored if null. */
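/* For instance, a constant OFFSET of 8 bits means a gap is reported,
   while a constant OFFSET of zero means none is; when nothing can be
   proved, we conservatively return true (illustrative values).  */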
2933
2934 static bool
2935 potential_alignment_gap (tree prev_field, tree curr_field, tree offset)
2936 {
2937 /* If this is the first field of the record, there cannot be any gap. */
2938 if (!prev_field)
2939 return false;
2940
2941 /* If the previous field is a union type, then return false: The only
2942 time when such a field is not the last field of the record is when
2943 there are other components at fixed positions after it (meaning there
2944 was a rep clause for every field), in which case we don't want the
2945 alignment constraint to override them. */
2946 if (TREE_CODE (TREE_TYPE (prev_field)) == QUAL_UNION_TYPE)
2947 return false;
2948
2949 /* If the distance between the end of prev_field and the beginning of
2950 curr_field is constant, then there is a gap if the value of this
2951 constant is not null. */
2952 if (offset && tree_fits_uhwi_p (offset))
2953 return !integer_zerop (offset);
2954
2955 /* If the size and position of the previous field are constant,
2956 then check the sum of this size and position. There will be a gap
2957 iff it is not a multiple of the current field alignment. */
2958 if (tree_fits_uhwi_p (DECL_SIZE (prev_field))
2959 && tree_fits_uhwi_p (bit_position (prev_field)))
2960 return ((tree_to_uhwi (bit_position (prev_field))
2961 + tree_to_uhwi (DECL_SIZE (prev_field)))
2962 % DECL_ALIGN (curr_field) != 0);
2963
2964 /* If both the position and size of the previous field are multiples
2965 of the current field alignment, there cannot be any gap. */
2966 if (value_factor_p (bit_position (prev_field), DECL_ALIGN (curr_field))
2967 && value_factor_p (DECL_SIZE (prev_field), DECL_ALIGN (curr_field)))
2968 return false;
2969
2970 /* Fallback: return that there may be a potential gap. */
2971 return true;
2972 }
2973
2974 /* Return a LABEL_DECL with LABEL_NAME. GNAT_NODE is used for the position
2975 of the decl. */
2976
2977 tree
2978 create_label_decl (tree label_name, Node_Id gnat_node)
2979 {
2980 tree label_decl
2981 = build_decl (input_location, LABEL_DECL, label_name, void_type_node);
2982
2983 DECL_MODE (label_decl) = VOIDmode;
2984
2985 /* Add this decl to the current binding level. */
2986 gnat_pushdecl (label_decl, gnat_node);
2987
2988 return label_decl;
2989 }
2990 \f
2991 /* Return a FUNCTION_DECL node. SUBPROG_NAME is the name of the subprogram,
2992 ASM_NAME is its assembler name, SUBPROG_TYPE is its type (a FUNCTION_TYPE
2993 node), PARAM_DECL_LIST is the list of the subprogram arguments (a list of
2994 PARM_DECL nodes chained through the DECL_CHAIN field).
2995
2996 INLINE_STATUS, PUBLIC_FLAG, EXTERN_FLAG, ARTIFICIAL_FLAG and ATTR_LIST are
2997 used to set the appropriate fields in the FUNCTION_DECL. GNAT_NODE is
2998 used for the position of the decl. */
2999
3000 tree
3001 create_subprog_decl (tree subprog_name, tree asm_name, tree subprog_type,
3002 tree param_decl_list, enum inline_status_t inline_status,
3003 bool public_flag, bool extern_flag, bool artificial_flag,
3004 struct attrib *attr_list, Node_Id gnat_node)
3005 {
3006 tree subprog_decl = build_decl (input_location, FUNCTION_DECL, subprog_name,
3007 subprog_type);
3008 tree result_decl = build_decl (input_location, RESULT_DECL, NULL_TREE,
3009 TREE_TYPE (subprog_type));
3010 DECL_ARGUMENTS (subprog_decl) = param_decl_list;
3011
3012 /* If this is a non-inline function nested inside an inlined external
3013 function, we cannot honor both requests without cloning the nested
3014 function in the current unit since it is private to the other unit.
3015 We could inline the nested function as well but it's probably better
3016 to err on the side of too little inlining. */
3017 if (inline_status != is_enabled
3018 && !public_flag
3019 && current_function_decl
3020 && DECL_DECLARED_INLINE_P (current_function_decl)
3021 && DECL_EXTERNAL (current_function_decl))
3022 DECL_DECLARED_INLINE_P (current_function_decl) = 0;
3023
3024 DECL_ARTIFICIAL (subprog_decl) = artificial_flag;
3025 DECL_EXTERNAL (subprog_decl) = extern_flag;
3026
3027 switch (inline_status)
3028 {
3029 case is_suppressed:
3030 DECL_UNINLINABLE (subprog_decl) = 1;
3031 break;
3032
3033 case is_disabled:
3034 break;
3035
3036 case is_required:
3037 if (Back_End_Inlining)
3038 decl_attributes (&subprog_decl,
3039 tree_cons (get_identifier ("always_inline"),
3040 NULL_TREE, NULL_TREE),
3041 ATTR_FLAG_TYPE_IN_PLACE);
3042
3043 /* ... fall through ... */
3044
3045 case is_enabled:
3046 DECL_DECLARED_INLINE_P (subprog_decl) = 1;
3047 DECL_NO_INLINE_WARNING_P (subprog_decl) = artificial_flag;
3048 break;
3049
3050 default:
3051 gcc_unreachable ();
3052 }
3053
3054 TREE_PUBLIC (subprog_decl) = public_flag;
3055 TREE_READONLY (subprog_decl) = TYPE_READONLY (subprog_type);
3056 TREE_THIS_VOLATILE (subprog_decl) = TYPE_VOLATILE (subprog_type);
3057 TREE_SIDE_EFFECTS (subprog_decl) = TYPE_VOLATILE (subprog_type);
3058
3059 DECL_ARTIFICIAL (result_decl) = 1;
3060 DECL_IGNORED_P (result_decl) = 1;
3061 DECL_BY_REFERENCE (result_decl) = TREE_ADDRESSABLE (subprog_type);
3062 DECL_RESULT (subprog_decl) = result_decl;
3063
3064 if (asm_name)
3065 {
3066 SET_DECL_ASSEMBLER_NAME (subprog_decl, asm_name);
3067
3068 /* The expand_main_function circuitry expects "main_identifier_node" to
3069 designate the DECL_NAME of the 'main' entry point, which in turn is
3070 expected to be declared literally as the "main" function by default.
3071 Ada program entry points are typically declared under a different name
3072 within the binder-generated file and exported as 'main' to satisfy
3073 system expectations. Force main_identifier_node in this case. */
3074 if (asm_name == main_identifier_node)
3075 DECL_NAME (subprog_decl) = main_identifier_node;
3076 }
3077
3078 process_attributes (&subprog_decl, &attr_list, true, gnat_node);
3079
3080 /* Add this decl to the current binding level. */
3081 gnat_pushdecl (subprog_decl, gnat_node);
3082
3083 /* Output the assembler code and/or RTL for the declaration. */
3084 rest_of_decl_compilation (subprog_decl, global_bindings_p (), 0);
3085
3086 return subprog_decl;
3087 }
3088 \f
3089 /* Set up the framework for generating code for SUBPROG_DECL, a subprogram
3090 body. This routine needs to be invoked before processing the declarations
3091 appearing in the subprogram. */
3092
3093 void
3094 begin_subprog_body (tree subprog_decl)
3095 {
3096 tree param_decl;
3097
3098 announce_function (subprog_decl);
3099
3100 /* This function is being defined. */
3101 TREE_STATIC (subprog_decl) = 1;
3102
3103 current_function_decl = subprog_decl;
3104
3105 /* Enter a new binding level and show that all the parameters belong to
3106 this function. */
3107 gnat_pushlevel ();
3108
3109 for (param_decl = DECL_ARGUMENTS (subprog_decl); param_decl;
3110 param_decl = DECL_CHAIN (param_decl))
3111 DECL_CONTEXT (param_decl) = subprog_decl;
3112
3113 make_decl_rtl (subprog_decl);
3114 }
3115
3116 /* Finish translating the current subprogram and set its BODY. */
3117
3118 void
3119 end_subprog_body (tree body)
3120 {
3121 tree fndecl = current_function_decl;
3122
3123 /* Attach the BLOCK for this level to the function and pop the level. */
3124 BLOCK_SUPERCONTEXT (current_binding_level->block) = fndecl;
3125 DECL_INITIAL (fndecl) = current_binding_level->block;
3126 gnat_poplevel ();
3127
3128 /* Mark the RESULT_DECL as being in this subprogram. */
3129 DECL_CONTEXT (DECL_RESULT (fndecl)) = fndecl;
3130
3131 /* The body should be a BIND_EXPR whose BLOCK is the top-level one. */
3132 if (TREE_CODE (body) == BIND_EXPR)
3133 {
3134 BLOCK_SUPERCONTEXT (BIND_EXPR_BLOCK (body)) = fndecl;
3135 DECL_INITIAL (fndecl) = BIND_EXPR_BLOCK (body);
3136 }
3137
3138 DECL_SAVED_TREE (fndecl) = body;
3139
3140 current_function_decl = decl_function_context (fndecl);
3141 }
3142
3143 /* Wrap up compilation of SUBPROG_DECL, a subprogram body. */
3144
3145 void
3146 rest_of_subprog_body_compilation (tree subprog_decl)
3147 {
3148 /* We cannot track the location of errors past this point. */
3149 error_gnat_node = Empty;
3150
3151 /* If we're only annotating types, don't actually compile this function. */
3152 if (type_annotate_only)
3153 return;
3154
3155 /* Dump functions before gimplification. */
3156 dump_function (TDI_original, subprog_decl);
3157
3158 if (!decl_function_context (subprog_decl))
3159 cgraph_finalize_function (subprog_decl, false);
3160 else
3161 /* Register this function with cgraph just far enough to get it
3162 added to our parent's nested function list. */
3163 (void) cgraph_node::get_create (subprog_decl);
3164 }
3165
3166 tree
3167 gnat_builtin_function (tree decl)
3168 {
3169 gnat_pushdecl (decl, Empty);
3170 return decl;
3171 }
3172
3173 /* Return an integer type with the number of bits of precision given by
3174 PRECISION. UNSIGNEDP is nonzero if the type is unsigned; otherwise
3175 it is a signed type. */
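
/* For instance, gnat_type_for_size (8, 0) returns the cached 8-bit signed
   integer type and, if that type has no name yet, labels it "SIGNED_8"
   below; the unsigned counterpart would be labeled "UNSIGNED_8".  */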
3176
3177 tree
3178 gnat_type_for_size (unsigned precision, int unsignedp)
3179 {
3180 tree t;
3181 char type_name[20];
3182
3183 if (precision <= 2 * MAX_BITS_PER_WORD
3184 && signed_and_unsigned_types[precision][unsignedp])
3185 return signed_and_unsigned_types[precision][unsignedp];
3186
3187 if (unsignedp)
3188 t = make_unsigned_type (precision);
3189 else
3190 t = make_signed_type (precision);
3191
3192 if (precision <= 2 * MAX_BITS_PER_WORD)
3193 signed_and_unsigned_types[precision][unsignedp] = t;
3194
3195 if (!TYPE_NAME (t))
3196 {
3197 sprintf (type_name, "%sSIGNED_%u", unsignedp ? "UN" : "", precision);
3198 TYPE_NAME (t) = get_identifier (type_name);
3199 }
3200
3201 return t;
3202 }
3203
3204 /* Likewise for floating-point types. */
3205
3206 static tree
3207 float_type_for_precision (int precision, enum machine_mode mode)
3208 {
3209 tree t;
3210 char type_name[20];
3211
3212 if (float_types[(int) mode])
3213 return float_types[(int) mode];
3214
3215 float_types[(int) mode] = t = make_node (REAL_TYPE);
3216 TYPE_PRECISION (t) = precision;
3217 layout_type (t);
3218
3219 gcc_assert (TYPE_MODE (t) == mode);
3220 if (!TYPE_NAME (t))
3221 {
3222 sprintf (type_name, "FLOAT_%d", precision);
3223 TYPE_NAME (t) = get_identifier (type_name);
3224 }
3225
3226 return t;
3227 }
3228
3229 /* Return a data type that has machine mode MODE. UNSIGNEDP selects
3230 an unsigned type; otherwise a signed type is returned. */
3231
3232 tree
3233 gnat_type_for_mode (enum machine_mode mode, int unsignedp)
3234 {
3235 if (mode == BLKmode)
3236 return NULL_TREE;
3237
3238 if (mode == VOIDmode)
3239 return void_type_node;
3240
3241 if (COMPLEX_MODE_P (mode))
3242 return NULL_TREE;
3243
3244 if (SCALAR_FLOAT_MODE_P (mode))
3245 return float_type_for_precision (GET_MODE_PRECISION (mode), mode);
3246
3247 if (SCALAR_INT_MODE_P (mode))
3248 return gnat_type_for_size (GET_MODE_BITSIZE (mode), unsignedp);
3249
3250 if (VECTOR_MODE_P (mode))
3251 {
3252 enum machine_mode inner_mode = GET_MODE_INNER (mode);
3253 tree inner_type = gnat_type_for_mode (inner_mode, unsignedp);
3254 if (inner_type)
3255 return build_vector_type_for_mode (inner_type, mode);
3256 }
3257
3258 return NULL_TREE;
3259 }
3260
3261 /* Return the unsigned version of a TYPE_NODE, a scalar type. */
3262
3263 tree
3264 gnat_unsigned_type (tree type_node)
3265 {
3266 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 1);
3267
3268 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
3269 {
3270 type = copy_node (type);
3271 TREE_TYPE (type) = type_node;
3272 }
3273 else if (TREE_TYPE (type_node)
3274 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
3275 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
3276 {
3277 type = copy_node (type);
3278 TREE_TYPE (type) = TREE_TYPE (type_node);
3279 }
3280
3281 return type;
3282 }
3283
3284 /* Return the signed version of a TYPE_NODE, a scalar type. */
3285
3286 tree
3287 gnat_signed_type (tree type_node)
3288 {
3289 tree type = gnat_type_for_size (TYPE_PRECISION (type_node), 0);
3290
3291 if (TREE_CODE (type_node) == INTEGER_TYPE && TYPE_MODULAR_P (type_node))
3292 {
3293 type = copy_node (type);
3294 TREE_TYPE (type) = type_node;
3295 }
3296 else if (TREE_TYPE (type_node)
3297 && TREE_CODE (TREE_TYPE (type_node)) == INTEGER_TYPE
3298 && TYPE_MODULAR_P (TREE_TYPE (type_node)))
3299 {
3300 type = copy_node (type);
3301 TREE_TYPE (type) = TREE_TYPE (type_node);
3302 }
3303
3304 return type;
3305 }
3306
3307 /* Return 1 if the types T1 and T2 are compatible, i.e. if they can be
3308 transparently converted to each other. */
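
/* For instance, two constrained array subtypes with identical bounds,
   e.g. (a sketch):

      type Arr is array (Positive range <>) of Integer;
      subtype A1 is Arr (1 .. 10);
      subtype A2 is Arr (1 .. 10);

   may well translate to distinct ARRAY_TYPE nodes, yet this predicate
   reports them as compatible because they have the same domain and the
   same component type.  */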
3309
3310 int
3311 gnat_types_compatible_p (tree t1, tree t2)
3312 {
3313 enum tree_code code;
3314
3315 /* This is the default criterion. */
3316 if (TYPE_MAIN_VARIANT (t1) == TYPE_MAIN_VARIANT (t2))
3317 return 1;
3318
3319 /* We only check structural equivalence here. */
3320 if ((code = TREE_CODE (t1)) != TREE_CODE (t2))
3321 return 0;
3322
3323 /* Vector types are also compatible if they have the same number of subparts
3324 and the same form of (scalar) element type. */
3325 if (code == VECTOR_TYPE
3326 && TYPE_VECTOR_SUBPARTS (t1) == TYPE_VECTOR_SUBPARTS (t2)
3327 && TREE_CODE (TREE_TYPE (t1)) == TREE_CODE (TREE_TYPE (t2))
3328 && TYPE_PRECISION (TREE_TYPE (t1)) == TYPE_PRECISION (TREE_TYPE (t2)))
3329 return 1;
3330
3331 /* Array types are also compatible if they are constrained and have the same
3332 domain(s) and the same component type. */
3333 if (code == ARRAY_TYPE
3334 && (TYPE_DOMAIN (t1) == TYPE_DOMAIN (t2)
3335 || (TYPE_DOMAIN (t1)
3336 && TYPE_DOMAIN (t2)
3337 && tree_int_cst_equal (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)),
3338 TYPE_MIN_VALUE (TYPE_DOMAIN (t2)))
3339 && tree_int_cst_equal (TYPE_MAX_VALUE (TYPE_DOMAIN (t1)),
3340 TYPE_MAX_VALUE (TYPE_DOMAIN (t2)))))
3341 && (TREE_TYPE (t1) == TREE_TYPE (t2)
3342 || (TREE_CODE (TREE_TYPE (t1)) == ARRAY_TYPE
3343 && gnat_types_compatible_p (TREE_TYPE (t1), TREE_TYPE (t2)))))
3344 return 1;
3345
3346 return 0;
3347 }
3348
3349 /* Return true if EXPR is a useless type conversion. */
3350
3351 bool
3352 gnat_useless_type_conversion (tree expr)
3353 {
3354 if (CONVERT_EXPR_P (expr)
3355 || TREE_CODE (expr) == VIEW_CONVERT_EXPR
3356 || TREE_CODE (expr) == NON_LVALUE_EXPR)
3357 return gnat_types_compatible_p (TREE_TYPE (expr),
3358 TREE_TYPE (TREE_OPERAND (expr, 0)));
3359
3360 return false;
3361 }
3362
3363 /* Return true if T, a FUNCTION_TYPE, has the specified list of flags. */
3364
3365 bool
3366 fntype_same_flags_p (const_tree t, tree cico_list, bool return_unconstrained_p,
3367 bool return_by_direct_ref_p, bool return_by_invisi_ref_p)
3368 {
3369 return TYPE_CI_CO_LIST (t) == cico_list
3370 && TYPE_RETURN_UNCONSTRAINED_P (t) == return_unconstrained_p
3371 && TYPE_RETURN_BY_DIRECT_REF_P (t) == return_by_direct_ref_p
3372 && TREE_ADDRESSABLE (t) == return_by_invisi_ref_p;
3373 }
3374 \f
3375 /* EXP is an expression for the size of an object. If this size contains
3376 discriminant references, replace them with the maximum (if MAX_P) or
3377 minimum (if !MAX_P) possible value of the discriminant. */
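
/* For instance, given a discriminated record such as (a sketch):

      type Rec (D : Natural := 0) is record
         S : String (1 .. D);
      end record;

   the size expression of Rec references the discriminant D; calling
   max_size on it with MAX_P set replaces that reference with
   Natural'Last, yielding the largest size an object of the type may
   have.  */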
3378
3379 tree
3380 max_size (tree exp, bool max_p)
3381 {
3382 enum tree_code code = TREE_CODE (exp);
3383 tree type = TREE_TYPE (exp);
3384
3385 switch (TREE_CODE_CLASS (code))
3386 {
3387 case tcc_declaration:
3388 case tcc_constant:
3389 return exp;
3390
3391 case tcc_vl_exp:
3392 if (code == CALL_EXPR)
3393 {
3394 tree t, *argarray;
3395 int n, i;
3396
3397 t = maybe_inline_call_in_expr (exp);
3398 if (t)
3399 return max_size (t, max_p);
3400
3401 n = call_expr_nargs (exp);
3402 gcc_assert (n > 0);
3403 argarray = XALLOCAVEC (tree, n);
3404 for (i = 0; i < n; i++)
3405 argarray[i] = max_size (CALL_EXPR_ARG (exp, i), max_p);
3406 return build_call_array (type, CALL_EXPR_FN (exp), n, argarray);
3407 }
3408 break;
3409
3410 case tcc_reference:
3411 /* If this contains a PLACEHOLDER_EXPR, it is the thing we want to
3412 modify. Otherwise, we treat it like a variable. */
3413 if (CONTAINS_PLACEHOLDER_P (exp))
3414 {
3415 tree val_type = TREE_TYPE (TREE_OPERAND (exp, 1));
3416 tree val = (max_p ? TYPE_MAX_VALUE (type) : TYPE_MIN_VALUE (type));
3417 return max_size (convert (get_base_type (val_type), val), true);
3418 }
3419
3420 return exp;
3421
3422 case tcc_comparison:
3423 return max_p ? size_one_node : size_zero_node;
3424
3425 case tcc_unary:
3426 if (code == NON_LVALUE_EXPR)
3427 return max_size (TREE_OPERAND (exp, 0), max_p);
3428
3429 return fold_build1 (code, type,
3430 max_size (TREE_OPERAND (exp, 0),
3431 code == NEGATE_EXPR ? !max_p : max_p));
3432
3433 case tcc_binary:
3434 {
3435 tree lhs = max_size (TREE_OPERAND (exp, 0), max_p);
3436 tree rhs = max_size (TREE_OPERAND (exp, 1),
3437 code == MINUS_EXPR ? !max_p : max_p);
3438
3439 /* Special-case wanting the maximum value of a MIN_EXPR.
3440 In that case, if one side overflows, return the other. */
3441 if (max_p && code == MIN_EXPR)
3442 {
3443 if (TREE_CODE (rhs) == INTEGER_CST && TREE_OVERFLOW (rhs))
3444 return lhs;
3445
3446 if (TREE_CODE (lhs) == INTEGER_CST && TREE_OVERFLOW (lhs))
3447 return rhs;
3448 }
3449
3450 /* Likewise, handle a MINUS_EXPR or PLUS_EXPR with the LHS
3451 overflowing and the RHS a variable. */
3452 if ((code == MINUS_EXPR || code == PLUS_EXPR)
3453 && TREE_CODE (lhs) == INTEGER_CST
3454 && TREE_OVERFLOW (lhs)
3455 && !TREE_CONSTANT (rhs))
3456 return lhs;
3457
3458 return size_binop (code, lhs, rhs);
3459 }
3460
3461 case tcc_expression:
3462 switch (TREE_CODE_LENGTH (code))
3463 {
3464 case 1:
3465 if (code == SAVE_EXPR)
3466 return exp;
3467
3468 return fold_build1 (code, type,
3469 max_size (TREE_OPERAND (exp, 0), max_p));
3470
3471 case 2:
3472 if (code == COMPOUND_EXPR)
3473 return max_size (TREE_OPERAND (exp, 1), max_p);
3474
3475 return fold_build2 (code, type,
3476 max_size (TREE_OPERAND (exp, 0), max_p),
3477 max_size (TREE_OPERAND (exp, 1), max_p));
3478
3479 case 3:
3480 if (code == COND_EXPR)
3481 return fold_build2 (max_p ? MAX_EXPR : MIN_EXPR, type,
3482 max_size (TREE_OPERAND (exp, 1), max_p),
3483 max_size (TREE_OPERAND (exp, 2), max_p));
3484
3485 default:
3486 break;
3487 }
3488
3489 /* Other tree classes cannot happen. */
3490 default:
3491 break;
3492 }
3493
3494 gcc_unreachable ();
3495 }
3496 \f
3497 /* Build a template of type TEMPLATE_TYPE from the array bounds of ARRAY_TYPE.
3498 EXPR is an expression that we can use to locate any PLACEHOLDER_EXPRs.
3499 Return a constructor for the template. */
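
/* For instance, for a constrained array subtype like String (1 .. 5),
   the returned CONSTRUCTOR is expected to fill the two template fields
   of the first dimension, conventionally named LB0 and UB0, with the
   values 1 and 5 (a sketch; the actual field decls come from
   TEMPLATE_TYPE).  */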
3500
3501 tree
3502 build_template (tree template_type, tree array_type, tree expr)
3503 {
3504 vec<constructor_elt, va_gc> *template_elts = NULL;
3505 tree bound_list = NULL_TREE;
3506 tree field;
3507
3508 while (TREE_CODE (array_type) == RECORD_TYPE
3509 && (TYPE_PADDING_P (array_type)
3510 || TYPE_JUSTIFIED_MODULAR_P (array_type)))
3511 array_type = TREE_TYPE (TYPE_FIELDS (array_type));
3512
3513 if (TREE_CODE (array_type) == ARRAY_TYPE
3514 || (TREE_CODE (array_type) == INTEGER_TYPE
3515 && TYPE_HAS_ACTUAL_BOUNDS_P (array_type)))
3516 bound_list = TYPE_ACTUAL_BOUNDS (array_type);
3517
3518 /* First make the list for a CONSTRUCTOR for the template. Go down the
3519 field list of the template instead of the type chain because this
3520 array might be an Ada array of arrays and we can't tell where the
3521 nested arrays stop being the underlying object. */
3522
3523 for (field = TYPE_FIELDS (template_type); field;
3524 (bound_list
3525 ? (bound_list = TREE_CHAIN (bound_list))
3526 : (array_type = TREE_TYPE (array_type))),
3527 field = DECL_CHAIN (DECL_CHAIN (field)))
3528 {
3529 tree bounds, min, max;
3530
3531 /* If we have a bound list, get the bounds from there. Likewise
3532 for an ARRAY_TYPE. Otherwise, if expr is a PARM_DECL with
3533 DECL_BY_COMPONENT_PTR_P, use the bounds of the field in the template.
3534 This will give us a maximum range. */
3535 if (bound_list)
3536 bounds = TREE_VALUE (bound_list);
3537 else if (TREE_CODE (array_type) == ARRAY_TYPE)
3538 bounds = TYPE_INDEX_TYPE (TYPE_DOMAIN (array_type));
3539 else if (expr && TREE_CODE (expr) == PARM_DECL
3540 && DECL_BY_COMPONENT_PTR_P (expr))
3541 bounds = TREE_TYPE (field);
3542 else
3543 gcc_unreachable ();
3544
3545 min = convert (TREE_TYPE (field), TYPE_MIN_VALUE (bounds));
3546 max = convert (TREE_TYPE (DECL_CHAIN (field)), TYPE_MAX_VALUE (bounds));
3547
3548 /* If either MIN or MAX involve a PLACEHOLDER_EXPR, we must
3549 substitute it from OBJECT. */
3550 min = SUBSTITUTE_PLACEHOLDER_IN_EXPR (min, expr);
3551 max = SUBSTITUTE_PLACEHOLDER_IN_EXPR (max, expr);
3552
3553 CONSTRUCTOR_APPEND_ELT (template_elts, field, min);
3554 CONSTRUCTOR_APPEND_ELT (template_elts, DECL_CHAIN (field), max);
3555 }
3556
3557 return gnat_build_constructor (template_type, template_elts);
3558 }
3559 \f
3560 /* Return true if TYPE is suitable for the element type of a vector. */
3561
3562 static bool
3563 type_for_vector_element_p (tree type)
3564 {
3565 enum machine_mode mode;
3566
3567 if (!INTEGRAL_TYPE_P (type)
3568 && !SCALAR_FLOAT_TYPE_P (type)
3569 && !FIXED_POINT_TYPE_P (type))
3570 return false;
3571
3572 mode = TYPE_MODE (type);
3573 if (GET_MODE_CLASS (mode) != MODE_INT
3574 && !SCALAR_FLOAT_MODE_P (mode)
3575 && !ALL_SCALAR_FIXED_POINT_MODE_P (mode))
3576 return false;
3577
3578 return true;
3579 }
3580
3581 /* Return a vector type given the SIZE and the INNER_TYPE, or NULL_TREE if
3582 this is not possible. If ATTRIBUTE is non-zero, we are processing the
3583 attribute declaration and want to issue error messages on failure. */
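
/* For instance, with a 4-byte INNER_TYPE, a SIZE of 16 bytes yields a
   4-unit vector; a SIZE of 12 is rejected because 3 units is not a
   power of two, and a SIZE of 10 is rejected as not being an integral
   multiple of the component size.  */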
3584
3585 static tree
3586 build_vector_type_for_size (tree inner_type, tree size, tree attribute)
3587 {
3588 unsigned HOST_WIDE_INT size_int, inner_size_int;
3589 int nunits;
3590
3591 /* Silently punt on variable sizes: we can't make vector types for them
3592 and need to ignore them on front-end generated subtypes of unconstrained
3593 base types. Besides, this attribute is for binding implementors, not
3594 end users, so we should never get here from legitimate explicit uses. */
3595 if (!tree_fits_uhwi_p (size))
3596 return NULL_TREE;
3597 size_int = tree_to_uhwi (size);
3598
3599 if (!type_for_vector_element_p (inner_type))
3600 {
3601 if (attribute)
3602 error ("invalid element type for attribute %qs",
3603 IDENTIFIER_POINTER (attribute));
3604 return NULL_TREE;
3605 }
3606 inner_size_int = tree_to_uhwi (TYPE_SIZE_UNIT (inner_type));
3607
3608 if (size_int % inner_size_int)
3609 {
3610 if (attribute)
3611 error ("vector size not an integral multiple of component size");
3612 return NULL_TREE;
3613 }
3614
3615 if (size_int == 0)
3616 {
3617 if (attribute)
3618 error ("zero vector size");
3619 return NULL_TREE;
3620 }
3621
3622 nunits = size_int / inner_size_int;
3623 if (nunits & (nunits - 1))
3624 {
3625 if (attribute)
3626 error ("number of components of vector not a power of two");
3627 return NULL_TREE;
3628 }
3629
3630 return build_vector_type (inner_type, nunits);
3631 }
3632
3633 /* Return a vector type whose representative array type is ARRAY_TYPE, or
3634 NULL_TREE if this is not possible. If ATTRIBUTE is non-zero, we are
3635 processing the attribute and want to issue error messages on failure. */
3636
3637 static tree
3638 build_vector_type_for_array (tree array_type, tree attribute)
3639 {
3640 tree vector_type = build_vector_type_for_size (TREE_TYPE (array_type),
3641 TYPE_SIZE_UNIT (array_type),
3642 attribute);
3643 if (!vector_type)
3644 return NULL_TREE;
3645
3646 TYPE_REPRESENTATIVE_ARRAY (vector_type) = array_type;
3647 return vector_type;
3648 }
3649 \f
3650 /* Build a type to be used to represent an aliased object whose nominal type
3651 is an unconstrained array. This consists of a RECORD_TYPE containing a
3652 field of TEMPLATE_TYPE and a field of OBJECT_TYPE, which is an ARRAY_TYPE.
3653 If ARRAY_TYPE is that of an unconstrained array, this is used to represent
3654 an arbitrary unconstrained object. Use NAME as the name of the record.
3655 DEBUG_INFO_P is true if we need to write debug information for the type. */
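
/* The result is laid out like the following record (a sketch):

      type UNC_Object is record
         BOUNDS : <template>;   --  the bounds template comes first
         ARRAY  : <data>;       --  followed by the array data
      end record;

   so that the bounds of the object sit at a fixed offset in front of
   the array data that a thin pointer designates.  */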
3656
3657 tree
3658 build_unc_object_type (tree template_type, tree object_type, tree name,
3659 bool debug_info_p)
3660 {
3661 tree decl;
3662 tree type = make_node (RECORD_TYPE);
3663 tree template_field
3664 = create_field_decl (get_identifier ("BOUNDS"), template_type, type,
3665 NULL_TREE, NULL_TREE, 0, 1);
3666 tree array_field
3667 = create_field_decl (get_identifier ("ARRAY"), object_type, type,
3668 NULL_TREE, NULL_TREE, 0, 1);
3669
3670 TYPE_NAME (type) = name;
3671 TYPE_CONTAINS_TEMPLATE_P (type) = 1;
3672 DECL_CHAIN (template_field) = array_field;
3673 finish_record_type (type, template_field, 0, true);
3674
3675 /* Declare it now since it will never be declared otherwise. This is
3676 necessary to ensure that its subtrees are properly marked. */
3677 decl = create_type_decl (name, type, true, debug_info_p, Empty);
3678
3679 /* TEMPLATE_TYPE will not be used anywhere other than here, so to keep
3680 the debug info clean and to avoid scoping issues, make DECL its
3681 context. */
3682 gnat_set_type_context (template_type, decl);
3683
3684 return type;
3685 }
3686
3687 /* Same, taking a thin or fat pointer type instead of a template type. */
3688
3689 tree
3690 build_unc_object_type_from_ptr (tree thin_fat_ptr_type, tree object_type,
3691 tree name, bool debug_info_p)
3692 {
3693 tree template_type;
3694
3695 gcc_assert (TYPE_IS_FAT_OR_THIN_POINTER_P (thin_fat_ptr_type));
3696
3697 template_type
3698 = (TYPE_IS_FAT_POINTER_P (thin_fat_ptr_type)
3699 ? TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (thin_fat_ptr_type))))
3700 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (thin_fat_ptr_type))));
3701
3702 return
3703 build_unc_object_type (template_type, object_type, name, debug_info_p);
3704 }
3705 \f
3706 /* Update anything previously pointing to OLD_TYPE to point to NEW_TYPE.
3707 In the normal case this is just two adjustments, but we have more to
3708 do if NEW_TYPE is an UNCONSTRAINED_ARRAY_TYPE. */
3709
3710 void
3711 update_pointer_to (tree old_type, tree new_type)
3712 {
3713 tree ptr = TYPE_POINTER_TO (old_type);
3714 tree ref = TYPE_REFERENCE_TO (old_type);
3715 tree t;
3716
3717 /* If this is the main variant, process all the other variants first. */
3718 if (TYPE_MAIN_VARIANT (old_type) == old_type)
3719 for (t = TYPE_NEXT_VARIANT (old_type); t; t = TYPE_NEXT_VARIANT (t))
3720 update_pointer_to (t, new_type);
3721
3722 /* If no pointers and no references, we are done. */
3723 if (!ptr && !ref)
3724 return;
3725
3726 /* Merge the old type qualifiers in the new type.
3727
3728 Each old variant has qualifiers for specific reasons, and the new
3729 designated type as well. Each set of qualifiers represents useful
3730 information grabbed at some point, and merging the two simply unifies
3731 these inputs into the final type description.
3732
3733 Consider for instance a volatile type frozen after an access to constant
3734 type designating it; after the designated type's freeze, we get here with
3735 a volatile NEW_TYPE and a dummy OLD_TYPE with a readonly variant, created
3736 when the access type was processed. We will make a volatile and readonly
3737 designated type, because that's what it really is.
3738
3739 We might also get here for a non-dummy OLD_TYPE variant with different
3740 qualifiers than those of NEW_TYPE, for instance in some cases of pointers
3741 to private record type elaboration (see the comments around the call to
3742 this routine in gnat_to_gnu_entity <E_Access_Type>). We have to merge
3743 the qualifiers in those cases too, to avoid accidentally discarding the
3744 initial set, and will often end up with OLD_TYPE == NEW_TYPE then. */
3745 new_type
3746 = build_qualified_type (new_type,
3747 TYPE_QUALS (old_type) | TYPE_QUALS (new_type));
3748
3749 /* If old type and new type are identical, there is nothing to do. */
3750 if (old_type == new_type)
3751 return;
3752
3753 /* Otherwise, first handle the simple case. */
3754 if (TREE_CODE (new_type) != UNCONSTRAINED_ARRAY_TYPE)
3755 {
3756 tree new_ptr, new_ref;
3757
3758 /* If pointer or reference already points to new type, nothing to do.
3759 This can happen as update_pointer_to can be invoked multiple times
3760 on the same couple of types because of the type variants. */
3761 if ((ptr && TREE_TYPE (ptr) == new_type)
3762 || (ref && TREE_TYPE (ref) == new_type))
3763 return;
3764
3765 /* Chain PTR and its variants at the end. */
3766 new_ptr = TYPE_POINTER_TO (new_type);
3767 if (new_ptr)
3768 {
3769 while (TYPE_NEXT_PTR_TO (new_ptr))
3770 new_ptr = TYPE_NEXT_PTR_TO (new_ptr);
3771 TYPE_NEXT_PTR_TO (new_ptr) = ptr;
3772 }
3773 else
3774 TYPE_POINTER_TO (new_type) = ptr;
3775
3776 /* Now adjust them. */
3777 for (; ptr; ptr = TYPE_NEXT_PTR_TO (ptr))
3778 for (t = TYPE_MAIN_VARIANT (ptr); t; t = TYPE_NEXT_VARIANT (t))
3779 {
3780 TREE_TYPE (t) = new_type;
3781 if (TYPE_NULL_BOUNDS (t))
3782 TREE_TYPE (TREE_OPERAND (TYPE_NULL_BOUNDS (t), 0)) = new_type;
3783 }
3784
3785 /* Chain REF and its variants at the end. */
3786 new_ref = TYPE_REFERENCE_TO (new_type);
3787 if (new_ref)
3788 {
3789 while (TYPE_NEXT_REF_TO (new_ref))
3790 new_ref = TYPE_NEXT_REF_TO (new_ref);
3791 TYPE_NEXT_REF_TO (new_ref) = ref;
3792 }
3793 else
3794 TYPE_REFERENCE_TO (new_type) = ref;
3795
3796 /* Now adjust them. */
3797 for (; ref; ref = TYPE_NEXT_REF_TO (ref))
3798 for (t = TYPE_MAIN_VARIANT (ref); t; t = TYPE_NEXT_VARIANT (t))
3799 TREE_TYPE (t) = new_type;
3800
3801 TYPE_POINTER_TO (old_type) = NULL_TREE;
3802 TYPE_REFERENCE_TO (old_type) = NULL_TREE;
3803 }
3804
3805 /* Now deal with the unconstrained array case. In this case the pointer
3806 is actually a record where both fields are pointers to dummy nodes.
3807 Turn them into pointers to the correct types using update_pointer_to.
3808 Likewise for the pointer to the object record (thin pointer). */
3809 else
3810 {
3811 tree new_ptr = TYPE_POINTER_TO (new_type);
3812
3813 gcc_assert (TYPE_IS_FAT_POINTER_P (ptr));
3814
3815 /* If PTR already points to NEW_TYPE, nothing to do. This can happen
3816 since update_pointer_to can be invoked multiple times on the same
3817 couple of types because of the type variants. */
3818 if (TYPE_UNCONSTRAINED_ARRAY (ptr) == new_type)
3819 return;
3820
3821 update_pointer_to
3822 (TREE_TYPE (TREE_TYPE (TYPE_FIELDS (ptr))),
3823 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (new_ptr))));
3824
3825 update_pointer_to
3826 (TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (ptr)))),
3827 TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (new_ptr)))));
3828
3829 update_pointer_to (TYPE_OBJECT_RECORD_TYPE (old_type),
3830 TYPE_OBJECT_RECORD_TYPE (new_type));
3831
3832 TYPE_POINTER_TO (old_type) = NULL_TREE;
3833 }
3834 }
3835 \f
3836 /* Convert EXPR, a pointer to a constrained array, into a pointer to an
3837 unconstrained one. This involves making or finding a template. */
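
/* A fat pointer is a two-field record conventionally built as

      (P_ARRAY => <address of the data>, P_BOUNDS => <address of the
       bounds template>)

   which is the shape the CONSTRUCTORs assembled below must match
   exactly (a sketch; the actual field decls come from TYPE).  */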
3838
3839 static tree
3840 convert_to_fat_pointer (tree type, tree expr)
3841 {
3842 tree template_type = TREE_TYPE (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))));
3843 tree p_array_type = TREE_TYPE (TYPE_FIELDS (type));
3844 tree etype = TREE_TYPE (expr);
3845 tree template_addr;
3846 vec<constructor_elt, va_gc> *v;
3847 vec_alloc (v, 2);
3848
3849 /* If EXPR is null, make a fat pointer that contains a null pointer to the
3850 array (compare_fat_pointers ensures that this is the full discriminant)
3851 and a valid pointer to the bounds. This latter property is necessary
3852 since the compiler can hoist the load of the bounds done through it. */
3853 if (integer_zerop (expr))
3854 {
3855 tree ptr_template_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
3856 tree null_bounds, t;
3857
3858 if (TYPE_NULL_BOUNDS (ptr_template_type))
3859 null_bounds = TYPE_NULL_BOUNDS (ptr_template_type);
3860 else
3861 {
3862 /* The template type can still be dummy at this point so we build an
3863 empty constructor. The middle-end will fill it in with zeros. */
3864 t = build_constructor (template_type,
3865 NULL);
3866 TREE_CONSTANT (t) = TREE_STATIC (t) = 1;
3867 null_bounds = build_unary_op (ADDR_EXPR, NULL_TREE, t);
3868 SET_TYPE_NULL_BOUNDS (ptr_template_type, null_bounds);
3869 }
3870
3871 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
3872 fold_convert (p_array_type, null_pointer_node));
3873 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), null_bounds);
3874 t = build_constructor (type, v);
3875 /* Do not set TREE_CONSTANT so as to force T to static memory. */
3876 TREE_CONSTANT (t) = 0;
3877 TREE_STATIC (t) = 1;
3878
3879 return t;
3880 }
3881
3882 /* If EXPR is a thin pointer, make template and data from the record. */
3883 if (TYPE_IS_THIN_POINTER_P (etype))
3884 {
3885 tree field = TYPE_FIELDS (TREE_TYPE (etype));
3886
3887 expr = gnat_protect_expr (expr);
3888
3889 /* If we have a TYPE_UNCONSTRAINED_ARRAY attached to the RECORD_TYPE,
3890 the thin pointer value has been shifted so we shift it back to get
3891 the template address. */
3892 if (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)))
3893 {
3894 template_addr
3895 = build_binary_op (POINTER_PLUS_EXPR, etype, expr,
3896 fold_build1 (NEGATE_EXPR, sizetype,
3897 byte_position
3898 (DECL_CHAIN (field))));
3899 template_addr
3900 = fold_convert (TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type))),
3901 template_addr);
3902 }
3903
3904 /* Otherwise we explicitly take the address of the fields. */
3905 else
3906 {
3907 expr = build_unary_op (INDIRECT_REF, NULL_TREE, expr);
3908 template_addr
3909 = build_unary_op (ADDR_EXPR, NULL_TREE,
3910 build_component_ref (expr, NULL_TREE, field,
3911 false));
3912 expr = build_unary_op (ADDR_EXPR, NULL_TREE,
3913 build_component_ref (expr, NULL_TREE,
3914 DECL_CHAIN (field),
3915 false));
3916 }
3917 }
3918
3919 /* Otherwise, build the constructor for the template. */
3920 else
3921 template_addr
3922 = build_unary_op (ADDR_EXPR, NULL_TREE,
3923 build_template (template_type, TREE_TYPE (etype),
3924 expr));
3925
3926 /* The final result is a constructor for the fat pointer.
3927
3928 If EXPR is an argument of a foreign convention subprogram, the type it
3929 points to is directly the component type. In this case, the expression
3930 type may not match the corresponding FIELD_DECL type at this point, so we
3931 call "convert" here to fix that up if necessary. This type consistency is
3932 required, for instance because it ensures that possible later folding of
3933 COMPONENT_REFs against this constructor always yields something of the
3934 same type as the initial reference.
3935
3936 Note that the call to "build_template" above is still fine because it
3937 will only refer to the provided TEMPLATE_TYPE in this case. */
3938 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type), convert (p_array_type, expr));
3939 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)), template_addr);
3940 return gnat_build_constructor (type, v);
3941 }
3942 \f
3943 /* Create an expression whose value is that of EXPR,
3944 converted to type TYPE. The TREE_TYPE of the value
3945 is always TYPE. This function implements all reasonable
3946 conversions; callers should filter out those that are
3947 not permitted by the language being compiled. */
3948
3949 tree
3950 convert (tree type, tree expr)
3951 {
3952 tree etype = TREE_TYPE (expr);
3953 enum tree_code ecode = TREE_CODE (etype);
3954 enum tree_code code = TREE_CODE (type);
3955
3956 /* If the expression is already of the right type, we are done. */
3957 if (etype == type)
3958 return expr;
3959
3960 /* If both input and output have padding and are of variable size, do this
3961 as an unchecked conversion. Likewise if one is a mere variant of the
3962 other, so we avoid a pointless unpad/repad sequence. */
3963 else if (code == RECORD_TYPE && ecode == RECORD_TYPE
3964 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
3965 && (!TREE_CONSTANT (TYPE_SIZE (type))
3966 || !TREE_CONSTANT (TYPE_SIZE (etype))
3967 || TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype)
3968 || TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type)))
3969 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (etype)))))
3970 ;
3971
3972 /* If the output type has padding, convert to the inner type and make a
3973 constructor to build the record, unless a variable size is involved. */
3974 else if (code == RECORD_TYPE && TYPE_PADDING_P (type))
3975 {
3976 vec<constructor_elt, va_gc> *v;
3977
3978 /* If we previously converted from another type and our type is
3979 of variable size, remove the conversion to avoid the need for
3980 variable-sized temporaries. Likewise for a conversion between
3981 original and packable version. */
3982 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
3983 && (!TREE_CONSTANT (TYPE_SIZE (type))
3984 || (ecode == RECORD_TYPE
3985 && TYPE_NAME (etype)
3986 == TYPE_NAME (TREE_TYPE (TREE_OPERAND (expr, 0))))))
3987 expr = TREE_OPERAND (expr, 0);
3988
3989 /* If we are just removing the padding from EXPR, convert the original
3990 object if we have variable size, in order to avoid the need for some
3991 variable-sized temporaries. Likewise if one padding type is a mere
3992 variant of the other, so we avoid a pointless unpad/repad sequence. */
3993 if (TREE_CODE (expr) == COMPONENT_REF
3994 && TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (expr, 0)))
3995 && (!TREE_CONSTANT (TYPE_SIZE (type))
3996 || TYPE_MAIN_VARIANT (type)
3997 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (expr, 0)))
3998 || (ecode == RECORD_TYPE
3999 && TYPE_NAME (etype)
4000 == TYPE_NAME (TREE_TYPE (TYPE_FIELDS (type))))))
4001 return convert (type, TREE_OPERAND (expr, 0));
4002
4003 /* If the inner type is of self-referential size and the expression type
4004 is a record, do this as an unchecked conversion. But first pad the
4005 expression if possible to have the same size on both sides. */
4006 if (ecode == RECORD_TYPE
4007 && CONTAINS_PLACEHOLDER_P (DECL_SIZE (TYPE_FIELDS (type))))
4008 {
4009 if (TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST)
4010 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4011 false, false, false, true),
4012 expr);
4013 return unchecked_convert (type, expr, false);
4014 }
4015
4016 /* If we are converting between array types with variable size, do the
4017 final conversion as an unchecked conversion, again to avoid the need
4018 for some variable-sized temporaries. If valid, this conversion is
4019 very likely purely technical and without real effects. */
4020 if (ecode == ARRAY_TYPE
4021 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == ARRAY_TYPE
4022 && !TREE_CONSTANT (TYPE_SIZE (etype))
4023 && !TREE_CONSTANT (TYPE_SIZE (type)))
4024 return unchecked_convert (type,
4025 convert (TREE_TYPE (TYPE_FIELDS (type)),
4026 expr),
4027 false);
4028
4029 vec_alloc (v, 1);
4030 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4031 convert (TREE_TYPE (TYPE_FIELDS (type)), expr));
4032 return gnat_build_constructor (type, v);
4033 }
4034
4035 /* If the input type has padding, remove it and convert to the output type.
4036 The ordering of the conditions is arranged to ensure that the output type
4037 is not a padding type here, as it is not clear whether the conversion
4038 would always be correct if this were to happen. */
4039 else if (ecode == RECORD_TYPE && TYPE_PADDING_P (etype))
4040 {
4041 tree unpadded;
4042
4043 /* If we have just converted to this padded type, just get the
4044 inner expression. */
4045 if (TREE_CODE (expr) == CONSTRUCTOR
4046 && !vec_safe_is_empty (CONSTRUCTOR_ELTS (expr))
4047 && (*CONSTRUCTOR_ELTS (expr))[0].index == TYPE_FIELDS (etype))
4048 unpadded = (*CONSTRUCTOR_ELTS (expr))[0].value;
4049
4050 /* Otherwise, build an explicit component reference. */
4051 else
4052 unpadded
4053 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
4054
4055 return convert (type, unpadded);
4056 }
4057
4058 /* If the input is a biased type, adjust first. */
4059 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4060 return convert (type, fold_build2 (PLUS_EXPR, TREE_TYPE (etype),
4061 fold_convert (TREE_TYPE (etype), expr),
4062 fold_convert (TREE_TYPE (etype),
4063 TYPE_MIN_VALUE (etype))));
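
  /* E.g. given a biased subtype such as (a sketch)

        type Small is range 100 .. 103;
        for Small'Size use 2;

     the 2-bit stored values 0 .. 3 are debiased by the addition of
     TYPE_MIN_VALUE above, i.e. 100 is added back, before the regular
     conversion to TYPE takes place.  */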
4064
4065 /* If the input is a justified modular type, we need to extract the actual
4066 object before converting it to any other type, with the exceptions of an
4067 unconstrained array and of a mere type variant. It is useful to avoid
4068 the extraction and conversion in the type variant case because it could
4069 end up replacing a VAR_DECL expr by a constructor and we might be about
4070 to take the address of the result. */
4071 if (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)
4072 && code != UNCONSTRAINED_ARRAY_TYPE
4073 && TYPE_MAIN_VARIANT (type) != TYPE_MAIN_VARIANT (etype))
4074 return convert (type, build_component_ref (expr, NULL_TREE,
4075 TYPE_FIELDS (etype), false));
4076
4077 /* If converting to a type that contains a template, convert to the data
4078 type and then build the template. */
4079 if (code == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (type))
4080 {
4081 tree obj_type = TREE_TYPE (DECL_CHAIN (TYPE_FIELDS (type)));
4082 vec<constructor_elt, va_gc> *v;
4083 vec_alloc (v, 2);
4084
4085 /* If the source already has a template, get a reference to the
4086 associated array only, as we are going to rebuild a template
4087 for the target type anyway. */
4088 expr = maybe_unconstrained_array (expr);
4089
4090 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4091 build_template (TREE_TYPE (TYPE_FIELDS (type)),
4092 obj_type, NULL_TREE));
4093 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (type)),
4094 convert (obj_type, expr));
4095 return gnat_build_constructor (type, v);
4096 }
4097
4098 /* There are some cases of expressions that we process specially. */
4099 switch (TREE_CODE (expr))
4100 {
4101 case ERROR_MARK:
4102 return expr;
4103
4104 case NULL_EXPR:
4105 /* Just set its type here. For TRANSFORM_EXPR, we will do the actual
4106 conversion in gnat_expand_expr. NULL_EXPR does not represent
4107 an actual value, so no conversion is needed. */
4108 expr = copy_node (expr);
4109 TREE_TYPE (expr) = type;
4110 return expr;
4111
4112 case STRING_CST:
4113 /* If we are converting a STRING_CST to another constrained array type,
4114 just make a new one in the proper type. */
4115 if (code == ecode && AGGREGATE_TYPE_P (etype)
4116 && !(TREE_CODE (TYPE_SIZE (etype)) == INTEGER_CST
4117 && TREE_CODE (TYPE_SIZE (type)) != INTEGER_CST))
4118 {
4119 expr = copy_node (expr);
4120 TREE_TYPE (expr) = type;
4121 return expr;
4122 }
4123 break;
4124
4125 case VECTOR_CST:
4126 /* If we are converting a VECTOR_CST to a mere type variant, just make
4127 a new one in the proper type. */
4128 if (code == ecode && gnat_types_compatible_p (type, etype))
4129 {
4130 expr = copy_node (expr);
4131 TREE_TYPE (expr) = type;
4132 return expr;
4133 }
4134
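      /* ... fall through (harmless: none of the cases below can match a
         VECTOR_CST whose conversion wasn't already handled above) ... */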
4135 case CONSTRUCTOR:
4136 /* If we are converting a CONSTRUCTOR to a mere type variant, or to
4137 another padding type around the same type, just make a new one in
4138 the proper type. */
4139 if (code == ecode
4140 && (gnat_types_compatible_p (type, etype)
4141 || (code == RECORD_TYPE
4142 && TYPE_PADDING_P (type) && TYPE_PADDING_P (etype)
4143 && TREE_TYPE (TYPE_FIELDS (type))
4144 == TREE_TYPE (TYPE_FIELDS (etype)))))
4145 {
4146 expr = copy_node (expr);
4147 TREE_TYPE (expr) = type;
4148 CONSTRUCTOR_ELTS (expr) = vec_safe_copy (CONSTRUCTOR_ELTS (expr));
4149 return expr;
4150 }
4151
4152 /* Likewise for a conversion between original and packable version, or
4153 conversion between types of the same size and with the same list of
4154 fields, but we have to work harder to preserve type consistency. */
4155 if (code == ecode
4156 && code == RECORD_TYPE
4157 && (TYPE_NAME (type) == TYPE_NAME (etype)
4158 || tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (etype))))
4160 {
4161 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4162 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4163 vec<constructor_elt, va_gc> *v;
4164 vec_alloc (v, len);
4165 tree efield = TYPE_FIELDS (etype), field = TYPE_FIELDS (type);
4166 unsigned HOST_WIDE_INT idx;
4167 tree index, value;
4168
4169 /* Whether we need to clear TREE_CONSTANT et al. on the output
4170 constructor when we convert in place. */
4171 bool clear_constant = false;
4172
4173 FOR_EACH_CONSTRUCTOR_ELT (e, idx, index, value)
4174 {
4175 /* Skip the missing fields in the CONSTRUCTOR. */
4176 while (efield && field && !SAME_FIELD_P (efield, index))
4177 {
4178 efield = DECL_CHAIN (efield);
4179 field = DECL_CHAIN (field);
4180 }
4181 /* The field must be the same. */
4182 if (!(efield && field && SAME_FIELD_P (efield, field)))
4183 break;
4184 constructor_elt elt
4185 = {field, convert (TREE_TYPE (field), value)};
4186 v->quick_push (elt);
4187
4188 /* If packing has made this field a bitfield and the input
4189 value couldn't be emitted statically any more, we need to
4190 clear TREE_CONSTANT on our output. */
4191 if (!clear_constant
4192 && TREE_CONSTANT (expr)
4193 && !CONSTRUCTOR_BITFIELD_P (efield)
4194 && CONSTRUCTOR_BITFIELD_P (field)
4195 && !initializer_constant_valid_for_bitfield_p (value))
4196 clear_constant = true;
4197
4198 efield = DECL_CHAIN (efield);
4199 field = DECL_CHAIN (field);
4200 }
4201
4202 /* If we have been able to match and convert all the input fields
4203 to their output type, convert in place now. Otherwise, we'll fall
4204 back to a view conversion downstream. */
4205 if (idx == len)
4206 {
4207 expr = copy_node (expr);
4208 TREE_TYPE (expr) = type;
4209 CONSTRUCTOR_ELTS (expr) = v;
4210 if (clear_constant)
4211 TREE_CONSTANT (expr) = TREE_STATIC (expr) = 0;
4212 return expr;
4213 }
4214 }
4215
4216 /* Likewise for a conversion between array type and vector type with a
4217 compatible representative array. */
4218 else if (code == VECTOR_TYPE
4219 && ecode == ARRAY_TYPE
4220 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4221 etype))
4222 {
4223 vec<constructor_elt, va_gc> *e = CONSTRUCTOR_ELTS (expr);
4224 unsigned HOST_WIDE_INT len = vec_safe_length (e);
4225 vec<constructor_elt, va_gc> *v;
4226 unsigned HOST_WIDE_INT ix;
4227 tree value;
4228
4229 /* Build a VECTOR_CST from a *constant* array constructor. */
4230 if (TREE_CONSTANT (expr))
4231 {
4232 bool constant_p = true;
4233
4234 /* Iterate through elements and check if all constructor
4235 elements are *_CSTs. */
4236 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4237 if (!CONSTANT_CLASS_P (value))
4238 {
4239 constant_p = false;
4240 break;
4241 }
4242
4243 if (constant_p)
4244 return build_vector_from_ctor (type,
4245 CONSTRUCTOR_ELTS (expr));
4246 }
4247
4248 /* Otherwise, build a regular vector constructor. */
4249 vec_alloc (v, len);
4250 FOR_EACH_CONSTRUCTOR_VALUE (e, ix, value)
4251 {
4252 constructor_elt elt = {NULL_TREE, value};
4253 v->quick_push (elt);
4254 }
4255 expr = copy_node (expr);
4256 TREE_TYPE (expr) = type;
4257 CONSTRUCTOR_ELTS (expr) = v;
4258 return expr;
4259 }
4260 break;
4261
4262 case UNCONSTRAINED_ARRAY_REF:
4263 /* First retrieve the underlying array. */
4264 expr = maybe_unconstrained_array (expr);
4265 etype = TREE_TYPE (expr);
4266 ecode = TREE_CODE (etype);
4267 break;
4268
4269 case VIEW_CONVERT_EXPR:
4270 {
4271 /* GCC 4.x is very sensitive to type consistency overall, and view
4272 conversions thus are very frequent. Even though just "convert"ing
4273 the inner operand to the output type is fine in most cases, it
4274 might expose unexpected input/output type mismatches in special
4275 circumstances, so we avoid such recursive calls when we can. */
4276 tree op0 = TREE_OPERAND (expr, 0);
4277
4278 /* If we are converting back to the original type, we can just
4279 lift the input conversion. This is a common occurrence with
4280 switches back-and-forth amongst type variants. */
4281 if (type == TREE_TYPE (op0))
4282 return op0;
4283
4284 /* Otherwise, if we're converting between two aggregate or vector
4285 types, we might be allowed to substitute the VIEW_CONVERT_EXPR
4286 target type in place or to just convert the inner expression. */
4287 if ((AGGREGATE_TYPE_P (type) && AGGREGATE_TYPE_P (etype))
4288 || (VECTOR_TYPE_P (type) && VECTOR_TYPE_P (etype)))
4289 {
4290 /* If we are converting between mere variants, we can just
4291 substitute the VIEW_CONVERT_EXPR in place. */
4292 if (gnat_types_compatible_p (type, etype))
4293 return build1 (VIEW_CONVERT_EXPR, type, op0);
4294
4295 /* Otherwise, we may just bypass the input view conversion unless
4296 one of the types is a fat pointer, which is handled by
4297 specialized code below which relies on exact type matching. */
4298 else if (!TYPE_IS_FAT_POINTER_P (type)
4299 && !TYPE_IS_FAT_POINTER_P (etype))
4300 return convert (type, op0);
4301 }
4302
4303 break;
4304 }
4305
4306 default:
4307 break;
4308 }
4309
4310 /* Check for converting to a pointer to an unconstrained array. */
4311 if (TYPE_IS_FAT_POINTER_P (type) && !TYPE_IS_FAT_POINTER_P (etype))
4312 return convert_to_fat_pointer (type, expr);
4313
4314 /* If we are converting between two aggregate or vector types that are mere
4315 variants, just make a VIEW_CONVERT_EXPR. Likewise when we are converting
4316 to a vector type from its representative array type. */
4317 else if ((code == ecode
4318 && (AGGREGATE_TYPE_P (type) || VECTOR_TYPE_P (type))
4319 && gnat_types_compatible_p (type, etype))
4320 || (code == VECTOR_TYPE
4321 && ecode == ARRAY_TYPE
4322 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4323 etype)))
4324 return build1 (VIEW_CONVERT_EXPR, type, expr);
4325
4326 /* If we are converting between tagged types, try to upcast properly. */
4327 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4328 && TYPE_ALIGN_OK (etype) && TYPE_ALIGN_OK (type))
4329 {
4330 tree child_etype = etype;
4331 do {
4332 tree field = TYPE_FIELDS (child_etype);
4333 if (DECL_NAME (field) == parent_name_id && TREE_TYPE (field) == type)
4334 return build_component_ref (expr, NULL_TREE, field, false);
4335 child_etype = TREE_TYPE (field);
4336 } while (TREE_CODE (child_etype) == RECORD_TYPE);
4337 }
4338
4339 /* If we are converting from a smaller form of record type back to it, just
4340 make a VIEW_CONVERT_EXPR. But first pad the expression to have the same
4341 size on both sides. */
4342 else if (ecode == RECORD_TYPE && code == RECORD_TYPE
4343 && smaller_form_type_p (etype, type))
4344 {
4345 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4346 false, false, false, true),
4347 expr);
4348 return build1 (VIEW_CONVERT_EXPR, type, expr);
4349 }
4350
4351 /* In all other cases of related types, make a NOP_EXPR. */
4352 else if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (etype))
4353 return fold_convert (type, expr);
4354
4355 switch (code)
4356 {
4357 case VOID_TYPE:
4358 return fold_build1 (CONVERT_EXPR, type, expr);
4359
4360 case INTEGER_TYPE:
4361 if (TYPE_HAS_ACTUAL_BOUNDS_P (type)
4362 && (ecode == ARRAY_TYPE || ecode == UNCONSTRAINED_ARRAY_TYPE
4363 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))))
4364 return unchecked_convert (type, expr, false);
4365 else if (TYPE_BIASED_REPRESENTATION_P (type))
4366 return fold_convert (type,
4367 fold_build2 (MINUS_EXPR, TREE_TYPE (type),
4368 convert (TREE_TYPE (type), expr),
4369 convert (TREE_TYPE (type),
4370 TYPE_MIN_VALUE (type))));
4371
4372 /* ... fall through ... */
4373
4374 case ENUMERAL_TYPE:
4375 case BOOLEAN_TYPE:
4376 /* If we are converting an additive expression to an integer type
4377 with lower precision, be wary of the optimization that can be
4378 applied by convert_to_integer. There are 2 problematic cases:
4379 - if the first operand was originally of a biased type,
4380 because we could be recursively called to convert it
4381 to an intermediate type and thus rematerialize the
4382 additive operator endlessly,
4383 - if the expression contains a placeholder, because an
4384 intermediate conversion that changes the sign could
4385 be inserted and thus introduce an artificial overflow
4386 at compile time when the placeholder is substituted. */
4387 if (code == INTEGER_TYPE
4388 && ecode == INTEGER_TYPE
4389 && TYPE_PRECISION (type) < TYPE_PRECISION (etype)
4390 && (TREE_CODE (expr) == PLUS_EXPR || TREE_CODE (expr) == MINUS_EXPR))
4391 {
4392 tree op0 = get_unwidened (TREE_OPERAND (expr, 0), type);
4393
4394 if ((TREE_CODE (TREE_TYPE (op0)) == INTEGER_TYPE
4395 && TYPE_BIASED_REPRESENTATION_P (TREE_TYPE (op0)))
4396 || CONTAINS_PLACEHOLDER_P (expr))
4397 return build1 (NOP_EXPR, type, expr);
4398 }
4399
4400 return fold (convert_to_integer (type, expr));
4401
4402 case POINTER_TYPE:
4403 case REFERENCE_TYPE:
4404 /* If converting between two thin pointers, adjust if needed to account
4405 for differing offsets from the base pointer, depending on whether
4406 there is a TYPE_UNCONSTRAINED_ARRAY attached to the record type. */
4407 if (TYPE_IS_THIN_POINTER_P (etype) && TYPE_IS_THIN_POINTER_P (type))
4408 {
4409 tree etype_pos
4410 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (etype)) != NULL_TREE
4411 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (etype))))
4412 : size_zero_node;
4413 tree type_pos
4414 = TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != NULL_TREE
4415 ? byte_position (DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (type))))
4416 : size_zero_node;
4417 tree byte_diff = size_diffop (type_pos, etype_pos);
4418
4419 expr = build1 (NOP_EXPR, type, expr);
4420 if (integer_zerop (byte_diff))
4421 return expr;
4422
4423 return build_binary_op (POINTER_PLUS_EXPR, type, expr,
4424 fold_convert (sizetype, byte_diff));
4425 }
4426
4427 /* If converting fat pointer to normal or thin pointer, get the pointer
4428 to the array and then convert it. */
4429 if (TYPE_IS_FAT_POINTER_P (etype))
4430 expr
4431 = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (etype), false);
4432
4433 return fold (convert_to_pointer (type, expr));
4434
4435 case REAL_TYPE:
4436 return fold (convert_to_real (type, expr));
4437
4438 case RECORD_TYPE:
4439 if (TYPE_JUSTIFIED_MODULAR_P (type) && !AGGREGATE_TYPE_P (etype))
4440 {
4441 vec<constructor_elt, va_gc> *v;
4442 vec_alloc (v, 1);
4443
4444 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (type),
4445 convert (TREE_TYPE (TYPE_FIELDS (type)),
4446 expr));
4447 return gnat_build_constructor (type, v);
4448 }
4449
4450 /* ... fall through ... */
4451
4452 case ARRAY_TYPE:
4453 /* In these cases, assume the front-end has validated the conversion.
4454 If the conversion is valid, it will be a bit-wise conversion, so
4455 it can be viewed as an unchecked conversion. */
4456 return unchecked_convert (type, expr, false);
4457
4458 case UNION_TYPE:
4459 /* This is either a conversion between a tagged type and some
4460 subtype, which we have to mark as a UNION_TYPE because of
4461 overlapping fields, or a conversion of an Unchecked_Union. */
4462 return unchecked_convert (type, expr, false);
4463
4464 case UNCONSTRAINED_ARRAY_TYPE:
4465 /* If the input is a VECTOR_TYPE, convert to the representative
4466 array type first. */
4467 if (ecode == VECTOR_TYPE)
4468 {
4469 expr = convert (TYPE_REPRESENTATIVE_ARRAY (etype), expr);
4470 etype = TREE_TYPE (expr);
4471 ecode = TREE_CODE (etype);
4472 }
4473
4474 /* If EXPR is a constrained array, take its address, convert it to a
4475 fat pointer, and then dereference it. Likewise if EXPR is a
4476 record containing both a template and a constrained array.
4477 Note that a record representing a justified modular type
4478 always represents a packed constrained array. */
4479 if (ecode == ARRAY_TYPE
4480 || (ecode == INTEGER_TYPE && TYPE_HAS_ACTUAL_BOUNDS_P (etype))
4481 || (ecode == RECORD_TYPE && TYPE_CONTAINS_TEMPLATE_P (etype))
4482 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype)))
4483 return
4484 build_unary_op
4485 (INDIRECT_REF, NULL_TREE,
4486 convert_to_fat_pointer (TREE_TYPE (type),
4487 build_unary_op (ADDR_EXPR,
4488 NULL_TREE, expr)));
4489
4490 /* Do something very similar for converting one unconstrained
4491 array to another. */
4492 else if (ecode == UNCONSTRAINED_ARRAY_TYPE)
4493 return
4494 build_unary_op (INDIRECT_REF, NULL_TREE,
4495 convert (TREE_TYPE (type),
4496 build_unary_op (ADDR_EXPR,
4497 NULL_TREE, expr)));
4498 else
4499 gcc_unreachable ();
4500
4501 case COMPLEX_TYPE:
4502 return fold (convert_to_complex (type, expr));
4503
4504 default:
4505 gcc_unreachable ();
4506 }
4507 }
4508
4509 /* Create an expression whose value is that of EXPR converted to the common
4510 index type, which is sizetype. EXPR is supposed to be in the base type
4511 of the GNAT index type. Calling it is equivalent to doing
4512
4513 convert (sizetype, expr)
4514
4515 but we try to distribute the type conversion with the knowledge that EXPR
4516 cannot overflow in its type. This is a best-effort approach and we fall
4517 back to the above expression as soon as difficulties are encountered.
4518
4519 This is necessary to overcome issues that arise when the GNAT base index
4520 type and the GCC common index type (sizetype) don't have the same size,
4521 which is quite frequent on 64-bit architectures. In this case, and if
4522 the GNAT base index type is signed but the iteration type of the loop has
4523 been forced to unsigned, the loop scalar evolution engine cannot compute
4524 a simple evolution for the general induction variables associated with the
4525 array indices, because it will preserve the wrap-around semantics in the
4526 unsigned type of their "inner" part. As a result, many loop optimizations
4527 are blocked.
4528
4529 The solution is to use a special (basic) induction variable that is at
4530 least as large as sizetype, and to express the aforementioned general
4531 induction variables in terms of this induction variable, eliminating
4532 the problematic intermediate truncation to the GNAT base index type.
4533 This is possible as long as the original expression doesn't overflow
4534 and the middle-end hasn't introduced artificial overflows in the
4535 course of the various simplifications it can make to the expression. */
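
/* For instance, in a loop like (a sketch):

      for J in A'Range loop
         A (J) := 0;
      end loop;

   the address computation for A (J) can then be expressed directly in
   terms of the special induction variable instead of J truncated to the
   GNAT base index type, letting the scalar evolution engine recognize a
   simple affine evolution.  */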
4536
4537 tree
4538 convert_to_index_type (tree expr)
4539 {
4540 enum tree_code code = TREE_CODE (expr);
4541 tree type = TREE_TYPE (expr);
4542
4543 /* If the type is unsigned, overflow is allowed so we cannot be sure that
4544 EXPR doesn't overflow. Keep it simple if optimization is disabled. */
4545 if (TYPE_UNSIGNED (type) || !optimize)
4546 return convert (sizetype, expr);
4547
4548 switch (code)
4549 {
4550 case VAR_DECL:
4551 /* The main effect of the function: replace a loop parameter with its
4552 associated special induction variable. */
4553 if (DECL_LOOP_PARM_P (expr) && DECL_INDUCTION_VAR (expr))
4554 expr = DECL_INDUCTION_VAR (expr);
4555 break;
4556
4557 CASE_CONVERT:
4558 {
4559 tree otype = TREE_TYPE (TREE_OPERAND (expr, 0));
4560 /* Bail out as soon as we suspect some sort of type frobbing. */
4561 if (TYPE_PRECISION (type) != TYPE_PRECISION (otype)
4562 || TYPE_UNSIGNED (type) != TYPE_UNSIGNED (otype))
4563 break;
4564 }
4565
4566 /* ... fall through ... */
4567
4568 case NON_LVALUE_EXPR:
4569 return fold_build1 (code, sizetype,
4570 convert_to_index_type (TREE_OPERAND (expr, 0)));
4571
4572 case PLUS_EXPR:
4573 case MINUS_EXPR:
4574 case MULT_EXPR:
4575 return fold_build2 (code, sizetype,
4576 convert_to_index_type (TREE_OPERAND (expr, 0)),
4577 convert_to_index_type (TREE_OPERAND (expr, 1)));
4578
4579 case COMPOUND_EXPR:
4580 return fold_build2 (code, sizetype, TREE_OPERAND (expr, 0),
4581 convert_to_index_type (TREE_OPERAND (expr, 1)));
4582
4583 case COND_EXPR:
4584 return fold_build3 (code, sizetype, TREE_OPERAND (expr, 0),
4585 convert_to_index_type (TREE_OPERAND (expr, 1)),
4586 convert_to_index_type (TREE_OPERAND (expr, 2)));
4587
4588 default:
4589 break;
4590 }
4591
4592 return convert (sizetype, expr);
4593 }
4594 \f
4595 /* Remove all conversions that are done in EXP. This includes converting
4596 from a padded type or to a justified modular type. If TRUE_ADDRESS
4597 is true, always return the address of the containing object even if
4598 the address is not bit-aligned. */
4599
4600 tree
4601 remove_conversions (tree exp, bool true_address)
4602 {
4603 switch (TREE_CODE (exp))
4604 {
4605 case CONSTRUCTOR:
4606 if (true_address
4607 && TREE_CODE (TREE_TYPE (exp)) == RECORD_TYPE
4608 && TYPE_JUSTIFIED_MODULAR_P (TREE_TYPE (exp)))
4609 return
4610 remove_conversions ((*CONSTRUCTOR_ELTS (exp))[0].value, true);
4611 break;
4612
4613 case COMPONENT_REF:
4614 if (TYPE_IS_PADDING_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
4615 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4616 break;
4617
4618 CASE_CONVERT:
4619 case VIEW_CONVERT_EXPR:
4620 case NON_LVALUE_EXPR:
4621 return remove_conversions (TREE_OPERAND (exp, 0), true_address);
4622
4623 default:
4624 break;
4625 }
4626
4627 return exp;
4628 }
4629 \f
4630 /* If EXP's type is an UNCONSTRAINED_ARRAY_TYPE, return an expression that
4631 refers to the underlying array. If it has TYPE_CONTAINS_TEMPLATE_P,
4632 likewise return an expression pointing to the underlying array. */
4633
4634 tree
4635 maybe_unconstrained_array (tree exp)
4636 {
4637 enum tree_code code = TREE_CODE (exp);
4638 tree type = TREE_TYPE (exp);
4639
4640 switch (TREE_CODE (type))
4641 {
4642 case UNCONSTRAINED_ARRAY_TYPE:
4643 if (code == UNCONSTRAINED_ARRAY_REF)
4644 {
4645 const bool read_only = TREE_READONLY (exp);
4646 const bool no_trap = TREE_THIS_NOTRAP (exp);
4647
4648 exp = TREE_OPERAND (exp, 0);
4649 type = TREE_TYPE (exp);
4650
4651 if (TREE_CODE (exp) == COND_EXPR)
4652 {
4653 tree op1
4654 = build_unary_op (INDIRECT_REF, NULL_TREE,
4655 build_component_ref (TREE_OPERAND (exp, 1),
4656 NULL_TREE,
4657 TYPE_FIELDS (type),
4658 false));
4659 tree op2
4660 = build_unary_op (INDIRECT_REF, NULL_TREE,
4661 build_component_ref (TREE_OPERAND (exp, 2),
4662 NULL_TREE,
4663 TYPE_FIELDS (type),
4664 false));
4665
4666 exp = build3 (COND_EXPR,
4667 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (type))),
4668 TREE_OPERAND (exp, 0), op1, op2);
4669 }
4670 else
4671 {
4672 exp = build_unary_op (INDIRECT_REF, NULL_TREE,
4673 build_component_ref (exp, NULL_TREE,
4674 TYPE_FIELDS (type),
4675 false));
4676 TREE_READONLY (exp) = read_only;
4677 TREE_THIS_NOTRAP (exp) = no_trap;
4678 }
4679 }
4680
4681 else if (code == NULL_EXPR)
4682 exp = build1 (NULL_EXPR,
4683 TREE_TYPE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (type)))),
4684 TREE_OPERAND (exp, 0));
4685 break;
4686
4687 case RECORD_TYPE:
4688 /* If this is a padded type and it contains a template, convert to the
4689 unpadded type first. */
4690 if (TYPE_PADDING_P (type)
4691 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
4692 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
4693 {
4694 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
4695 type = TREE_TYPE (exp);
4696 }
4697
4698 if (TYPE_CONTAINS_TEMPLATE_P (type))
4699 {
4700 exp = build_component_ref (exp, NULL_TREE,
4701 DECL_CHAIN (TYPE_FIELDS (type)),
4702 false);
4703 type = TREE_TYPE (exp);
4704
4705 /* If the array type is padded, convert to the unpadded type. */
4706 if (TYPE_IS_PADDING_P (type))
4707 exp = convert (TREE_TYPE (TYPE_FIELDS (type)), exp);
4708 }
4709 break;
4710
4711 default:
4712 break;
4713 }
4714
4715 return exp;
4716 }
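/* Illustrative sketch, not part of the original source: a value of
   UNCONSTRAINED_ARRAY_TYPE is accessed through a fat pointer pairing
   the data with its bounds, roughly equivalent to the C structure

     struct fat_pointer
     {
       char *p_array;
       char *p_bounds;
     };

   where p_array points to the actual array data and p_bounds to the
   template holding the bounds (the field names are only meant to evoke
   the GNAT convention).  maybe_unconstrained_array rewrites a
   reference to such a value into a dereference of the first field,
   i.e. an expression denoting the underlying array itself.  */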
4717 \f
4718 /* Return true if EXPR is an expression that can be folded as an operand
4719 of a VIEW_CONVERT_EXPR. See ada-tree.h for a complete rationale. */
4720
4721 static bool
4722 can_fold_for_view_convert_p (tree expr)
4723 {
4724 tree t1, t2;
4725
4726 /* The folder will fold NOP_EXPRs between integral types with the same
4727 precision (in the middle-end's sense). We cannot allow it if the
4728 types don't have the same precision in the Ada sense as well. */
4729 if (TREE_CODE (expr) != NOP_EXPR)
4730 return true;
4731
4732 t1 = TREE_TYPE (expr);
4733 t2 = TREE_TYPE (TREE_OPERAND (expr, 0));
4734
4735 /* Defer to the folder for non-integral conversions. */
4736 if (!(INTEGRAL_TYPE_P (t1) && INTEGRAL_TYPE_P (t2)))
4737 return true;
4738
4739 /* Only fold conversions that preserve both precisions. */
4740 if (TYPE_PRECISION (t1) == TYPE_PRECISION (t2)
4741 && operand_equal_p (rm_size (t1), rm_size (t2), 0))
4742 return true;
4743
4744 return false;
4745 }
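/* Illustrative sketch, not part of the original source: for an Ada
   subtype such as "subtype S is Integer range 0 .. 127", the RM size
   is 7 bits while the middle-end precision is that of Integer, i.e.
   32 bits.  A NOP_EXPR between S and Integer thus has equal precision
   for the folder but unequal precision in the Ada sense, and folding
   it under a VIEW_CONVERT_EXPR could drop an extension or truncation
   required by the Ada semantics; hence the rm_size check above.  */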
4746
4747 /* Return an expression that does an unchecked conversion of EXPR to TYPE.
4748 If NOTRUNC_P is true, truncation operations should be suppressed.
4749
4750 Special care is required with (source or target) integral types whose
4751 precision is not equal to their size, to make sure we fetch or assign
4752 the value bits whose location might depend on the endianness, e.g.
4753
4754 Rmsize : constant := 8;
4755 subtype Int is Integer range 0 .. 2 ** Rmsize - 1;
4756
4757 type Bit_Array is array (1 .. Rmsize) of Boolean;
4758 pragma Pack (Bit_Array);
4759
4760 function To_Bit_Array is new Unchecked_Conversion (Int, Bit_Array);
4761
4762 Value : Int := 2#1000_0001#;
4763 Vbits : Bit_Array := To_Bit_Array (Value);
4764
4765 we expect the 8 bits at Vbits'Address to always contain Value, while
4766 their original location depends on the endianness, at Value'Address
4767 on a little-endian architecture but not on a big-endian one. */
4768
4769 tree
4770 unchecked_convert (tree type, tree expr, bool notrunc_p)
4771 {
4772 tree etype = TREE_TYPE (expr);
4773 enum tree_code ecode = TREE_CODE (etype);
4774 enum tree_code code = TREE_CODE (type);
4775 tree tem;
4776 int c;
4777
4778 /* If the expression is already of the right type, we are done. */
4779 if (etype == type)
4780 return expr;
4781
4782   /* If both types are integral, just do a normal conversion.
4783 Likewise for a conversion to an unconstrained array. */
4784 if (((INTEGRAL_TYPE_P (type)
4785 || (POINTER_TYPE_P (type) && !TYPE_IS_THIN_POINTER_P (type))
4786 || (code == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (type)))
4787 && (INTEGRAL_TYPE_P (etype)
4788 || (POINTER_TYPE_P (etype) && !TYPE_IS_THIN_POINTER_P (etype))
4789 || (ecode == RECORD_TYPE && TYPE_JUSTIFIED_MODULAR_P (etype))))
4790 || code == UNCONSTRAINED_ARRAY_TYPE)
4791 {
4792 if (ecode == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (etype))
4793 {
4794 tree ntype = copy_type (etype);
4795 TYPE_BIASED_REPRESENTATION_P (ntype) = 0;
4796 TYPE_MAIN_VARIANT (ntype) = ntype;
4797 expr = build1 (NOP_EXPR, ntype, expr);
4798 }
4799
4800 if (code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
4801 {
4802 tree rtype = copy_type (type);
4803 TYPE_BIASED_REPRESENTATION_P (rtype) = 0;
4804 TYPE_MAIN_VARIANT (rtype) = rtype;
4805 expr = convert (rtype, expr);
4806 expr = build1 (NOP_EXPR, type, expr);
4807 }
4808 else
4809 expr = convert (type, expr);
4810 }
4811
4812 /* If we are converting to an integral type whose precision is not equal
4813      to its size, first unchecked convert to a record type that contains a
4814 field of the given precision. Then extract the field. */
4815 else if (INTEGRAL_TYPE_P (type)
4816 && TYPE_RM_SIZE (type)
4817 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4818 GET_MODE_BITSIZE (TYPE_MODE (type))))
4819 {
4820 tree rec_type = make_node (RECORD_TYPE);
4821 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (type));
4822 tree field_type, field;
4823
4824 if (TYPE_UNSIGNED (type))
4825 field_type = make_unsigned_type (prec);
4826 else
4827 field_type = make_signed_type (prec);
4828 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (type));
4829
4830 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
4831 NULL_TREE, bitsize_zero_node, 1, 0);
4832
4833 finish_record_type (rec_type, field, 1, false);
4834
4835 expr = unchecked_convert (rec_type, expr, notrunc_p);
4836 expr = build_component_ref (expr, NULL_TREE, field, false);
4837 expr = fold_build1 (NOP_EXPR, type, expr);
4838 }
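  /* Illustrative sketch, not part of the original source: for a target
     type with an RM size of 5 bits carried in an 8-bit mode, the block
     above builds the moral equivalent of the C declaration

       struct rec { signed int obj : 5; };

     unchecked-converts the source into that record and reads the field
     back, so that exactly the 5 value bits are fetched from wherever
     the endianness has put them.  */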
4839
4840 /* Similarly if we are converting from an integral type whose precision is
4841 not equal to its size, first copy into a field of the given precision
4842 and unchecked convert the record type. */
4843 else if (INTEGRAL_TYPE_P (etype)
4844 && TYPE_RM_SIZE (etype)
4845 && 0 != compare_tree_int (TYPE_RM_SIZE (etype),
4846 GET_MODE_BITSIZE (TYPE_MODE (etype))))
4847 {
4848 tree rec_type = make_node (RECORD_TYPE);
4849 unsigned HOST_WIDE_INT prec = TREE_INT_CST_LOW (TYPE_RM_SIZE (etype));
4850 vec<constructor_elt, va_gc> *v;
4851 vec_alloc (v, 1);
4852 tree field_type, field;
4853
4854 if (TYPE_UNSIGNED (etype))
4855 field_type = make_unsigned_type (prec);
4856 else
4857 field_type = make_signed_type (prec);
4858 SET_TYPE_RM_SIZE (field_type, TYPE_RM_SIZE (etype));
4859
4860 field = create_field_decl (get_identifier ("OBJ"), field_type, rec_type,
4861 NULL_TREE, bitsize_zero_node, 1, 0);
4862
4863 finish_record_type (rec_type, field, 1, false);
4864
4865 expr = fold_build1 (NOP_EXPR, field_type, expr);
4866 CONSTRUCTOR_APPEND_ELT (v, field, expr);
4867 expr = gnat_build_constructor (rec_type, v);
4868 expr = unchecked_convert (type, expr, notrunc_p);
4869 }
4870
4871 /* If we are converting from a scalar type to a type with a different size,
4872 we need to pad to have the same size on both sides.
4873
4874 ??? We cannot do it unconditionally because unchecked conversions are
4875 used liberally by the front-end to implement polymorphism, e.g. in:
4876
4877 S191s : constant ada__tags__addr_ptr := ada__tags__addr_ptr!(S190s);
4878 return p___size__4 (p__object!(S191s.all));
4879
4880 so we skip all expressions that are references. */
4881 else if (!REFERENCE_CLASS_P (expr)
4882 && !AGGREGATE_TYPE_P (etype)
4883 && TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST
4884 && (c = tree_int_cst_compare (TYPE_SIZE (etype), TYPE_SIZE (type))))
4885 {
4886 if (c < 0)
4887 {
4888 expr = convert (maybe_pad_type (etype, TYPE_SIZE (type), 0, Empty,
4889 false, false, false, true),
4890 expr);
4891 expr = unchecked_convert (type, expr, notrunc_p);
4892 }
4893 else
4894 {
4895 tree rec_type = maybe_pad_type (type, TYPE_SIZE (etype), 0, Empty,
4896 false, false, false, true);
4897 expr = unchecked_convert (rec_type, expr, notrunc_p);
4898 expr = build_component_ref (expr, NULL_TREE, TYPE_FIELDS (rec_type),
4899 false);
4900 }
4901 }
4902
4903 /* We have a special case when we are converting between two unconstrained
4904 array types. In that case, take the address, convert the fat pointer
4905 types, and dereference. */
4906 else if (ecode == code && code == UNCONSTRAINED_ARRAY_TYPE)
4907 expr = build_unary_op (INDIRECT_REF, NULL_TREE,
4908 build1 (VIEW_CONVERT_EXPR, TREE_TYPE (type),
4909 build_unary_op (ADDR_EXPR, NULL_TREE,
4910 expr)));
4911
4912 /* Another special case is when we are converting to a vector type from its
4913      representative array type; this is a regular conversion.  */
4914 else if (code == VECTOR_TYPE
4915 && ecode == ARRAY_TYPE
4916 && gnat_types_compatible_p (TYPE_REPRESENTATIVE_ARRAY (type),
4917 etype))
4918 expr = convert (type, expr);
4919
4920 /* And, if the array type is not the representative, we try to build an
4921 intermediate vector type of which the array type is the representative
4922 and to do the unchecked conversion between the vector types, in order
4923 to enable further simplifications in the middle-end. */
4924 else if (code == VECTOR_TYPE
4925 && ecode == ARRAY_TYPE
4926 && (tem = build_vector_type_for_array (etype, NULL_TREE)))
4927 {
4928 expr = convert (tem, expr);
4929 return unchecked_convert (type, expr, notrunc_p);
4930 }
4931
4932 /* If we are converting a CONSTRUCTOR to a more aligned RECORD_TYPE, bump
4933 the alignment of the CONSTRUCTOR to speed up the copy operation. */
4934 else if (TREE_CODE (expr) == CONSTRUCTOR
4935 && code == RECORD_TYPE
4936 && TYPE_ALIGN (etype) < TYPE_ALIGN (type))
4937 {
4938 expr = convert (maybe_pad_type (etype, NULL_TREE, TYPE_ALIGN (type),
4939 Empty, false, false, false, true),
4940 expr);
4941 return unchecked_convert (type, expr, notrunc_p);
4942 }
4943
4944 /* Otherwise, just build a VIEW_CONVERT_EXPR of the expression. */
4945 else
4946 {
4947 expr = maybe_unconstrained_array (expr);
4948 etype = TREE_TYPE (expr);
4949 ecode = TREE_CODE (etype);
4950 if (can_fold_for_view_convert_p (expr))
4951 expr = fold_build1 (VIEW_CONVERT_EXPR, type, expr);
4952 else
4953 expr = build1 (VIEW_CONVERT_EXPR, type, expr);
4954 }
4955
4956 /* If the result is an integral type whose precision is not equal to its
4957 size, sign- or zero-extend the result. We need not do this if the input
4958 is an integral type of the same precision and signedness or if the output
4959 is a biased type or if both the input and output are unsigned. */
4960 if (!notrunc_p
4961 && INTEGRAL_TYPE_P (type) && TYPE_RM_SIZE (type)
4962 && !(code == INTEGER_TYPE && TYPE_BIASED_REPRESENTATION_P (type))
4963 && 0 != compare_tree_int (TYPE_RM_SIZE (type),
4964 GET_MODE_BITSIZE (TYPE_MODE (type)))
4965 && !(INTEGRAL_TYPE_P (etype)
4966 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (etype)
4967 && operand_equal_p (TYPE_RM_SIZE (type),
4968 (TYPE_RM_SIZE (etype) != 0
4969 ? TYPE_RM_SIZE (etype) : TYPE_SIZE (etype)),
4970 0))
4971 && !(TYPE_UNSIGNED (type) && TYPE_UNSIGNED (etype)))
4972 {
4973 tree base_type
4974 = gnat_type_for_mode (TYPE_MODE (type), TYPE_UNSIGNED (type));
4975 tree shift_expr
4976 = convert (base_type,
4977 size_binop (MINUS_EXPR,
4978 bitsize_int
4979 (GET_MODE_BITSIZE (TYPE_MODE (type))),
4980 TYPE_RM_SIZE (type)));
4981 expr
4982 = convert (type,
4983 build_binary_op (RSHIFT_EXPR, base_type,
4984 build_binary_op (LSHIFT_EXPR, base_type,
4985 convert (base_type, expr),
4986 shift_expr),
4987 shift_expr));
4988 }
4989
4990 /* An unchecked conversion should never raise Constraint_Error. The code
4991 below assumes that GCC's conversion routines overflow the same way that
4992 the underlying hardware does. This is probably true. In the rare case
4993 when it is false, we can rely on the fact that such conversions are
4994 erroneous anyway. */
4995 if (TREE_CODE (expr) == INTEGER_CST)
4996 TREE_OVERFLOW (expr) = 0;
4997
4998   /* If the sizes of the types differ and this is a VIEW_CONVERT_EXPR,
4999      mark the result as no longer constant.  */
5000 if (TREE_CODE (expr) == VIEW_CONVERT_EXPR
5001 && !operand_equal_p (TYPE_SIZE_UNIT (type), TYPE_SIZE_UNIT (etype),
5002 OEP_ONLY_CONST))
5003 TREE_CONSTANT (expr) = 0;
5004
5005 return expr;
5006 }
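/* Illustrative sketch, not part of the original source: the extension
   step at the end of unchecked_convert is the classic shift-pair
   idiom.  A standalone C rendering (helper name hypothetical; it
   relies on arithmetic right shifts of signed values, which GCC
   guarantees) would be

     #include <stdint.h>

     static inline int32_t
     sign_extend_low_bits (int32_t value, int n)
     {
       const int shift = 32 - n;
       return (int32_t) ((uint32_t) value << shift) >> shift;
     }

   Performing the same two shifts in an unsigned base type instead
   yields zero-extension, matching the selection of BASE_TYPE from
   TYPE_UNSIGNED (type) above.  */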
5007 \f
5008 /* Return the appropriate GCC tree code for the specified GNAT_TYPE,
5009 the latter being a record type as predicated by Is_Record_Type. */
5010
5011 enum tree_code
5012 tree_code_for_record_type (Entity_Id gnat_type)
5013 {
5014 Node_Id component_list, component;
5015
5016 /* Return UNION_TYPE if it's an Unchecked_Union whose non-discriminant
5017 fields are all in the variant part. Otherwise, return RECORD_TYPE. */
5018 if (!Is_Unchecked_Union (gnat_type))
5019 return RECORD_TYPE;
5020
5021 gnat_type = Implementation_Base_Type (gnat_type);
5022 component_list
5023 = Component_List (Type_Definition (Declaration_Node (gnat_type)));
5024
5025 for (component = First_Non_Pragma (Component_Items (component_list));
5026 Present (component);
5027 component = Next_Non_Pragma (component))
5028 if (Ekind (Defining_Entity (component)) == E_Component)
5029 return RECORD_TYPE;
5030
5031 return UNION_TYPE;
5032 }
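/* Illustrative sketch, not part of the original source: an
   Unchecked_Union whose non-discriminant components all live in the
   variant part has no fields outside the variants and therefore maps
   to a plain C union, e.g.

     union rep { int i; float f; };

   whereas a single component outside the variant part is enough to
   force RECORD_TYPE in the loop above.  */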
5033
5034 /* Return true if GNAT_TYPE is a "double" floating-point type, i.e. whose
5035 size is equal to 64 bits, or an array of such a type. Set ALIGN_CLAUSE
5036 according to the presence of an alignment clause on the type or, if it
5037 is an array, on the component type. */
5038
5039 bool
5040 is_double_float_or_array (Entity_Id gnat_type, bool *align_clause)
5041 {
5042 gnat_type = Underlying_Type (gnat_type);
5043
5044 *align_clause = Present (Alignment_Clause (gnat_type));
5045
5046 if (Is_Array_Type (gnat_type))
5047 {
5048 gnat_type = Underlying_Type (Component_Type (gnat_type));
5049 if (Present (Alignment_Clause (gnat_type)))
5050 *align_clause = true;
5051 }
5052
5053 if (!Is_Floating_Point_Type (gnat_type))
5054 return false;
5055
5056 if (UI_To_Int (Esize (gnat_type)) != 64)
5057 return false;
5058
5059 return true;
5060 }
5061
5062 /* Return true if GNAT_TYPE is a "double" or larger scalar type, i.e. whose
5063 size is greater or equal to 64 bits, or an array of such a type. Set
5064 ALIGN_CLAUSE according to the presence of an alignment clause on the
5065 type or, if it is an array, on the component type. */
5066
5067 bool
5068 is_double_scalar_or_array (Entity_Id gnat_type, bool *align_clause)
5069 {
5070 gnat_type = Underlying_Type (gnat_type);
5071
5072 *align_clause = Present (Alignment_Clause (gnat_type));
5073
5074 if (Is_Array_Type (gnat_type))
5075 {
5076 gnat_type = Underlying_Type (Component_Type (gnat_type));
5077 if (Present (Alignment_Clause (gnat_type)))
5078 *align_clause = true;
5079 }
5080
5081 if (!Is_Scalar_Type (gnat_type))
5082 return false;
5083
5084 if (UI_To_Int (Esize (gnat_type)) < 64)
5085 return false;
5086
5087 return true;
5088 }
5089
5090 /* Return true if GNU_TYPE is suitable as the type of a non-aliased
5091 component of an aggregate type. */
5092
5093 bool
5094 type_for_nonaliased_component_p (tree gnu_type)
5095 {
5096 /* If the type is passed by reference, we may have pointers to the
5097 component so it cannot be made non-aliased. */
5098 if (must_pass_by_ref (gnu_type) || default_pass_by_ref (gnu_type))
5099 return false;
5100
5101 /* We used to say that any component of aggregate type is aliased
5102 because the front-end may take 'Reference of it. The front-end
5103 has been enhanced in the meantime so as to use a renaming instead
5104 in most cases, but the back-end can probably take the address of
5105 such a component too so we go for the conservative stance.
5106
5107 For instance, we might need the address of any array type, even
5108 if normally passed by copy, to construct a fat pointer if the
5109 component is used as an actual for an unconstrained formal.
5110
5111 Likewise for record types: even if a specific record subtype is
5112 passed by copy, the parent type might be passed by ref (e.g. if
5113 it's of variable size) and we might take the address of a child
5114 component to pass to a parent formal. We have no way to check
5115 for such conditions here. */
5116 if (AGGREGATE_TYPE_P (gnu_type))
5117 return false;
5118
5119 return true;
5120 }
5121
5122 /* Return true if TYPE is a smaller form of ORIG_TYPE. */
5123
5124 bool
5125 smaller_form_type_p (tree type, tree orig_type)
5126 {
5127 tree size, osize;
5128
5129 /* We're not interested in variants here. */
5130 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig_type))
5131 return false;
5132
5133 /* Like a variant, a packable version keeps the original TYPE_NAME. */
5134 if (TYPE_NAME (type) != TYPE_NAME (orig_type))
5135 return false;
5136
5137 size = TYPE_SIZE (type);
5138 osize = TYPE_SIZE (orig_type);
5139
5140 if (!(TREE_CODE (size) == INTEGER_CST && TREE_CODE (osize) == INTEGER_CST))
5141 return false;
5142
5143 return tree_int_cst_lt (size, osize) != 0;
5144 }
5145
5146 /* Perform final processing on global variables. */
5147
5148 static GTY (()) tree dummy_global;
5149
5150 void
5151 gnat_write_global_declarations (void)
5152 {
5153 unsigned int i;
5154 tree iter;
5155
5156 /* If we have declared types as used at the global level, insert them in
5157 the global hash table. We use a dummy variable for this purpose. */
5158 if (types_used_by_cur_var_decl && !types_used_by_cur_var_decl->is_empty ())
5159 {
5160 struct varpool_node *node;
5161 char *label;
5162
5163 ASM_FORMAT_PRIVATE_NAME (label, first_global_object_name, 0);
5164 dummy_global
5165 = build_decl (BUILTINS_LOCATION, VAR_DECL, get_identifier (label),
5166 void_type_node);
5167 DECL_HARD_REGISTER (dummy_global) = 1;
5168 TREE_STATIC (dummy_global) = 1;
5169 node = varpool_node::get_create (dummy_global);
5170       node->definition = 1;
5172 node->force_output = 1;
5173
5174 while (!types_used_by_cur_var_decl->is_empty ())
5175 {
5176 tree t = types_used_by_cur_var_decl->pop ();
5177 types_used_by_var_decl_insert (t, dummy_global);
5178 }
5179 }
5180
5181 /* Output debug information for all global type declarations first. This
5182 ensures that global types whose compilation hasn't been finalized yet,
5183 for example pointers to Taft amendment types, have their compilation
5184 finalized in the right context. */
5185 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5186 if (TREE_CODE (iter) == TYPE_DECL && !DECL_IGNORED_P (iter))
5187 debug_hooks->global_decl (iter);
5188
5189 /* Proceed to optimize and emit assembly. */
5190 finalize_compilation_unit ();
5191
5192 /* After cgraph has had a chance to emit everything that's going to
5193 be emitted, output debug information for the rest of globals. */
5194 if (!seen_error ())
5195 {
5196 timevar_push (TV_SYMOUT);
5197 FOR_EACH_VEC_SAFE_ELT (global_decls, i, iter)
5198 if (TREE_CODE (iter) != TYPE_DECL && !DECL_IGNORED_P (iter))
5199 debug_hooks->global_decl (iter);
5200 timevar_pop (TV_SYMOUT);
5201 }
5202 }
5203
5204 /* ************************************************************************
5205 * * GCC builtins support *
5206 * ************************************************************************ */
5207
5208 /* The general scheme is fairly simple:
5209
5210 For each builtin function/type to be declared, gnat_install_builtins calls
5211 internal facilities which eventually get to gnat_push_decl, which in turn
5212    tracks the builtin function decls thus declared in the 'builtin_decls'
5213    global data structure.  When an Intrinsic subprogram declaration is
5214    processed, we search this global data structure to retrieve the
5215    associated BUILT_IN DECL node.  */
5216
5217 /* Search the chain of currently available builtin declarations for a node
5218 corresponding to function NAME (an IDENTIFIER_NODE). Return the first node
5219 found, if any, or NULL_TREE otherwise. */
5220 tree
5221 builtin_decl_for (tree name)
5222 {
5223 unsigned i;
5224 tree decl;
5225
5226 FOR_EACH_VEC_SAFE_ELT (builtin_decls, i, decl)
5227 if (DECL_NAME (decl) == name)
5228 return decl;
5229
5230 return NULL_TREE;
5231 }
5232
5233 /* The code below eventually exposes gnat_install_builtins, which declares
5234 the builtin types and functions we might need, either internally or as
5235 user accessible facilities.
5236
5237 ??? This is a first implementation shot, still in rough shape. It is
5238    heavily inspired by the "C" family implementation, with chunks copied
5239 verbatim from there.
5240
5241 Two obvious TODO candidates are
5242 o Use a more efficient name/decl mapping scheme
5243 o Devise a middle-end infrastructure to avoid having to copy
5244 pieces between front-ends. */
5245
5246 /* ----------------------------------------------------------------------- *
5247 * BUILTIN ELEMENTARY TYPES *
5248 * ----------------------------------------------------------------------- */
5249
5250 /* Standard data types to be used in builtin argument declarations. */
5251
5252 enum c_tree_index
5253 {
5254 CTI_SIGNED_SIZE_TYPE, /* For format checking only. */
5255 CTI_STRING_TYPE,
5256 CTI_CONST_STRING_TYPE,
5257
5258 CTI_MAX
5259 };
5260
5261 static tree c_global_trees[CTI_MAX];
5262
5263 #define signed_size_type_node c_global_trees[CTI_SIGNED_SIZE_TYPE]
5264 #define string_type_node c_global_trees[CTI_STRING_TYPE]
5265 #define const_string_type_node c_global_trees[CTI_CONST_STRING_TYPE]
5266
5267 /* ??? In addition to some attribute handlers, we currently don't support
5268    a (small) number of builtin-types, which in turn inhibits support for a
5269 number of builtin functions. */
5270 #define wint_type_node void_type_node
5271 #define intmax_type_node void_type_node
5272 #define uintmax_type_node void_type_node
5273
5274 /* Build the void_list_node (void_type_node having been created). */
5275
5276 static tree
5277 build_void_list_node (void)
5278 {
5279 tree t = build_tree_list (NULL_TREE, void_type_node);
5280 return t;
5281 }
5282
5283 /* Used to help initialize the builtin-types.def table. When a type of
5284 the correct size doesn't exist, use error_mark_node instead of NULL.
5285    The latter results in segfaults even when a decl using the type doesn't
5286 get invoked. */
5287
5288 static tree
5289 builtin_type_for_size (int size, bool unsignedp)
5290 {
5291 tree type = gnat_type_for_size (size, unsignedp);
5292 return type ? type : error_mark_node;
5293 }
5294
5295 /* Build/push the elementary type decls that builtin functions/types
5296 will need. */
5297
5298 static void
5299 install_builtin_elementary_types (void)
5300 {
5301 signed_size_type_node = gnat_signed_type (size_type_node);
5302 pid_type_node = integer_type_node;
5303 void_list_node = build_void_list_node ();
5304
5305 string_type_node = build_pointer_type (char_type_node);
5306 const_string_type_node
5307 = build_pointer_type (build_qualified_type
5308 (char_type_node, TYPE_QUAL_CONST));
5309 }
5310
5311 /* ----------------------------------------------------------------------- *
5312 * BUILTIN FUNCTION TYPES *
5313 * ----------------------------------------------------------------------- */
5314
5315 /* Now, builtin function types per se. */
5316
5317 enum c_builtin_type
5318 {
5319 #define DEF_PRIMITIVE_TYPE(NAME, VALUE) NAME,
5320 #define DEF_FUNCTION_TYPE_0(NAME, RETURN) NAME,
5321 #define DEF_FUNCTION_TYPE_1(NAME, RETURN, ARG1) NAME,
5322 #define DEF_FUNCTION_TYPE_2(NAME, RETURN, ARG1, ARG2) NAME,
5323 #define DEF_FUNCTION_TYPE_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5324 #define DEF_FUNCTION_TYPE_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5325 #define DEF_FUNCTION_TYPE_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) NAME,
5326 #define DEF_FUNCTION_TYPE_6(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5327 ARG6) NAME,
5328 #define DEF_FUNCTION_TYPE_7(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5329 ARG6, ARG7) NAME,
5330 #define DEF_FUNCTION_TYPE_8(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5331 ARG6, ARG7, ARG8) NAME,
5332 #define DEF_FUNCTION_TYPE_VAR_0(NAME, RETURN) NAME,
5333 #define DEF_FUNCTION_TYPE_VAR_1(NAME, RETURN, ARG1) NAME,
5334 #define DEF_FUNCTION_TYPE_VAR_2(NAME, RETURN, ARG1, ARG2) NAME,
5335 #define DEF_FUNCTION_TYPE_VAR_3(NAME, RETURN, ARG1, ARG2, ARG3) NAME,
5336 #define DEF_FUNCTION_TYPE_VAR_4(NAME, RETURN, ARG1, ARG2, ARG3, ARG4) NAME,
5337 #define DEF_FUNCTION_TYPE_VAR_5(NAME, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5338 NAME,
5339 #define DEF_POINTER_TYPE(NAME, TYPE) NAME,
5340 #include "builtin-types.def"
5341 #undef DEF_PRIMITIVE_TYPE
5342 #undef DEF_FUNCTION_TYPE_0
5343 #undef DEF_FUNCTION_TYPE_1
5344 #undef DEF_FUNCTION_TYPE_2
5345 #undef DEF_FUNCTION_TYPE_3
5346 #undef DEF_FUNCTION_TYPE_4
5347 #undef DEF_FUNCTION_TYPE_5
5348 #undef DEF_FUNCTION_TYPE_6
5349 #undef DEF_FUNCTION_TYPE_7
5350 #undef DEF_FUNCTION_TYPE_8
5351 #undef DEF_FUNCTION_TYPE_VAR_0
5352 #undef DEF_FUNCTION_TYPE_VAR_1
5353 #undef DEF_FUNCTION_TYPE_VAR_2
5354 #undef DEF_FUNCTION_TYPE_VAR_3
5355 #undef DEF_FUNCTION_TYPE_VAR_4
5356 #undef DEF_FUNCTION_TYPE_VAR_5
5357 #undef DEF_POINTER_TYPE
5358 BT_LAST
5359 };
5360
5361 typedef enum c_builtin_type builtin_type;
5362
5363 /* A temporary array used in communication with def_fn_type. */
5364 static GTY(()) tree builtin_types[(int) BT_LAST + 1];
5365
5366 /* A helper function for install_builtin_types. Build function type
5367 for DEF with return type RET and N arguments. If VAR is true, then the
5368 function should be variadic after those N arguments.
5369
5370 Takes special care not to ICE if any of the types involved are
5371 error_mark_node, which indicates that said type is not in fact available
5372    (see builtin_type_for_size), in which case the function type as a whole
5373 should be error_mark_node. */
5374
5375 static void
5376 def_fn_type (builtin_type def, builtin_type ret, bool var, int n, ...)
5377 {
5378 tree t;
5379 tree *args = XALLOCAVEC (tree, n);
5380 va_list list;
5381 int i;
5382
5383 va_start (list, n);
5384 for (i = 0; i < n; ++i)
5385 {
5386 builtin_type a = (builtin_type) va_arg (list, int);
5387 t = builtin_types[a];
5388 if (t == error_mark_node)
5389 goto egress;
5390 args[i] = t;
5391 }
5392
5393 t = builtin_types[ret];
5394 if (t == error_mark_node)
5395 goto egress;
5396 if (var)
5397 t = build_varargs_function_type_array (t, n, args);
5398 else
5399 t = build_function_type_array (t, n, args);
5400
5401 egress:
5402 builtin_types[def] = t;
5403 va_end (list);
5404 }
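/* Illustrative sketch, not part of the original source: the macros
   defined in install_builtin_function_types below expand every entry
   of builtin-types.def into a call to def_fn_type; for instance a
   two-argument entry (enum names hypothetical) such as

     DEF_FUNCTION_TYPE_2 (BT_FN_PTR_PTR_SIZE, BT_PTR, BT_PTR, BT_SIZE)

   expands to

     def_fn_type (BT_FN_PTR_PTR_SIZE, BT_PTR, 0, 2, BT_PTR, BT_SIZE);

   and fills builtin_types[BT_FN_PTR_PTR_SIZE] with the corresponding
   FUNCTION_TYPE node, or error_mark_node if any operand type is
   unavailable.  */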
5405
5406 /* Build the builtin function types and install them in the builtin_types
5407 array for later use in builtin function decls. */
5408
5409 static void
5410 install_builtin_function_types (void)
5411 {
5412 tree va_list_ref_type_node;
5413 tree va_list_arg_type_node;
5414
5415 if (TREE_CODE (va_list_type_node) == ARRAY_TYPE)
5416 {
5417 va_list_arg_type_node = va_list_ref_type_node =
5418 build_pointer_type (TREE_TYPE (va_list_type_node));
5419 }
5420 else
5421 {
5422 va_list_arg_type_node = va_list_type_node;
5423 va_list_ref_type_node = build_reference_type (va_list_type_node);
5424 }
5425
5426 #define DEF_PRIMITIVE_TYPE(ENUM, VALUE) \
5427 builtin_types[ENUM] = VALUE;
5428 #define DEF_FUNCTION_TYPE_0(ENUM, RETURN) \
5429 def_fn_type (ENUM, RETURN, 0, 0);
5430 #define DEF_FUNCTION_TYPE_1(ENUM, RETURN, ARG1) \
5431 def_fn_type (ENUM, RETURN, 0, 1, ARG1);
5432 #define DEF_FUNCTION_TYPE_2(ENUM, RETURN, ARG1, ARG2) \
5433 def_fn_type (ENUM, RETURN, 0, 2, ARG1, ARG2);
5434 #define DEF_FUNCTION_TYPE_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5435 def_fn_type (ENUM, RETURN, 0, 3, ARG1, ARG2, ARG3);
5436 #define DEF_FUNCTION_TYPE_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5437 def_fn_type (ENUM, RETURN, 0, 4, ARG1, ARG2, ARG3, ARG4);
5438 #define DEF_FUNCTION_TYPE_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5439 def_fn_type (ENUM, RETURN, 0, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5440 #define DEF_FUNCTION_TYPE_6(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5441 ARG6) \
5442 def_fn_type (ENUM, RETURN, 0, 6, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6);
5443 #define DEF_FUNCTION_TYPE_7(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5444 ARG6, ARG7) \
5445 def_fn_type (ENUM, RETURN, 0, 7, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, ARG7);
5446 #define DEF_FUNCTION_TYPE_8(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5, \
5447 ARG6, ARG7, ARG8) \
5448 def_fn_type (ENUM, RETURN, 0, 8, ARG1, ARG2, ARG3, ARG4, ARG5, ARG6, \
5449 ARG7, ARG8);
5450 #define DEF_FUNCTION_TYPE_VAR_0(ENUM, RETURN) \
5451 def_fn_type (ENUM, RETURN, 1, 0);
5452 #define DEF_FUNCTION_TYPE_VAR_1(ENUM, RETURN, ARG1) \
5453 def_fn_type (ENUM, RETURN, 1, 1, ARG1);
5454 #define DEF_FUNCTION_TYPE_VAR_2(ENUM, RETURN, ARG1, ARG2) \
5455 def_fn_type (ENUM, RETURN, 1, 2, ARG1, ARG2);
5456 #define DEF_FUNCTION_TYPE_VAR_3(ENUM, RETURN, ARG1, ARG2, ARG3) \
5457 def_fn_type (ENUM, RETURN, 1, 3, ARG1, ARG2, ARG3);
5458 #define DEF_FUNCTION_TYPE_VAR_4(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4) \
5459 def_fn_type (ENUM, RETURN, 1, 4, ARG1, ARG2, ARG3, ARG4);
5460 #define DEF_FUNCTION_TYPE_VAR_5(ENUM, RETURN, ARG1, ARG2, ARG3, ARG4, ARG5) \
5461 def_fn_type (ENUM, RETURN, 1, 5, ARG1, ARG2, ARG3, ARG4, ARG5);
5462 #define DEF_POINTER_TYPE(ENUM, TYPE) \
5463 builtin_types[(int) ENUM] = build_pointer_type (builtin_types[(int) TYPE]);
5464
5465 #include "builtin-types.def"
5466
5467 #undef DEF_PRIMITIVE_TYPE
5468 #undef DEF_FUNCTION_TYPE_0
5469 #undef DEF_FUNCTION_TYPE_1
5470 #undef DEF_FUNCTION_TYPE_2
5471 #undef DEF_FUNCTION_TYPE_3
5472 #undef DEF_FUNCTION_TYPE_4
5473 #undef DEF_FUNCTION_TYPE_5
5474 #undef DEF_FUNCTION_TYPE_6
5475 #undef DEF_FUNCTION_TYPE_7
5476 #undef DEF_FUNCTION_TYPE_8
5477 #undef DEF_FUNCTION_TYPE_VAR_0
5478 #undef DEF_FUNCTION_TYPE_VAR_1
5479 #undef DEF_FUNCTION_TYPE_VAR_2
5480 #undef DEF_FUNCTION_TYPE_VAR_3
5481 #undef DEF_FUNCTION_TYPE_VAR_4
5482 #undef DEF_FUNCTION_TYPE_VAR_5
5483 #undef DEF_POINTER_TYPE
5484 builtin_types[(int) BT_LAST] = NULL_TREE;
5485 }
5486
5487 /* ----------------------------------------------------------------------- *
5488 * BUILTIN ATTRIBUTES *
5489 * ----------------------------------------------------------------------- */
5490
5491 enum built_in_attribute
5492 {
5493 #define DEF_ATTR_NULL_TREE(ENUM) ENUM,
5494 #define DEF_ATTR_INT(ENUM, VALUE) ENUM,
5495 #define DEF_ATTR_STRING(ENUM, VALUE) ENUM,
5496 #define DEF_ATTR_IDENT(ENUM, STRING) ENUM,
5497 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) ENUM,
5498 #include "builtin-attrs.def"
5499 #undef DEF_ATTR_NULL_TREE
5500 #undef DEF_ATTR_INT
5501 #undef DEF_ATTR_STRING
5502 #undef DEF_ATTR_IDENT
5503 #undef DEF_ATTR_TREE_LIST
5504 ATTR_LAST
5505 };
5506
5507 static GTY(()) tree built_in_attributes[(int) ATTR_LAST];
5508
5509 static void
5510 install_builtin_attributes (void)
5511 {
5512 /* Fill in the built_in_attributes array. */
5513 #define DEF_ATTR_NULL_TREE(ENUM) \
5514 built_in_attributes[(int) ENUM] = NULL_TREE;
5515 #define DEF_ATTR_INT(ENUM, VALUE) \
5516 built_in_attributes[(int) ENUM] = build_int_cst (NULL_TREE, VALUE);
5517 #define DEF_ATTR_STRING(ENUM, VALUE) \
5518 built_in_attributes[(int) ENUM] = build_string (strlen (VALUE), VALUE);
5519 #define DEF_ATTR_IDENT(ENUM, STRING) \
5520 built_in_attributes[(int) ENUM] = get_identifier (STRING);
5521 #define DEF_ATTR_TREE_LIST(ENUM, PURPOSE, VALUE, CHAIN) \
5522 built_in_attributes[(int) ENUM] \
5523 = tree_cons (built_in_attributes[(int) PURPOSE], \
5524 built_in_attributes[(int) VALUE], \
5525 built_in_attributes[(int) CHAIN]);
5526 #include "builtin-attrs.def"
5527 #undef DEF_ATTR_NULL_TREE
5528 #undef DEF_ATTR_INT
5529 #undef DEF_ATTR_STRING
5530 #undef DEF_ATTR_IDENT
5531 #undef DEF_ATTR_TREE_LIST
5532 }
5533
5534 /* Handle a "const" attribute; arguments as in
5535 struct attribute_spec.handler. */
5536
5537 static tree
5538 handle_const_attribute (tree *node, tree ARG_UNUSED (name),
5539 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5540 bool *no_add_attrs)
5541 {
5542 if (TREE_CODE (*node) == FUNCTION_DECL)
5543 TREE_READONLY (*node) = 1;
5544 else
5545 *no_add_attrs = true;
5546
5547 return NULL_TREE;
5548 }
5549
5550 /* Handle a "nothrow" attribute; arguments as in
5551 struct attribute_spec.handler. */
5552
5553 static tree
5554 handle_nothrow_attribute (tree *node, tree ARG_UNUSED (name),
5555 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5556 bool *no_add_attrs)
5557 {
5558 if (TREE_CODE (*node) == FUNCTION_DECL)
5559 TREE_NOTHROW (*node) = 1;
5560 else
5561 *no_add_attrs = true;
5562
5563 return NULL_TREE;
5564 }
5565
5566 /* Handle a "pure" attribute; arguments as in
5567 struct attribute_spec.handler. */
5568
5569 static tree
5570 handle_pure_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5571 int ARG_UNUSED (flags), bool *no_add_attrs)
5572 {
5573 if (TREE_CODE (*node) == FUNCTION_DECL)
5574 DECL_PURE_P (*node) = 1;
5575 /* ??? TODO: Support types. */
5576 else
5577 {
5578 warning (OPT_Wattributes, "%qs attribute ignored",
5579 IDENTIFIER_POINTER (name));
5580 *no_add_attrs = true;
5581 }
5582
5583 return NULL_TREE;
5584 }
5585
5586 /* Handle a "no vops" attribute; arguments as in
5587 struct attribute_spec.handler. */
5588
5589 static tree
5590 handle_novops_attribute (tree *node, tree ARG_UNUSED (name),
5591 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5592 bool *ARG_UNUSED (no_add_attrs))
5593 {
5594 gcc_assert (TREE_CODE (*node) == FUNCTION_DECL);
5595 DECL_IS_NOVOPS (*node) = 1;
5596 return NULL_TREE;
5597 }
5598
5599 /* Helper for nonnull attribute handling; fetch the operand number
5600 from the attribute argument list. */
5601
5602 static bool
5603 get_nonnull_operand (tree arg_num_expr, unsigned HOST_WIDE_INT *valp)
5604 {
5605 /* Verify the arg number is a constant. */
5606 if (!tree_fits_uhwi_p (arg_num_expr))
5607 return false;
5608
5609 *valp = TREE_INT_CST_LOW (arg_num_expr);
5610 return true;
5611 }
5612
5613 /* Handle the "nonnull" attribute. */
5614 static tree
5615 handle_nonnull_attribute (tree *node, tree ARG_UNUSED (name),
5616 tree args, int ARG_UNUSED (flags),
5617 bool *no_add_attrs)
5618 {
5619 tree type = *node;
5620 unsigned HOST_WIDE_INT attr_arg_num;
5621
5622 /* If no arguments are specified, all pointer arguments should be
5623 non-null. Verify a full prototype is given so that the arguments
5624 will have the correct types when we actually check them later. */
5625 if (!args)
5626 {
5627 if (!prototype_p (type))
5628 {
5629 error ("nonnull attribute without arguments on a non-prototype");
5630 *no_add_attrs = true;
5631 }
5632 return NULL_TREE;
5633 }
5634
5635 /* Argument list specified. Verify that each argument number references
5636 a pointer argument. */
5637 for (attr_arg_num = 1; args; args = TREE_CHAIN (args))
5638 {
5639 unsigned HOST_WIDE_INT arg_num = 0, ck_num;
5640
5641 if (!get_nonnull_operand (TREE_VALUE (args), &arg_num))
5642 {
5643 error ("nonnull argument has invalid operand number (argument %lu)",
5644 (unsigned long) attr_arg_num);
5645 *no_add_attrs = true;
5646 return NULL_TREE;
5647 }
5648
5649 if (prototype_p (type))
5650 {
5651 function_args_iterator iter;
5652 tree argument;
5653
5654 function_args_iter_init (&iter, type);
5655 for (ck_num = 1; ; ck_num++, function_args_iter_next (&iter))
5656 {
5657 argument = function_args_iter_cond (&iter);
5658 if (!argument || ck_num == arg_num)
5659 break;
5660 }
5661
5662 if (!argument
5663 || TREE_CODE (argument) == VOID_TYPE)
5664 {
5665 error ("nonnull argument with out-of-range operand number "
5666 "(argument %lu, operand %lu)",
5667 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5668 *no_add_attrs = true;
5669 return NULL_TREE;
5670 }
5671
5672 if (TREE_CODE (argument) != POINTER_TYPE)
5673 {
5674 error ("nonnull argument references non-pointer operand "
5675 "(argument %lu, operand %lu)",
5676 (unsigned long) attr_arg_num, (unsigned long) arg_num);
5677 *no_add_attrs = true;
5678 return NULL_TREE;
5679 }
5680 }
5681 }
5682
5683 return NULL_TREE;
5684 }
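/* Illustrative sketch, not part of the original source: in C this
   attribute is typically written as

     extern void *my_memcpy (void *dest, const void *src, size_t n)
       __attribute__ ((nonnull (1, 2)));

   (declaration purely for illustration); the handler above performs
   the corresponding operand-number checks when such an attribute
   reaches gigi on a builtin declaration.  */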
5685
5686 /* Handle a "sentinel" attribute. */
5687
5688 static tree
5689 handle_sentinel_attribute (tree *node, tree name, tree args,
5690 int ARG_UNUSED (flags), bool *no_add_attrs)
5691 {
5692 if (!prototype_p (*node))
5693 {
5694 warning (OPT_Wattributes,
5695 "%qs attribute requires prototypes with named arguments",
5696 IDENTIFIER_POINTER (name));
5697 *no_add_attrs = true;
5698 }
5699 else
5700 {
5701 if (!stdarg_p (*node))
5702 {
5703 warning (OPT_Wattributes,
5704 "%qs attribute only applies to variadic functions",
5705 IDENTIFIER_POINTER (name));
5706 *no_add_attrs = true;
5707 }
5708 }
5709
5710 if (args)
5711 {
5712 tree position = TREE_VALUE (args);
5713
5714 if (TREE_CODE (position) != INTEGER_CST)
5715 {
5716 warning (0, "requested position is not an integer constant");
5717 *no_add_attrs = true;
5718 }
5719 else
5720 {
5721 if (tree_int_cst_lt (position, integer_zero_node))
5722 {
5723 warning (0, "requested position is less than zero");
5724 *no_add_attrs = true;
5725 }
5726 }
5727 }
5728
5729 return NULL_TREE;
5730 }
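/* Illustrative sketch, not part of the original source: "sentinel"
   applies to variadic functions whose calls must end with a null
   pointer, as in the C declaration

     extern int my_execl (const char *path, const char *arg, ...)
       __attribute__ ((sentinel));

   (declaration purely for illustration); the optional integer
   argument gives the position of the sentinel counted backwards from
   the end of the argument list, which is why it must be a nonnegative
   integer constant.  */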
5731
5732 /* Handle a "noreturn" attribute; arguments as in
5733 struct attribute_spec.handler. */
5734
5735 static tree
5736 handle_noreturn_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5737 int ARG_UNUSED (flags), bool *no_add_attrs)
5738 {
5739 tree type = TREE_TYPE (*node);
5740
5741 /* See FIXME comment in c_common_attribute_table. */
5742 if (TREE_CODE (*node) == FUNCTION_DECL)
5743 TREE_THIS_VOLATILE (*node) = 1;
5744 else if (TREE_CODE (type) == POINTER_TYPE
5745 && TREE_CODE (TREE_TYPE (type)) == FUNCTION_TYPE)
5746 TREE_TYPE (*node)
5747 = build_pointer_type
5748 (build_type_variant (TREE_TYPE (type),
5749 TYPE_READONLY (TREE_TYPE (type)), 1));
5750 else
5751 {
5752 warning (OPT_Wattributes, "%qs attribute ignored",
5753 IDENTIFIER_POINTER (name));
5754 *no_add_attrs = true;
5755 }
5756
5757 return NULL_TREE;
5758 }
5759
5760 /* Handle a "leaf" attribute; arguments as in
5761 struct attribute_spec.handler. */
5762
5763 static tree
5764 handle_leaf_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5765 int ARG_UNUSED (flags), bool *no_add_attrs)
5766 {
5767 if (TREE_CODE (*node) != FUNCTION_DECL)
5768 {
5769 warning (OPT_Wattributes, "%qE attribute ignored", name);
5770 *no_add_attrs = true;
5771 }
5772 if (!TREE_PUBLIC (*node))
5773 {
5774 warning (OPT_Wattributes, "%qE attribute has no effect", name);
5775 *no_add_attrs = true;
5776 }
5777
5778 return NULL_TREE;
5779 }
5780
5781 /* Handle a "always_inline" attribute; arguments as in
5782 struct attribute_spec.handler. */
5783
5784 static tree
5785 handle_always_inline_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5786 int ARG_UNUSED (flags), bool *no_add_attrs)
5787 {
5788 if (TREE_CODE (*node) == FUNCTION_DECL)
5789 {
5790 /* Set the attribute and mark it for disregarding inline limits. */
5791 DECL_DISREGARD_INLINE_LIMITS (*node) = 1;
5792 }
5793 else
5794 {
5795 warning (OPT_Wattributes, "%qE attribute ignored", name);
5796 *no_add_attrs = true;
5797 }
5798
5799 return NULL_TREE;
5800 }
5801
5802 /* Handle a "malloc" attribute; arguments as in
5803 struct attribute_spec.handler. */
5804
5805 static tree
5806 handle_malloc_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5807 int ARG_UNUSED (flags), bool *no_add_attrs)
5808 {
5809 if (TREE_CODE (*node) == FUNCTION_DECL
5810 && POINTER_TYPE_P (TREE_TYPE (TREE_TYPE (*node))))
5811 DECL_IS_MALLOC (*node) = 1;
5812 else
5813 {
5814 warning (OPT_Wattributes, "%qs attribute ignored",
5815 IDENTIFIER_POINTER (name));
5816 *no_add_attrs = true;
5817 }
5818
5819 return NULL_TREE;
5820 }
5821
5822 /* Fake handler for attributes we don't properly support. */
5823
5824 tree
5825 fake_attribute_handler (tree * ARG_UNUSED (node),
5826 tree ARG_UNUSED (name),
5827 tree ARG_UNUSED (args),
5828 int ARG_UNUSED (flags),
5829 bool * ARG_UNUSED (no_add_attrs))
5830 {
5831 return NULL_TREE;
5832 }
5833
5834 /* Handle a "type_generic" attribute. */
5835
5836 static tree
5837 handle_type_generic_attribute (tree *node, tree ARG_UNUSED (name),
5838 tree ARG_UNUSED (args), int ARG_UNUSED (flags),
5839 bool * ARG_UNUSED (no_add_attrs))
5840 {
5841 /* Ensure we have a function type. */
5842 gcc_assert (TREE_CODE (*node) == FUNCTION_TYPE);
5843
5844 /* Ensure we have a variadic function. */
5845 gcc_assert (!prototype_p (*node) || stdarg_p (*node));
5846
5847 return NULL_TREE;
5848 }
5849
5850 /* Handle a "vector_size" attribute; arguments as in
5851 struct attribute_spec.handler. */
5852
5853 static tree
5854 handle_vector_size_attribute (tree *node, tree name, tree args,
5855 int ARG_UNUSED (flags), bool *no_add_attrs)
5856 {
5857 tree type = *node;
5858 tree vector_type;
5859
5860 *no_add_attrs = true;
5861
5862 /* We need to provide for vector pointers, vector arrays, and
5863 functions returning vectors. For example:
5864
5865 __attribute__((vector_size(16))) short *foo;
5866
5867 In this case, the mode is SI, but the type being modified is
5868 HI, so we need to look further. */
5869 while (POINTER_TYPE_P (type)
5870 || TREE_CODE (type) == FUNCTION_TYPE
5871 || TREE_CODE (type) == ARRAY_TYPE)
5872 type = TREE_TYPE (type);
5873
5874 vector_type = build_vector_type_for_size (type, TREE_VALUE (args), name);
5875 if (!vector_type)
5876 return NULL_TREE;
5877
5878 /* Build back pointers if needed. */
5879 *node = reconstruct_complex_type (*node, vector_type);
5880
5881 return NULL_TREE;
5882 }
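/* Illustrative sketch, not part of the original source: the canonical
   C use of this attribute is

     typedef int v4si __attribute__ ((vector_size (16)));

   i.e. a vector of four 32-bit integers; the loop above additionally
   handles pointer, array and function variants, where the type to be
   vectorized is the innermost one rather than the outer type.  */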
5883
5884 /* Handle a "vector_type" attribute; arguments as in
5885 struct attribute_spec.handler. */
5886
5887 static tree
5888 handle_vector_type_attribute (tree *node, tree name, tree ARG_UNUSED (args),
5889 int ARG_UNUSED (flags), bool *no_add_attrs)
5890 {
5891 tree type = *node;
5892 tree vector_type;
5893
5894 *no_add_attrs = true;
5895
5896 if (TREE_CODE (type) != ARRAY_TYPE)
5897 {
5898 error ("attribute %qs applies to array types only",
5899 IDENTIFIER_POINTER (name));
5900 return NULL_TREE;
5901 }
5902
5903 vector_type = build_vector_type_for_array (type, name);
5904 if (!vector_type)
5905 return NULL_TREE;
5906
5907 TYPE_REPRESENTATIVE_ARRAY (vector_type) = type;
5908 *node = vector_type;
5909
5910 return NULL_TREE;
5911 }
5912
5913 /* ----------------------------------------------------------------------- *
5914 * BUILTIN FUNCTIONS *
5915 * ----------------------------------------------------------------------- */
5916
5917 /* Worker for DEF_BUILTIN. Possibly define a builtin function with one or two
5918 names. Does not declare a non-__builtin_ function if flag_no_builtin, or
5919 if nonansi_p and flag_no_nonansi_builtin. */
5920
5921 static void
5922 def_builtin_1 (enum built_in_function fncode,
5923 const char *name,
5924 enum built_in_class fnclass,
5925 tree fntype, tree libtype,
5926 bool both_p, bool fallback_p,
5927 bool nonansi_p ATTRIBUTE_UNUSED,
5928 tree fnattrs, bool implicit_p)
5929 {
5930 tree decl;
5931 const char *libname;
5932
5933   /* Preserve an already installed decl.  It most likely was set up in advance
5934 (e.g. as part of the internal builtins) for specific reasons. */
5935 if (builtin_decl_explicit (fncode) != NULL_TREE)
5936 return;
5937
5938 gcc_assert ((!both_p && !fallback_p)
5939 || !strncmp (name, "__builtin_",
5940 strlen ("__builtin_")));
5941
5942 libname = name + strlen ("__builtin_");
5943 decl = add_builtin_function (name, fntype, fncode, fnclass,
5944 (fallback_p ? libname : NULL),
5945 fnattrs);
5946 if (both_p)
5947 /* ??? This is normally further controlled by command-line options
5948 like -fno-builtin, but we don't have them for Ada. */
5949 add_builtin_function (libname, libtype, fncode, fnclass,
5950 NULL, fnattrs);
5951
5952 set_builtin_decl (fncode, decl, implicit_p);
5953 }
5954
5955 static int flag_isoc94 = 0;
5956 static int flag_isoc99 = 0;
5957 static int flag_isoc11 = 0;
5958
5959 /* Install what the common builtins.def offers. */
5960
5961 static void
5962 install_builtin_functions (void)
5963 {
5964 #define DEF_BUILTIN(ENUM, NAME, CLASS, TYPE, LIBTYPE, BOTH_P, FALLBACK_P, \
5965 NONANSI_P, ATTRS, IMPLICIT, COND) \
5966 if (NAME && COND) \
5967 def_builtin_1 (ENUM, NAME, CLASS, \
5968 builtin_types[(int) TYPE], \
5969 builtin_types[(int) LIBTYPE], \
5970 BOTH_P, FALLBACK_P, NONANSI_P, \
5971 built_in_attributes[(int) ATTRS], IMPLICIT);
5972 #include "builtins.def"
5973 #undef DEF_BUILTIN
5974 }
5975
5976 /* ----------------------------------------------------------------------- *
5977 * BUILTIN FUNCTIONS *
5978 * ----------------------------------------------------------------------- */
5979
5980 /* Install the builtin functions we might need. */
5981
5982 void
5983 gnat_install_builtins (void)
5984 {
5985 install_builtin_elementary_types ();
5986 install_builtin_function_types ();
5987 install_builtin_attributes ();
5988
5989 /* Install builtins used by generic middle-end pieces first. Some of these
5990 know about internal specificities and control attributes accordingly, for
5991 instance __builtin_alloca vs no-throw and -fstack-check. We will ignore
5992 the generic definition from builtins.def. */
5993 build_common_builtin_nodes ();
5994
5995 /* Now, install the target specific builtins, such as the AltiVec family on
5996 ppc, and the common set as exposed by builtins.def. */
5997 targetm.init_builtins ();
5998 install_builtin_functions ();
5999 }
6000
6001 #include "gt-ada-utils.h"
6002 #include "gtype-ada.h"