1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S 2 *
6 * *
7 * C Implementation File *
8 * *
9 * Copyright (C) 1992-2022, Free Software Foundation, Inc. *
10 * *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
20 * *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
23 * *
24 ****************************************************************************/
25
26 #include "config.h"
27 #include "system.h"
28 #include "coretypes.h"
29 #include "memmodel.h"
30 #include "tm.h"
31 #include "vec.h"
32 #include "alias.h"
33 #include "tree.h"
34 #include "inchash.h"
35 #include "builtins.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "stringpool.h"
39 #include "varasm.h"
40 #include "flags.h"
41 #include "toplev.h"
42 #include "ggc.h"
43 #include "tree-inline.h"
44
45 #include "ada.h"
46 #include "types.h"
47 #include "atree.h"
48 #include "elists.h"
49 #include "namet.h"
50 #include "nlists.h"
51 #include "snames.h"
52 #include "stringt.h"
53 #include "uintp.h"
54 #include "fe.h"
55 #include "sinfo.h"
56 #include "einfo.h"
57 #include "ada-tree.h"
58 #include "gigi.h"
59
60 /* Return the base type of TYPE. */
61
62 tree
63 get_base_type (tree type)
64 {
65 if (TREE_CODE (type) == RECORD_TYPE
66 && TYPE_JUSTIFIED_MODULAR_P (type))
67 type = TREE_TYPE (TYPE_FIELDS (type));
68
69 while (TREE_TYPE (type)
70 && (TREE_CODE (type) == INTEGER_TYPE
71 || TREE_CODE (type) == REAL_TYPE))
72 type = TREE_TYPE (type);
73
74 return type;
75 }
76
77 /* EXP is a GCC tree representing an address. See if we can find how strictly
78 the object at this address is aligned and, if so, return the alignment of
79 the object in bits. Otherwise return 0. */
80
81 unsigned int
82 known_alignment (tree exp)
83 {
84 unsigned int this_alignment;
85 unsigned int lhs, rhs;
86
87 switch (TREE_CODE (exp))
88 {
89 CASE_CONVERT:
90 case VIEW_CONVERT_EXPR:
91 case NON_LVALUE_EXPR:
92 /* Conversions between pointers and integers don't change the alignment
93 of the underlying object. */
94 this_alignment = known_alignment (TREE_OPERAND (exp, 0));
95 break;
96
97 case COMPOUND_EXPR:
98 /* The value of a COMPOUND_EXPR is that of its second operand. */
99 this_alignment = known_alignment (TREE_OPERAND (exp, 1));
100 break;
101
102 case PLUS_EXPR:
103 case MINUS_EXPR:
104 /* If two addresses are added, the alignment of the result is the
105 minimum of the two alignments. */
106 lhs = known_alignment (TREE_OPERAND (exp, 0));
107 rhs = known_alignment (TREE_OPERAND (exp, 1));
108 this_alignment = MIN (lhs, rhs);
109 break;
110
111 case POINTER_PLUS_EXPR:
112 /* If this is the pattern built for aligning types, decode it. */
113 if (TREE_CODE (TREE_OPERAND (exp, 1)) == BIT_AND_EXPR
114 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)) == NEGATE_EXPR)
115 {
116 tree op = TREE_OPERAND (TREE_OPERAND (exp, 1), 1);
117 return
118 known_alignment (fold_build1 (BIT_NOT_EXPR, TREE_TYPE (op), op));
119 }
120
121 /* If we don't know the alignment of the offset, we assume that
122 of the base. */
123 lhs = known_alignment (TREE_OPERAND (exp, 0));
124 rhs = known_alignment (TREE_OPERAND (exp, 1));
125
126 if (rhs == 0)
127 this_alignment = lhs;
128 else
129 this_alignment = MIN (lhs, rhs);
130 break;
131
132 case COND_EXPR:
133 /* If there is a choice between two values, use the smaller one. */
134 lhs = known_alignment (TREE_OPERAND (exp, 1));
135 rhs = known_alignment (TREE_OPERAND (exp, 2));
136 this_alignment = MIN (lhs, rhs);
137 break;
138
139 case INTEGER_CST:
140 {
141 unsigned HOST_WIDE_INT c = TREE_INT_CST_LOW (exp);
142 /* C & -C isolates the lowest set bit of the constant, which gives the
143 alignment in bytes, hence the scaling to bits. */
144 this_alignment = (c & -c) * BITS_PER_UNIT;
145 }
146 break;
147
148 case MULT_EXPR:
149 /* If we know the alignment of just one side, use it. Otherwise,
150 use the product of the alignments. */
151 lhs = known_alignment (TREE_OPERAND (exp, 0));
152 rhs = known_alignment (TREE_OPERAND (exp, 1));
153
154 if (lhs == 0)
155 this_alignment = rhs;
156 else if (rhs == 0)
157 this_alignment = lhs;
158 else
159 this_alignment = MIN (lhs * rhs, BIGGEST_ALIGNMENT);
160 break;
161
162 case BIT_AND_EXPR:
163 /* A bit-and expression is as aligned as the maximum alignment of the
164 operands. We typically get here for a complex lhs and a constant
165 negative power of two on the rhs to force an explicit alignment, so
166 don't bother looking at the lhs. */
167 this_alignment = known_alignment (TREE_OPERAND (exp, 1));
168 break;
169
170 case ADDR_EXPR:
171 if (DECL_P (TREE_OPERAND (exp, 0)))
172 this_alignment = DECL_ALIGN (TREE_OPERAND (exp, 0));
173 else
174 this_alignment = get_object_alignment (TREE_OPERAND (exp, 0));
175 break;
176
177 case CALL_EXPR:
178 {
179 tree fndecl = get_callee_fndecl (exp);
180 if (fndecl == malloc_decl || fndecl == realloc_decl)
181 return get_target_system_allocator_alignment () * BITS_PER_UNIT;
182
183 tree t = maybe_inline_call_in_expr (exp);
184 if (t)
185 return known_alignment (t);
186 }
187
188 /* ... fall through ... */
189
190 default:
191 /* For other pointer expressions, we assume that the pointed-to object
192 is at least as aligned as the pointed-to type. Beware that we can
193 have a dummy type here (e.g. a Taft Amendment type), for which the
194 alignment is meaningless and should be ignored. */
195 if (POINTER_TYPE_P (TREE_TYPE (exp))
196 && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp)))
197 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (exp))))
198 this_alignment = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
199 else
200 this_alignment = 0;
201 break;
202 }
203
204 return this_alignment;
205 }
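
/* A minimal standalone sketch of the INTEGER_CST computation above (the
   helper name is hypothetical and the function is not part of gigi): the
   alignment of a literal address is derived from its lowest set bit, which
   is a byte offset. For instance 24 = 0b11000 gives (24 & -24) = 8 bytes,
   i.e. a 64-bit alignment. */
#if 0
static unsigned int
alignment_of_constant_address (unsigned HOST_WIDE_INT c)
{
  /* Isolate the lowest set bit (in bytes) and scale it to bits. */
  return (c & -c) * BITS_PER_UNIT;
}
#endif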
206
207 /* We have a comparison or assignment operation on two types, T1 and T2, which
208 are either both array types or both record types. T1 is assumed to be for
209 the left hand side operand, and T2 for the right hand side. Return the
210 type that both operands should be converted to for the operation, if any.
211 Otherwise return zero. */
212
213 static tree
214 find_common_type (tree t1, tree t2)
215 {
216 /* ??? As of today, various constructs lead here with types of different
217 sizes even when both are constant (e.g. tagged types, packable vs regular
218 component types, padded vs unpadded types, ...). While some of these
219 would be better handled upstream (types should be made consistent before
220 calling into build_binary_op), others are really expected and we have
221 to be careful. */
222
223 const bool variable_record_on_lhs
224 = (TREE_CODE (t1) == RECORD_TYPE
225 && TREE_CODE (t2) == RECORD_TYPE
226 && get_variant_part (t1)
227 && !get_variant_part (t2));
228
229 const bool variable_array_on_lhs
230 = (TREE_CODE (t1) == ARRAY_TYPE
231 && TREE_CODE (t2) == ARRAY_TYPE
232 && !TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (t1)))
233 && TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (t2))));
234
235 /* We must avoid writing more than what the target can hold if this is for
236 an assignment (the case of tagged types is handled in build_binary_op),
237 so we use the lhs type if it is known to be smaller, or if it is of
238 constant size and the rhs type is not, whatever the modes. We also force
239 t1 when the sizes are equal and constant, to minimize occurrences of view
240 conversions on the lhs of an assignment, except for types with a variable
241 part on the lhs but not on the rhs, to keep the conversion simpler. */
242 if (TREE_CONSTANT (TYPE_SIZE (t1))
243 && (!TREE_CONSTANT (TYPE_SIZE (t2))
244 || tree_int_cst_lt (TYPE_SIZE (t1), TYPE_SIZE (t2))
245 || (TYPE_SIZE (t1) == TYPE_SIZE (t2)
246 && !variable_record_on_lhs
247 && !variable_array_on_lhs)))
248 return t1;
249
250 /* Otherwise, if the lhs type is non-BLKmode, use it, except for the case of
251 a non-BLKmode rhs and array types with a variable part on the lhs but not
252 on the rhs to make sure the conversion is preserved during gimplification.
253 Note that we know that we will not have any alignment problems since, if
254 we did, the non-BLKmode type could not have been used. */
255 if (TYPE_MODE (t1) != BLKmode
256 && (TYPE_MODE (t2) == BLKmode || !variable_array_on_lhs))
257 return t1;
258
259 /* If the rhs type is of constant size, use it whatever the modes. At
260 this point it is known to be smaller, or of constant size and the
261 lhs type is not. */
262 if (TREE_CONSTANT (TYPE_SIZE (t2)))
263 return t2;
264
265 /* Otherwise, if the rhs type is non-BLKmode, use it. */
266 if (TYPE_MODE (t2) != BLKmode)
267 return t2;
268
269 /* In this case, both types have variable size and BLKmode. It's
270 probably best to leave the "type mismatch" because changing it
271 could cause a bad self-referential reference. */
272 return NULL_TREE;
273 }
274
275 /* Return an expression tree representing an equality comparison of A1 and A2,
276 two objects of type ARRAY_TYPE. The result should be of type RESULT_TYPE.
277
278 Two arrays are equal in one of two ways: (1) if both have zero length in
279 some dimension (not necessarily the same dimension) or (2) if the lengths
280 in each dimension are equal and the data is equal. We perform the length
281 tests in as efficient a manner as possible. */
282
283 static tree
284 compare_arrays (location_t loc, tree result_type, tree a1, tree a2)
285 {
286 tree result = convert (result_type, boolean_true_node);
287 tree a1_is_null = convert (result_type, boolean_false_node);
288 tree a2_is_null = convert (result_type, boolean_false_node);
289 tree t1 = TREE_TYPE (a1);
290 tree t2 = TREE_TYPE (a2);
291 bool a1_side_effects_p = TREE_SIDE_EFFECTS (a1);
292 bool a2_side_effects_p = TREE_SIDE_EFFECTS (a2);
293 bool length_zero_p = false;
294
295 /* If the operands have side-effects, they need to be evaluated only once
296 in spite of the multiple references in the comparison. */
297 if (a1_side_effects_p)
298 a1 = gnat_protect_expr (a1);
299
300 if (a2_side_effects_p)
301 a2 = gnat_protect_expr (a2);
302
303 /* Process each dimension separately and compare the lengths. If any
304 dimension has a length known to be zero, set LENGTH_ZERO_P to true
305 in order to suppress the comparison of the data at the end. */
306 while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
307 {
308 tree dom1 = TYPE_DOMAIN (t1);
309 tree dom2 = TYPE_DOMAIN (t2);
310 tree length1 = size_binop (PLUS_EXPR,
311 size_binop (MINUS_EXPR,
312 TYPE_MAX_VALUE (dom1),
313 TYPE_MIN_VALUE (dom1)),
314 size_one_node);
315 tree length2 = size_binop (PLUS_EXPR,
316 size_binop (MINUS_EXPR,
317 TYPE_MAX_VALUE (dom2),
318 TYPE_MIN_VALUE (dom2)),
319 size_one_node);
320 tree ind1 = TYPE_INDEX_TYPE (dom1);
321 tree ind2 = TYPE_INDEX_TYPE (dom2);
322 tree base_type = maybe_character_type (get_base_type (ind1));
323 tree lb1 = convert (base_type, TYPE_MIN_VALUE (ind1));
324 tree ub1 = convert (base_type, TYPE_MAX_VALUE (ind1));
325 tree lb2 = convert (base_type, TYPE_MIN_VALUE (ind2));
326 tree ub2 = convert (base_type, TYPE_MAX_VALUE (ind2));
327 tree comparison, this_a1_is_null, this_a2_is_null;
328
329 /* If the length of the first array is a constant and that of the second
330 array is not, swap our operands to have the constant second. */
331 if (TREE_CODE (length1) == INTEGER_CST
332 && TREE_CODE (length2) != INTEGER_CST)
333 {
334 tree tem;
335 bool btem;
336
337 tem = a1, a1 = a2, a2 = tem;
338 tem = t1, t1 = t2, t2 = tem;
339 tem = lb1, lb1 = lb2, lb2 = tem;
340 tem = ub1, ub1 = ub2, ub2 = tem;
341 tem = length1, length1 = length2, length2 = tem;
342 tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
343 btem = a1_side_effects_p, a1_side_effects_p = a2_side_effects_p,
344 a2_side_effects_p = btem;
345 }
346
347 /* If the length of the second array is the constant zero, we can just
348 use the original stored bounds for the first array and see whether
349 last < first holds. */
350 if (integer_zerop (length2))
351 {
352 length_zero_p = true;
353
354 lb1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (lb1, a1);
355 ub1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (ub1, a1);
356
357 comparison = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
358 if (EXPR_P (comparison))
359 SET_EXPR_LOCATION (comparison, loc);
360
361 this_a1_is_null = comparison;
362 this_a2_is_null = convert (result_type, boolean_true_node);
363 }
364
365 /* Otherwise, if the length is some other constant value, we know that
366 this dimension in the second array cannot be superflat, so we can
367 just use its length computed from the actual stored bounds. */
368 else if (TREE_CODE (length2) == INTEGER_CST)
369 {
370 /* Note that we know that LB2 and UB2 are constant and hence
371 cannot contain a PLACEHOLDER_EXPR. */
372 lb1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (lb1, a1);
373 ub1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (ub1, a1);
374
375 comparison
376 = fold_build2_loc (loc, EQ_EXPR, result_type,
377 build_binary_op (MINUS_EXPR, base_type,
378 ub1, lb1),
379 build_binary_op (MINUS_EXPR, base_type,
380 ub2, lb2));
381 if (EXPR_P (comparison))
382 SET_EXPR_LOCATION (comparison, loc);
383
384 this_a1_is_null
385 = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
386
387 this_a2_is_null = convert (result_type, boolean_false_node);
388 }
389
390 /* Otherwise, compare the computed lengths. */
391 else
392 {
393 length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
394 length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);
395
396 comparison
397 = fold_build2_loc (loc, EQ_EXPR, result_type, length1, length2);
398 if (EXPR_P (comparison))
399 SET_EXPR_LOCATION (comparison, loc);
400
401 lb1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (lb1, a1);
402 ub1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (ub1, a1);
403
404 this_a1_is_null
405 = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
406
407 lb2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (lb2, a2);
408 ub2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (ub2, a2);
409
410 this_a2_is_null
411 = fold_build2_loc (loc, LT_EXPR, result_type, ub2, lb2);
412 }
413
414 /* Append expressions for this dimension to the final expressions. */
415 result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
416 result, comparison);
417
418 a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
419 this_a1_is_null, a1_is_null);
420
421 a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
422 this_a2_is_null, a2_is_null);
423
424 t1 = TREE_TYPE (t1);
425 t2 = TREE_TYPE (t2);
426 }
427
428 /* Unless the length of some dimension is known to be zero, compare the
429 data in the array. */
430 if (!length_zero_p)
431 {
432 tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));
433 tree comparison;
434
435 if (type)
436 {
437 a1 = convert (type, a1),
438 a2 = convert (type, a2);
439 }
440
441 comparison = fold_build2_loc (loc, EQ_EXPR, result_type, a1, a2);
442
443 result
444 = build_binary_op (TRUTH_ANDIF_EXPR, result_type, result, comparison);
445 }
446
447 /* The result is also true if both sizes are zero. */
448 result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
449 build_binary_op (TRUTH_ANDIF_EXPR, result_type,
450 a1_is_null, a2_is_null),
451 result);
452
453 /* If the operands have side-effects, they need to be evaluated before
454 doing the tests above since the place they otherwise would end up
455 being evaluated at run time could be wrong. */
456 if (a1_side_effects_p)
457 result = build2 (COMPOUND_EXPR, result_type, a1, result);
458
459 if (a2_side_effects_p)
460 result = build2 (COMPOUND_EXPR, result_type, a2, result);
461
462 return result;
463 }
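
/* For illustration, the expression built above for a one-dimensional case
   has roughly the following shape, in C-like pseudo-code with placeholder
   names:

     (length1 == length2 && <data comparison>)
       || (<a1 has a null range> && <a2 has a null range>)

   wrapped in COMPOUND_EXPRs evaluating A1 and A2 first if they have side
   effects, and with the data comparison suppressed when some length is
   known to be zero. */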
464
465 /* Return an expression tree representing an equality comparison of P1 and P2,
466 two objects of fat pointer type. The result should be of type RESULT_TYPE.
467
468 Two fat pointers are equal in one of two ways: (1) if both have a null
469 pointer to the array or (2) if they contain the same couple of pointers.
470 We perform the comparison in as efficient a manner as possible. */
471
472 static tree
473 compare_fat_pointers (location_t loc, tree result_type, tree p1, tree p2)
474 {
475 tree p1_array, p2_array, p1_bounds, p2_bounds, same_array, same_bounds;
476 tree p1_array_is_null, p2_array_is_null;
477
478 /* If either operand has side-effects, they have to be evaluated only once
479 in spite of the multiple references to the operand in the comparison. */
480 p1 = gnat_protect_expr (p1);
481 p2 = gnat_protect_expr (p2);
482
483 /* The constant folder doesn't fold fat pointer types so we do it here. */
484 if (TREE_CODE (p1) == CONSTRUCTOR)
485 p1_array = CONSTRUCTOR_ELT (p1, 0)->value;
486 else
487 p1_array = build_component_ref (p1, TYPE_FIELDS (TREE_TYPE (p1)), true);
488
489 p1_array_is_null
490 = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array,
491 fold_convert_loc (loc, TREE_TYPE (p1_array),
492 null_pointer_node));
493
494 if (TREE_CODE (p2) == CONSTRUCTOR)
495 p2_array = CONSTRUCTOR_ELT (p2, 0)->value;
496 else
497 p2_array = build_component_ref (p2, TYPE_FIELDS (TREE_TYPE (p2)), true);
498
499 p2_array_is_null
500 = fold_build2_loc (loc, EQ_EXPR, result_type, p2_array,
501 fold_convert_loc (loc, TREE_TYPE (p2_array),
502 null_pointer_node));
503
504 /* If one of the pointers to the array is null, just compare the other. */
505 if (integer_zerop (p1_array))
506 return p2_array_is_null;
507 else if (integer_zerop (p2_array))
508 return p1_array_is_null;
509
510 /* Otherwise, do the fully-fledged comparison. */
511 same_array
512 = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array, p2_array);
513
514 if (TREE_CODE (p1) == CONSTRUCTOR)
515 p1_bounds = CONSTRUCTOR_ELT (p1, 1)->value;
516 else
517 p1_bounds
518 = build_component_ref (p1, DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p1))),
519 true);
520
521 if (TREE_CODE (p2) == CONSTRUCTOR)
522 p2_bounds = CONSTRUCTOR_ELT (p2, 1)->value;
523 else
524 p2_bounds
525 = build_component_ref (p2, DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p2))),
526 true);
527
528 same_bounds
529 = fold_build2_loc (loc, EQ_EXPR, result_type, p1_bounds, p2_bounds);
530
531 /* P1_ARRAY == P2_ARRAY && (P1_ARRAY == NULL || P1_BOUNDS == P2_BOUNDS). */
532 return build_binary_op (TRUTH_ANDIF_EXPR, result_type, same_array,
533 build_binary_op (TRUTH_ORIF_EXPR, result_type,
534 p1_array_is_null, same_bounds));
535 }
536
537 /* Compute the result of applying OP_CODE to LHS and RHS, where both are of
538 type TYPE. We know that TYPE is a modular type with a nonbinary
539 modulus. */
540
541 static tree
542 nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
543 tree rhs)
544 {
545 tree modulus = TYPE_MODULUS (type);
546 unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
547 unsigned int precision;
548 bool unsignedp = true;
549 tree op_type = type;
550 tree result;
551
552 /* If this is an addition of a constant, convert it to a subtraction
553 of a constant since we can do that faster. */
554 if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
555 {
556 rhs = fold_build2 (MINUS_EXPR, type, modulus, rhs);
557 op_code = MINUS_EXPR;
558 }
559
560 /* For the logical operations, we only need PRECISION bits. For
561 addition and subtraction, we need one more, and for multiplication we
562 need twice as many. But we never want to use a precision smaller
563 than that of the type. */
564 if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
565 needed_precision += 1;
566 else if (op_code == MULT_EXPR)
567 needed_precision *= 2;
568
569 precision = MAX (needed_precision, TYPE_PRECISION (op_type));
570
571 /* Unsigned will do for everything but subtraction. */
572 if (op_code == MINUS_EXPR)
573 unsignedp = false;
574
575 /* If our type is the wrong signedness or isn't wide enough, make a new
576 type and convert both our operands to it. */
577 if (TYPE_PRECISION (op_type) < precision
578 || TYPE_UNSIGNED (op_type) != unsignedp)
579 {
580 /* Copy the type so we ensure it can be modified to make it modular. */
581 op_type = copy_type (gnat_type_for_size (precision, unsignedp));
582 modulus = convert (op_type, modulus);
583 SET_TYPE_MODULUS (op_type, modulus);
584 TYPE_MODULAR_P (op_type) = 1;
585 lhs = convert (op_type, lhs);
586 rhs = convert (op_type, rhs);
587 }
588
589 /* Do the operation, then we'll fix it up. */
590 result = fold_build2 (op_code, op_type, lhs, rhs);
591
592 /* For multiplication, we have no choice but to do a full modulus
593 operation. However, we want to do this in the narrowest
594 possible size. */
595 if (op_code == MULT_EXPR)
596 {
597 /* Copy the type so we ensure it can be modified to make it modular. */
598 tree div_type = copy_type (gnat_type_for_size (needed_precision, 1));
599 modulus = convert (div_type, modulus);
600 SET_TYPE_MODULUS (div_type, modulus);
601 TYPE_MODULAR_P (div_type) = 1;
602 result = convert (op_type,
603 fold_build2 (TRUNC_MOD_EXPR, div_type,
604 convert (div_type, result), modulus));
605 }
606
607 /* For subtraction, add the modulus back if we are negative. */
608 else if (op_code == MINUS_EXPR)
609 {
610 result = gnat_protect_expr (result);
611 result = fold_build3 (COND_EXPR, op_type,
612 fold_build2 (LT_EXPR, boolean_type_node, result,
613 build_int_cst (op_type, 0)),
614 fold_build2 (PLUS_EXPR, op_type, result, modulus),
615 result);
616 }
617
618 /* For the other operations, subtract the modulus if we are >= it. */
619 else
620 {
621 result = gnat_protect_expr (result);
622 result = fold_build3 (COND_EXPR, op_type,
623 fold_build2 (GE_EXPR, boolean_type_node,
624 result, modulus),
625 fold_build2 (MINUS_EXPR, op_type,
626 result, modulus),
627 result);
628 }
629
630 return convert (type, result);
631 }
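
/* A minimal sketch in plain C of the reduction strategy used above for a
   nonbinary modulus (the helper below is hypothetical and not called by the
   compiler): the operation is done in a sufficiently wide unsigned type and
   the result is brought back into range with a compare-and-adjust, except
   for multiplication where a true modulus operation is unavoidable. For
   example, with modulus 7: 5 + 4 = 9, and 9 >= 7, so the result is 9 - 7 = 2. */
#if 0
static unsigned int
nonbinary_modular_add (unsigned int lhs, unsigned int rhs, unsigned int modulus)
{
  /* One extra bit of precision is enough for the addition. */
  unsigned int result = lhs + rhs;
  return result >= modulus ? result - modulus : result;
}
#endif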
632
633 /* This page contains routines that implement the Ada semantics with regard
634 to atomic objects. They are fully piggybacked on the middle-end support
635 for atomic loads and stores.
636
637 *** Memory barriers and volatile objects ***
638
639 We implement the weakened form of the C.6(16) clause that was introduced
640 in Ada 2012 (AI05-117). Earlier forms of this clause wouldn't have been
641 implementable without significant performance hits on modern platforms.
642
643 We also take advantage of the requirements imposed on shared variables by
644 9.10 (conditions for sequential actions) to have non-erroneous execution
645 and consider that C.6(16) and C.6(17) only prescribe a uniform order of
646 volatile updates with regard to sequential actions, i.e. with regard to
647 reads or updates of atomic objects.
648
649 As such, an update of an atomic object by a task requires that all earlier
650 accesses to volatile objects have completed. Similarly, later accesses to
651 volatile objects cannot be reordered before the update of the atomic object.
652 So, memory barriers both before and after the atomic update are needed.
653
654 For a read of an atomic object, to avoid seeing writes of volatile objects
655 by a task earlier than by the other tasks, a memory barrier is needed before
656 the atomic read. Finally, to avoid reordering later reads or updates of
657 volatile objects to before the atomic read, a barrier is needed after the
658 atomic read.
659
660 So, memory barriers are needed before and after atomic reads and updates.
661 And, in order to simplify the implementation, we use full memory barriers
662 in all cases, i.e. we enforce sequential consistency for atomic accesses. */
663
664 /* Return the size of TYPE, which must be a positive power of 2. */
665
666 unsigned int
667 resolve_atomic_size (tree type)
668 {
669 unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE_UNIT (type));
670
671 if (size == 1 || size == 2 || size == 4 || size == 8 || size == 16)
672 return size;
673
674 /* We shouldn't reach here without having already detected that the size
675 isn't compatible with an atomic access. */
676 gcc_assert (Serious_Errors_Detected);
677
678 return 0;
679 }
680
681 /* Build an atomic load for the underlying atomic object in SRC. SYNC is
682 true if the load requires synchronization. */
683
684 tree
685 build_atomic_load (tree src, bool sync)
686 {
687 tree ptr_type
688 = build_pointer_type
689 (build_qualified_type (void_type_node,
690 TYPE_QUAL_ATOMIC | TYPE_QUAL_VOLATILE));
691 tree mem_model
692 = build_int_cst (integer_type_node,
693 sync ? MEMMODEL_SEQ_CST : MEMMODEL_RELAXED);
694 tree orig_src = src;
695 tree t, addr, val;
696 unsigned int size;
697 int fncode;
698
699 /* Remove conversions to get the address of the underlying object. */
700 src = remove_conversions (src, false);
701 size = resolve_atomic_size (TREE_TYPE (src));
702 if (size == 0)
703 return orig_src;
704
705 fncode = (int) BUILT_IN_ATOMIC_LOAD_N + exact_log2 (size) + 1;
706 t = builtin_decl_implicit ((enum built_in_function) fncode);
707
708 addr = build_unary_op (ADDR_EXPR, ptr_type, src);
709 val = build_call_expr (t, 2, addr, mem_model);
710
711 /* First reinterpret the loaded bits in the original type of the load,
712 then convert to the expected result type. */
713 t = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (src), val);
714 return convert (TREE_TYPE (orig_src), t);
715 }
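
/* As an illustration, for a properly aligned 4-byte atomic object and SYNC
   set to true, the call built above selects the __atomic_load_4 builtin and
   is roughly equivalent to the following C fragment (OBJ is a placeholder),
   the loaded bits then being reinterpreted in the type of the object: */
#if 0
unsigned int raw = __atomic_load_4 (&obj, __ATOMIC_SEQ_CST);
#endif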
716
717 /* Build an atomic store from SRC to the underlying atomic object in DEST.
718 SYNC is true if the store requires synchronization. */
719
720 tree
721 build_atomic_store (tree dest, tree src, bool sync)
722 {
723 tree ptr_type
724 = build_pointer_type
725 (build_qualified_type (void_type_node,
726 TYPE_QUAL_ATOMIC | TYPE_QUAL_VOLATILE));
727 tree mem_model
728 = build_int_cst (integer_type_node,
729 sync ? MEMMODEL_SEQ_CST : MEMMODEL_RELAXED);
730 tree orig_dest = dest;
731 tree t, int_type, addr;
732 unsigned int size;
733 int fncode;
734
735 /* Remove conversions to get the address of the underlying object. */
736 dest = remove_conversions (dest, false);
737 size = resolve_atomic_size (TREE_TYPE (dest));
738 if (size == 0)
739 return build_binary_op (MODIFY_EXPR, NULL_TREE, orig_dest, src);
740
741 fncode = (int) BUILT_IN_ATOMIC_STORE_N + exact_log2 (size) + 1;
742 t = builtin_decl_implicit ((enum built_in_function) fncode);
743 int_type = gnat_type_for_size (BITS_PER_UNIT * size, 1);
744
745 /* First convert the bits to be stored to the original type of the store,
746 then reinterpret them in the effective type. But if the original type
747 is a padded type with the same size, convert to the inner type instead,
748 as we don't want to artificially introduce a CONSTRUCTOR here. */
749 if (TYPE_IS_PADDING_P (TREE_TYPE (dest))
750 && TYPE_SIZE (TREE_TYPE (dest))
751 == TYPE_SIZE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (dest)))))
752 src = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (dest))), src);
753 else
754 src = convert (TREE_TYPE (dest), src);
755 src = fold_build1 (VIEW_CONVERT_EXPR, int_type, src);
756 addr = build_unary_op (ADDR_EXPR, ptr_type, dest);
757
758 return build_call_expr (t, 3, addr, src, mem_model);
759 }
760
761 /* Build a load-modify-store sequence from SRC to DEST. GNAT_NODE is used for
762 the location of the sequence. Note that, even though the load and the store
763 are both atomic, the sequence itself is not atomic. */
764
765 tree
766 build_load_modify_store (tree dest, tree src, Node_Id gnat_node)
767 {
768 /* We will be modifying DEST below so we build a copy. */
769 dest = copy_node (dest);
770 tree ref = dest;
771
772 while (handled_component_p (ref))
773 {
774 /* The load should already have been generated during the translation
775 of the GNAT destination tree; find it in the GNU tree. */
776 if (TREE_CODE (TREE_OPERAND (ref, 0)) == VIEW_CONVERT_EXPR)
777 {
778 tree op = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
779 if (TREE_CODE (op) == CALL_EXPR && call_is_atomic_load (op))
780 {
781 tree type = TREE_TYPE (TREE_OPERAND (ref, 0));
782 tree t = CALL_EXPR_ARG (op, 0);
783 tree obj, temp, stmt;
784
785 /* Find the loaded object. */
786 if (TREE_CODE (t) == NOP_EXPR)
787 t = TREE_OPERAND (t, 0);
788 if (TREE_CODE (t) == ADDR_EXPR)
789 obj = TREE_OPERAND (t, 0);
790 else
791 obj = build1 (INDIRECT_REF, type, t);
792
793 /* Drop atomic and volatile qualifiers for the temporary. */
794 type = TYPE_MAIN_VARIANT (type);
795
796 /* And drop BLKmode, if need be, to put it into a register. */
797 if (TYPE_MODE (type) == BLKmode)
798 {
799 unsigned int size = tree_to_uhwi (TYPE_SIZE (type));
800 type = copy_type (type);
801 machine_mode mode = int_mode_for_size (size, 0).else_blk ();
802 SET_TYPE_MODE (type, mode);
803 }
804
805 /* Create the temporary by inserting a SAVE_EXPR. */
806 temp = build1 (SAVE_EXPR, type,
807 build1 (VIEW_CONVERT_EXPR, type, op));
808 TREE_OPERAND (ref, 0) = temp;
809
810 start_stmt_group ();
811
812 /* Build the modify of the temporary. */
813 stmt = build_binary_op (MODIFY_EXPR, NULL_TREE, dest, src);
814 add_stmt_with_node (stmt, gnat_node);
815
816 /* Build the store to the object. */
817 stmt = build_atomic_store (obj, temp, false);
818 add_stmt_with_node (stmt, gnat_node);
819
820 return end_stmt_group ();
821 }
822 }
823
824 TREE_OPERAND (ref, 0) = copy_node (TREE_OPERAND (ref, 0));
825 ref = TREE_OPERAND (ref, 0);
826 }
827
828 /* Something went wrong earlier if we have not found the atomic load. */
829 gcc_unreachable ();
830 }
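
/* The statement group built above therefore has the following overall shape,
   in C-like pseudo-code with placeholder names (OBJ is the atomic object and
   DEST a component of it):

     temp = <atomic load of OBJ already present in DEST>;
     temp.component = SRC;
     <atomic store of temp back into OBJ>;

   which is why the sequence as a whole is not atomic. */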
831
832 /* Make a binary operation of kind OP_CODE. RESULT_TYPE is the type
833 desired for the result. Usually the operation is to be performed
834 in that type. For INIT_EXPR and MODIFY_EXPR, RESULT_TYPE must be
835 NULL_TREE. For ARRAY_REF, RESULT_TYPE may be NULL_TREE, in which
836 case the type to be used will be derived from the operands.
837 Don't fold the result if NO_FOLD is true.
838
839 This function is very much unlike the ones for C and C++ since we
840 have already done any type conversion and matching required. All we
841 have to do here is validate the work done by SEM and handle subtypes. */
842
843 tree
844 build_binary_op (enum tree_code op_code, tree result_type,
845 tree left_operand, tree right_operand,
846 bool no_fold)
847 {
848 tree left_type = TREE_TYPE (left_operand);
849 tree right_type = TREE_TYPE (right_operand);
850 tree left_base_type = get_base_type (left_type);
851 tree right_base_type = get_base_type (right_type);
852 tree operation_type = result_type;
853 tree best_type = NULL_TREE;
854 tree modulus, result;
855 bool has_side_effects = false;
856
857 if (operation_type
858 && TREE_CODE (operation_type) == RECORD_TYPE
859 && TYPE_JUSTIFIED_MODULAR_P (operation_type))
860 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
861
862 if (operation_type && TYPE_IS_EXTRA_SUBTYPE_P (operation_type))
863 operation_type = get_base_type (operation_type);
864
865 modulus = (operation_type
866 && TREE_CODE (operation_type) == INTEGER_TYPE
867 && TYPE_MODULAR_P (operation_type)
868 ? TYPE_MODULUS (operation_type) : NULL_TREE);
869
870 switch (op_code)
871 {
872 case INIT_EXPR:
873 case MODIFY_EXPR:
874 gcc_checking_assert (!result_type);
875
876 /* If there were integral or pointer conversions on the LHS, remove
877 them; we'll be putting them back below if needed. Likewise for
878 conversions between record types, except for justified modular types.
879 But don't do this if the right operand is not BLKmode (for packed
880 arrays) unless we are not changing the mode. */
881 while ((CONVERT_EXPR_P (left_operand)
882 || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
883 && (((INTEGRAL_TYPE_P (left_type)
884 || POINTER_TYPE_P (left_type))
885 && (INTEGRAL_TYPE_P (operand_type (left_operand))
886 || POINTER_TYPE_P (operand_type (left_operand))))
887 || (TREE_CODE (left_type) == RECORD_TYPE
888 && !TYPE_JUSTIFIED_MODULAR_P (left_type)
889 && TREE_CODE (operand_type (left_operand)) == RECORD_TYPE
890 && (TYPE_MODE (right_type) == BLKmode
891 || TYPE_MODE (left_type)
892 == TYPE_MODE (operand_type (left_operand))))))
893 {
894 left_operand = TREE_OPERAND (left_operand, 0);
895 left_type = TREE_TYPE (left_operand);
896 }
897
898 /* If a class-wide type may be involved, force use of the RHS type. */
899 if ((TREE_CODE (right_type) == RECORD_TYPE
900 || TREE_CODE (right_type) == UNION_TYPE)
901 && TYPE_ALIGN_OK (right_type))
902 operation_type = right_type;
903
904 /* If we are copying between padded objects with compatible types, use
905 the padded view of the objects; this is very likely more efficient.
906 Likewise for a padded object that is assigned a constructor, if we
907 can convert the constructor to the inner type, to avoid putting a
908 VIEW_CONVERT_EXPR on the LHS. But don't do so if we wouldn't have
909 actually copied anything. */
910 else if (TYPE_IS_PADDING_P (left_type)
911 && TREE_CONSTANT (TYPE_SIZE (left_type))
912 && ((TREE_CODE (right_operand) == COMPONENT_REF
913 && TYPE_MAIN_VARIANT (left_type)
914 == TYPE_MAIN_VARIANT (operand_type (right_operand)))
915 || (TREE_CODE (right_operand) == CONSTRUCTOR
916 && !CONTAINS_PLACEHOLDER_P
917 (DECL_SIZE (TYPE_FIELDS (left_type)))))
918 && !integer_zerop (TYPE_SIZE (right_type)))
919 {
920 /* We make an exception for a BLKmode type padding a non-BLKmode
921 inner type and do the conversion of the LHS right away, since
922 unchecked_convert wouldn't do it properly. */
923 if (TYPE_MODE (left_type) == BLKmode
924 && TYPE_MODE (right_type) != BLKmode
925 && TREE_CODE (right_operand) != CONSTRUCTOR)
926 {
927 operation_type = right_type;
928 left_operand = convert (operation_type, left_operand);
929 left_type = operation_type;
930 }
931 else
932 operation_type = left_type;
933 }
934
935 /* If we have a call to a function whose return type has variable size, use
936 the RHS type in case we want to use the return slot optimization. */
937 else if (TREE_CODE (right_operand) == CALL_EXPR
938 && return_type_with_variable_size_p (right_type))
939 operation_type = right_type;
940
941 /* Find the best type to use for copying between aggregate types. */
942 else if (((TREE_CODE (left_type) == ARRAY_TYPE
943 && TREE_CODE (right_type) == ARRAY_TYPE)
944 || (TREE_CODE (left_type) == RECORD_TYPE
945 && TREE_CODE (right_type) == RECORD_TYPE))
946 && (best_type = find_common_type (left_type, right_type)))
947 operation_type = best_type;
948
949 /* Otherwise use the LHS type. */
950 else
951 operation_type = left_type;
952
953 /* Ensure everything on the LHS is valid. If we have a field reference,
954 strip anything that get_inner_reference can handle. Then remove any
955 conversions between types having the same code and mode. And mark
956 VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE. When done, we must have
957 either an INDIRECT_REF, a NULL_EXPR, a SAVE_EXPR or a DECL node. */
958 result = left_operand;
959 while (true)
960 {
961 tree restype = TREE_TYPE (result);
962
963 if (TREE_CODE (result) == COMPONENT_REF
964 || TREE_CODE (result) == ARRAY_REF
965 || TREE_CODE (result) == ARRAY_RANGE_REF)
966 while (handled_component_p (result))
967 result = TREE_OPERAND (result, 0);
968
969 else if (TREE_CODE (result) == REALPART_EXPR
970 || TREE_CODE (result) == IMAGPART_EXPR
971 || (CONVERT_EXPR_P (result)
972 && (((TREE_CODE (restype)
973 == TREE_CODE (operand_type (result))
974 && TYPE_MODE (restype)
975 == TYPE_MODE (operand_type (result))))
976 || TYPE_ALIGN_OK (restype))))
977 result = TREE_OPERAND (result, 0);
978
979 else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
980 {
981 TREE_ADDRESSABLE (result) = 1;
982 result = TREE_OPERAND (result, 0);
983 }
984
985 else
986 break;
987 }
988
989 gcc_assert (TREE_CODE (result) == INDIRECT_REF
990 || TREE_CODE (result) == NULL_EXPR
991 || TREE_CODE (result) == SAVE_EXPR
992 || DECL_P (result));
993
994 /* Convert the right operand to the operation type unless it is
995 either already of the correct type or if the type involves a
996 placeholder, since the RHS may not have the same record type. */
997 if (operation_type != right_type
998 && !CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type)))
999 {
1000 right_operand = convert (operation_type, right_operand);
1001 right_type = operation_type;
1002 }
1003
1004 /* If the left operand is not of the same type as the operation
1005 type, wrap it up in a VIEW_CONVERT_EXPR. */
1006 if (left_type != operation_type)
1007 left_operand = unchecked_convert (operation_type, left_operand, false);
1008
1009 has_side_effects = true;
1010 modulus = NULL_TREE;
1011 break;
1012
1013 case ARRAY_REF:
1014 if (!operation_type)
1015 operation_type = TREE_TYPE (left_type);
1016
1017 /* ... fall through ... */
1018
1019 case ARRAY_RANGE_REF:
1020 /* First look through a conversion between type variants. Note that
1021 this changes neither the operation type nor the type domain. */
1022 if (TREE_CODE (left_operand) == VIEW_CONVERT_EXPR
1023 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (left_operand, 0)))
1024 == TYPE_MAIN_VARIANT (left_type))
1025 {
1026 left_operand = TREE_OPERAND (left_operand, 0);
1027 left_type = TREE_TYPE (left_operand);
1028 }
1029
1030 /* For a range, make sure the element type is consistent. */
1031 if (op_code == ARRAY_RANGE_REF
1032 && TREE_TYPE (operation_type) != TREE_TYPE (left_type))
1033 {
1034 operation_type
1035 = build_nonshared_array_type (TREE_TYPE (left_type),
1036 TYPE_DOMAIN (operation_type));
1037 /* Declare it now since it will never be declared otherwise. This
1038 is necessary to ensure that its subtrees are properly marked. */
1039 create_type_decl (TYPE_NAME (operation_type), operation_type, true,
1040 false, Empty);
1041 }
1042
1043 /* Then convert the right operand to its base type. This will prevent
1044 unneeded sign conversions when sizetype is wider than integer. */
1045 right_operand = convert (right_base_type, right_operand);
1046 right_operand = convert_to_index_type (right_operand);
1047 modulus = NULL_TREE;
1048 break;
1049
1050 case TRUTH_ANDIF_EXPR:
1051 case TRUTH_ORIF_EXPR:
1052 case TRUTH_AND_EXPR:
1053 case TRUTH_OR_EXPR:
1054 case TRUTH_XOR_EXPR:
1055 gcc_checking_assert
1056 (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
1057 operation_type = left_base_type;
1058 left_operand = convert (operation_type, left_operand);
1059 right_operand = convert (operation_type, right_operand);
1060 break;
1061
1062 case GE_EXPR:
1063 case LE_EXPR:
1064 case GT_EXPR:
1065 case LT_EXPR:
1066 case EQ_EXPR:
1067 case NE_EXPR:
1068 gcc_checking_assert
1069 (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
1070 /* If either operand is a NULL_EXPR, just return a new one. */
1071 if (TREE_CODE (left_operand) == NULL_EXPR)
1072 return build2 (op_code, result_type,
1073 build1 (NULL_EXPR, integer_type_node,
1074 TREE_OPERAND (left_operand, 0)),
1075 integer_zero_node);
1076
1077 else if (TREE_CODE (right_operand) == NULL_EXPR)
1078 return build2 (op_code, result_type,
1079 build1 (NULL_EXPR, integer_type_node,
1080 TREE_OPERAND (right_operand, 0)),
1081 integer_zero_node);
1082
1083 /* If either object is a justified modular type, get the
1084 fields from within. */
1085 if (TREE_CODE (left_type) == RECORD_TYPE
1086 && TYPE_JUSTIFIED_MODULAR_P (left_type))
1087 {
1088 left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
1089 left_operand);
1090 left_type = TREE_TYPE (left_operand);
1091 left_base_type = get_base_type (left_type);
1092 }
1093
1094 if (TREE_CODE (right_type) == RECORD_TYPE
1095 && TYPE_JUSTIFIED_MODULAR_P (right_type))
1096 {
1097 right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
1098 right_operand);
1099 right_type = TREE_TYPE (right_operand);
1100 right_base_type = get_base_type (right_type);
1101 }
1102
1103 /* If both objects are arrays, compare them specially. */
1104 if ((TREE_CODE (left_type) == ARRAY_TYPE
1105 || (TREE_CODE (left_type) == INTEGER_TYPE
1106 && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
1107 && (TREE_CODE (right_type) == ARRAY_TYPE
1108 || (TREE_CODE (right_type) == INTEGER_TYPE
1109 && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
1110 {
1111 result = compare_arrays (input_location,
1112 result_type, left_operand, right_operand);
1113 if (op_code == NE_EXPR)
1114 result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
1115 else
1116 gcc_assert (op_code == EQ_EXPR);
1117
1118 return result;
1119 }
1120
1121 /* Otherwise, the base types must be the same, unless they are both (fat)
1122 pointer types or record types. In the latter case, use the best type
1123 and convert both operands to that type. */
1124 if (left_base_type != right_base_type)
1125 {
1126 if (TYPE_IS_FAT_POINTER_P (left_base_type)
1127 && TYPE_IS_FAT_POINTER_P (right_base_type))
1128 {
1129 gcc_assert (TYPE_MAIN_VARIANT (left_base_type)
1130 == TYPE_MAIN_VARIANT (right_base_type));
1131 best_type = left_base_type;
1132 }
1133
1134 else if (POINTER_TYPE_P (left_base_type)
1135 && POINTER_TYPE_P (right_base_type))
1136 {
1137 /* Anonymous access types in Ada 2005 can point to different
1138 members of a tagged type hierarchy. */
1139 gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (left_base_type))
1140 == TYPE_MAIN_VARIANT (TREE_TYPE (right_base_type))
1141 || (TYPE_ALIGN_OK (TREE_TYPE (left_base_type))
1142 && TYPE_ALIGN_OK (TREE_TYPE (right_base_type))));
1143 best_type = left_base_type;
1144 }
1145
1146 else if (TREE_CODE (left_base_type) == RECORD_TYPE
1147 && TREE_CODE (right_base_type) == RECORD_TYPE)
1148 {
1149 /* The only way this is permitted is if both types have the same
1150 name. In that case, one of them must not be self-referential;
1151 use it as the best type, preferably one with a fixed size. */
1152 gcc_assert (TYPE_NAME (left_base_type)
1153 && TYPE_NAME (left_base_type)
1154 == TYPE_NAME (right_base_type));
1155
1156 if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
1157 best_type = left_base_type;
1158 else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
1159 best_type = right_base_type;
1160 else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
1161 best_type = left_base_type;
1162 else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
1163 best_type = right_base_type;
1164 else
1165 gcc_unreachable ();
1166 }
1167
1168 else
1169 gcc_unreachable ();
1170
1171 left_operand = convert (best_type, left_operand);
1172 right_operand = convert (best_type, right_operand);
1173 }
1174 else
1175 {
1176 left_operand = convert (left_base_type, left_operand);
1177 right_operand = convert (right_base_type, right_operand);
1178 }
1179
1180 /* If both objects are fat pointers, compare them specially. */
1181 if (TYPE_IS_FAT_POINTER_P (left_base_type))
1182 {
1183 result
1184 = compare_fat_pointers (input_location,
1185 result_type, left_operand, right_operand);
1186 if (op_code == NE_EXPR)
1187 result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
1188 else
1189 gcc_assert (op_code == EQ_EXPR);
1190
1191 return result;
1192 }
1193
1194 modulus = NULL_TREE;
1195 break;
1196
1197 case LSHIFT_EXPR:
1198 case RSHIFT_EXPR:
1199 case LROTATE_EXPR:
1200 case RROTATE_EXPR:
1201 /* The RHS of a shift can be any type. Also, ignore any modulus
1202 (we used to abort, but this is needed for unchecked conversion
1203 to modular types). Otherwise, processing is the same as normal. */
1204 gcc_assert (operation_type == left_base_type);
1205 modulus = NULL_TREE;
1206 left_operand = convert (operation_type, left_operand);
1207 break;
1208
1209 case BIT_AND_EXPR:
1210 case BIT_IOR_EXPR:
1211 case BIT_XOR_EXPR:
1212 /* For binary modulus, if the inputs are in range, so are the
1213 outputs. */
1214 if (modulus && integer_pow2p (modulus))
1215 modulus = NULL_TREE;
1216 goto common;
1217
1218 case COMPLEX_EXPR:
1219 gcc_assert (TREE_TYPE (result_type) == left_base_type
1220 && TREE_TYPE (result_type) == right_base_type);
1221 left_operand = convert (left_base_type, left_operand);
1222 right_operand = convert (right_base_type, right_operand);
1223 break;
1224
1225 case TRUNC_DIV_EXPR: case TRUNC_MOD_EXPR:
1226 case CEIL_DIV_EXPR: case CEIL_MOD_EXPR:
1227 case FLOOR_DIV_EXPR: case FLOOR_MOD_EXPR:
1228 case ROUND_DIV_EXPR: case ROUND_MOD_EXPR:
1229 /* These always produce results lower than either operand. */
1230 modulus = NULL_TREE;
1231 goto common;
1232
1233 case POINTER_PLUS_EXPR:
1234 gcc_assert (operation_type == left_base_type
1235 && sizetype == right_base_type);
1236 left_operand = convert (operation_type, left_operand);
1237 right_operand = convert (sizetype, right_operand);
1238 break;
1239
1240 case PLUS_NOMOD_EXPR:
1241 case MINUS_NOMOD_EXPR:
1242 if (op_code == PLUS_NOMOD_EXPR)
1243 op_code = PLUS_EXPR;
1244 else
1245 op_code = MINUS_EXPR;
1246 modulus = NULL_TREE;
1247
1248 /* ... fall through ... */
1249
1250 case PLUS_EXPR:
1251 case MINUS_EXPR:
1252 /* Avoid doing arithmetic in ENUMERAL_TYPE or BOOLEAN_TYPE like the
1253 other compilers. Unlike C, Ada doesn't allow arithmetic in
1254 these types but can generate addition/subtraction for Succ/Pred. */
1255 if (operation_type
1256 && (TREE_CODE (operation_type) == ENUMERAL_TYPE
1257 || TREE_CODE (operation_type) == BOOLEAN_TYPE))
1258 operation_type = left_base_type = right_base_type
1259 = gnat_type_for_mode (TYPE_MODE (operation_type),
1260 TYPE_UNSIGNED (operation_type));
1261
1262 /* ... fall through ... */
1263
1264 default:
1265 common:
1266 /* The result type should be the same as the base types of
1267 both operands (and they should be the same). Convert
1268 everything to the result type. */
1269
1270 gcc_assert (operation_type == left_base_type
1271 && left_base_type == right_base_type);
1272 left_operand = convert (operation_type, left_operand);
1273 right_operand = convert (operation_type, right_operand);
1274 }
1275
1276 if (modulus && !integer_pow2p (modulus))
1277 {
1278 result = nonbinary_modular_operation (op_code, operation_type,
1279 left_operand, right_operand);
1280 modulus = NULL_TREE;
1281 }
1282 /* If either operand is a NULL_EXPR, just return a new one. */
1283 else if (TREE_CODE (left_operand) == NULL_EXPR)
1284 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
1285 else if (TREE_CODE (right_operand) == NULL_EXPR)
1286 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
1287 else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
1288 {
1289 result = build4 (op_code, operation_type, left_operand, right_operand,
1290 NULL_TREE, NULL_TREE);
1291 if (!no_fold)
1292 result = fold (result);
1293 }
1294 else if (op_code == INIT_EXPR || op_code == MODIFY_EXPR)
1295 result = build2 (op_code, void_type_node, left_operand, right_operand);
1296 else if (no_fold)
1297 result = build2 (op_code, operation_type, left_operand, right_operand);
1298 else
1299 result
1300 = fold_build2 (op_code, operation_type, left_operand, right_operand);
1301
1302 if (TREE_CONSTANT (result))
1303 ;
1304 else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
1305 {
1306 if (TYPE_VOLATILE (operation_type))
1307 TREE_THIS_VOLATILE (result) = 1;
1308 }
1309 else if (TREE_CONSTANT (left_operand) && TREE_CONSTANT (right_operand))
1310 TREE_CONSTANT (result) = 1;
1311
1312 if (has_side_effects)
1313 TREE_SIDE_EFFECTS (result) = 1;
1314
1315 /* If we are working with modular types, perform the MOD operation
1316 if something above hasn't eliminated the need for it. */
1317 if (modulus)
1318 {
1319 modulus = convert (operation_type, modulus);
1320 if (no_fold)
1321 result = build2 (FLOOR_MOD_EXPR, operation_type, result, modulus);
1322 else
1323 result = fold_build2 (FLOOR_MOD_EXPR, operation_type, result, modulus);
1324 }
1325
1326 if (result_type && result_type != operation_type)
1327 result = convert (result_type, result);
1328
1329 return result;
1330 }
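
/* Hypothetical usage sketches (the GNU_* names are placeholder trees built
   elsewhere by gigi): an integer addition performed in a base type, and an
   assignment, for which RESULT_TYPE must be NULL_TREE as documented above. */
#if 0
tree gnu_sum
  = build_binary_op (PLUS_EXPR, gnu_base_type, gnu_left, gnu_right, false);
tree gnu_assign
  = build_binary_op (MODIFY_EXPR, NULL_TREE, gnu_target, gnu_value, false);
#endif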
1331
1332 /* Similar, but for unary operations. */
1333
1334 tree
1335 build_unary_op (enum tree_code op_code, tree result_type, tree operand)
1336 {
1337 tree type = TREE_TYPE (operand);
1338 tree base_type = get_base_type (type);
1339 tree operation_type = result_type;
1340 tree result;
1341
1342 if (operation_type
1343 && TREE_CODE (operation_type) == RECORD_TYPE
1344 && TYPE_JUSTIFIED_MODULAR_P (operation_type))
1345 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
1346
1347 if (operation_type
1348 && TREE_CODE (operation_type) == INTEGER_TYPE
1349 && TYPE_EXTRA_SUBTYPE_P (operation_type))
1350 operation_type = get_base_type (operation_type);
1351
1352 switch (op_code)
1353 {
1354 case REALPART_EXPR:
1355 case IMAGPART_EXPR:
1356 if (!operation_type)
1357 result_type = operation_type = TREE_TYPE (type);
1358 else
1359 gcc_assert (result_type == TREE_TYPE (type));
1360
1361 result = fold_build1 (op_code, operation_type, operand);
1362 break;
1363
1364 case TRUTH_NOT_EXPR:
1365 gcc_checking_assert
1366 (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
1367 result = invert_truthvalue_loc (EXPR_LOCATION (operand), operand);
1368 /* When not optimizing, fold the result as invert_truthvalue_loc
1369 doesn't fold the result of comparisons. This is intended to undo
1370 the trick used for boolean rvalues in gnat_to_gnu. */
1371 if (!optimize)
1372 result = fold (result);
1373 break;
1374
1375 case ATTR_ADDR_EXPR:
1376 case ADDR_EXPR:
1377 switch (TREE_CODE (operand))
1378 {
1379 case INDIRECT_REF:
1380 case UNCONSTRAINED_ARRAY_REF:
1381 result = TREE_OPERAND (operand, 0);
1382
1383 /* Make sure the type here is a pointer, not a reference.
1384 GCC wants pointer types for function addresses. */
1385 if (!result_type)
1386 result_type = build_pointer_type (type);
1387
1388 /* If the underlying object can alias everything, propagate the
1389 property since we are effectively retrieving the object. */
1390 if (POINTER_TYPE_P (TREE_TYPE (result))
1391 && TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (result)))
1392 {
1393 if (TREE_CODE (result_type) == POINTER_TYPE
1394 && !TYPE_REF_CAN_ALIAS_ALL (result_type))
1395 result_type
1396 = build_pointer_type_for_mode (TREE_TYPE (result_type),
1397 TYPE_MODE (result_type),
1398 true);
1399 else if (TREE_CODE (result_type) == REFERENCE_TYPE
1400 && !TYPE_REF_CAN_ALIAS_ALL (result_type))
1401 result_type
1402 = build_reference_type_for_mode (TREE_TYPE (result_type),
1403 TYPE_MODE (result_type),
1404 true);
1405 }
1406 break;
1407
1408 case NULL_EXPR:
1409 result = operand;
1410 TREE_TYPE (result) = type = build_pointer_type (type);
1411 break;
1412
1413 case COMPOUND_EXPR:
1414 /* Fold a compound expression if it has unconstrained array type
1415 since the middle-end cannot handle it. But we don't do it in the
1416 general case because it may introduce aliasing issues if the
1417 first operand is an indirect assignment and the second operand
1418 the corresponding address, e.g. for an allocator. However, do
1419 it for a return value to expose it for later recognition. */
1420 if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE
1421 || (TREE_CODE (TREE_OPERAND (operand, 1)) == VAR_DECL
1422 && DECL_RETURN_VALUE_P (TREE_OPERAND (operand, 1))))
1423 {
1424 result = build_unary_op (ADDR_EXPR, result_type,
1425 TREE_OPERAND (operand, 1));
1426 result = build2 (COMPOUND_EXPR, TREE_TYPE (result),
1427 TREE_OPERAND (operand, 0), result);
1428 break;
1429 }
1430 goto common;
1431
1432 case ARRAY_REF:
1433 case ARRAY_RANGE_REF:
1434 case COMPONENT_REF:
1435 case BIT_FIELD_REF:
1436 /* If this is for 'Address, find the address of the prefix and add
1437 the offset to the field. Otherwise, do this the normal way. */
1438 if (op_code == ATTR_ADDR_EXPR)
1439 {
1440 poly_int64 bitsize;
1441 poly_int64 bitpos;
1442 tree offset, inner;
1443 machine_mode mode;
1444 int unsignedp, reversep, volatilep;
1445
1446 inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
1447 &mode, &unsignedp, &reversep,
1448 &volatilep);
1449
1450 /* If INNER is a padding type whose field has a self-referential
1451 size, convert to that inner type. We know the offset is zero
1452 and we need to have that type visible. */
1453 if (type_is_padding_self_referential (TREE_TYPE (inner)))
1454 inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
1455 inner);
1456
1457 /* Compute the offset as a byte offset from INNER. */
1458 if (!offset)
1459 offset = size_zero_node;
1460
1461 offset
1462 = size_binop (PLUS_EXPR, offset,
1463 size_int (bits_to_bytes_round_down (bitpos)));
1464
1465 /* Take the address of INNER, convert it to a pointer to our type
1466 and add the offset. */
1467 inner = build_unary_op (ADDR_EXPR,
1468 build_pointer_type (TREE_TYPE (operand)),
1469 inner);
1470 result = build_binary_op (POINTER_PLUS_EXPR, TREE_TYPE (inner),
1471 inner, offset);
1472 break;
1473 }
1474 goto common;
1475
1476 case CONSTRUCTOR:
1477 /* If this is just a constructor for a padded record, we can
1478 just take the address of the single field and convert it to
1479 a pointer to our type. */
1480 if (TYPE_IS_PADDING_P (type))
1481 {
1482 result
1483 = build_unary_op (ADDR_EXPR,
1484 build_pointer_type (TREE_TYPE (operand)),
1485 CONSTRUCTOR_ELT (operand, 0)->value);
1486 break;
1487 }
1488 goto common;
1489
1490 case NOP_EXPR:
1491 if (AGGREGATE_TYPE_P (type)
1492 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
1493 return build_unary_op (ADDR_EXPR, result_type,
1494 TREE_OPERAND (operand, 0));
1495
1496 /* ... fallthru ... */
1497
1498 case VIEW_CONVERT_EXPR:
1499 /* If this is just a variant conversion or if the conversion doesn't
1500 change the mode, get the result type from this type and go down.
1501 This is needed for conversions of CONST_DECLs, to eventually get
1502 to the address of their CORRESPONDING_VARs. */
1503 if ((TYPE_MAIN_VARIANT (type)
1504 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (operand, 0))))
1505 || (TYPE_MODE (type) != BLKmode
1506 && (TYPE_MODE (type)
1507 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0))))))
1508 return build_unary_op (ADDR_EXPR,
1509 (result_type ? result_type
1510 : build_pointer_type (type)),
1511 TREE_OPERAND (operand, 0));
1512 goto common;
1513
1514 case CONST_DECL:
1515 operand = DECL_CONST_CORRESPONDING_VAR (operand);
1516
1517 /* ... fall through ... */
1518
1519 default:
1520 common:
1521
1522 /* If we are taking the address of a padded record whose field
1523 contains a template, take the address of the field. */
1524 if (TYPE_IS_PADDING_P (type)
1525 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
1526 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
1527 {
1528 type = TREE_TYPE (TYPE_FIELDS (type));
1529 operand = convert (type, operand);
1530 }
1531
1532 gnat_mark_addressable (operand);
1533 result = build_fold_addr_expr (operand);
1534 }
1535
1536 if (TREE_CONSTANT (operand) || staticp (operand))
1537 TREE_CONSTANT (result) = 1;
1538
1539 break;
1540
1541 case INDIRECT_REF:
1542 {
1543 tree t = remove_conversions (operand, false);
1544 bool can_never_be_null = DECL_P (t) && DECL_CAN_NEVER_BE_NULL_P (t);
1545
1546 /* If TYPE is a thin pointer, either first retrieve the base if this
1547 is an expression with an offset built for the initialization of an
1548 object with an unconstrained nominal subtype, or else convert to
1549 the fat pointer. */
1550 if (TYPE_IS_THIN_POINTER_P (type))
1551 {
1552 tree rec_type = TREE_TYPE (type);
1553
1554 if (TREE_CODE (operand) == POINTER_PLUS_EXPR
1555 && TREE_OPERAND (operand, 1)
1556 == byte_position (DECL_CHAIN (TYPE_FIELDS (rec_type)))
1557 && TREE_CODE (TREE_OPERAND (operand, 0)) == NOP_EXPR)
1558 {
1559 operand = TREE_OPERAND (TREE_OPERAND (operand, 0), 0);
1560 type = TREE_TYPE (operand);
1561 }
1562 else if (TYPE_UNCONSTRAINED_ARRAY (rec_type))
1563 {
1564 operand
1565 = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (rec_type)),
1566 operand);
1567 type = TREE_TYPE (operand);
1568 }
1569 }
1570
1571 /* If we want to refer to an unconstrained array, use the appropriate
1572 expression. But this will never survive down to the back-end. */
1573 if (TYPE_IS_FAT_POINTER_P (type))
1574 {
1575 result = build1 (UNCONSTRAINED_ARRAY_REF,
1576 TYPE_UNCONSTRAINED_ARRAY (type), operand);
1577 TREE_READONLY (result)
1578 = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
1579 }
1580
1581 /* If we are dereferencing an ADDR_EXPR, return its operand. */
1582 else if (TREE_CODE (operand) == ADDR_EXPR)
1583 result = TREE_OPERAND (operand, 0);
1584
1585 /* Otherwise, build and fold the indirect reference. */
1586 else
1587 {
1588 result = build_fold_indirect_ref (operand);
1589 TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
1590 }
1591
1592 if (!TYPE_IS_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)))
1593 {
1594 TREE_SIDE_EFFECTS (result) = 1;
1595 if (TREE_CODE (result) == INDIRECT_REF)
1596 TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
1597 }
1598
1599 if ((TREE_CODE (result) == INDIRECT_REF
1600 || TREE_CODE (result) == UNCONSTRAINED_ARRAY_REF)
1601 && can_never_be_null)
1602 TREE_THIS_NOTRAP (result) = 1;
1603
1604 break;
1605 }
1606
1607 case NEGATE_EXPR:
1608 case BIT_NOT_EXPR:
1609 {
1610 tree modulus = ((operation_type
1611 && TREE_CODE (operation_type) == INTEGER_TYPE
1612 && TYPE_MODULAR_P (operation_type))
1613 ? TYPE_MODULUS (operation_type) : NULL_TREE);
1614 int mod_pow2 = modulus && integer_pow2p (modulus);
1615
1616 /* If this is a modular type, there are various possibilities
1617 depending on the operation and whether the modulus is a
1618 power of two or not. */
1619
1620 if (modulus)
1621 {
1622 gcc_assert (operation_type == base_type);
1623 operand = convert (operation_type, operand);
1624
1625 /* The fastest code in the negate case for a binary modulus is
1626 the straightforward one; the TRUNC_MOD_EXPR below amounts
1627 to an AND operation. */
1628 if (op_code == NEGATE_EXPR && mod_pow2)
1629 result = fold_build2 (TRUNC_MOD_EXPR, operation_type,
1630 fold_build1 (NEGATE_EXPR, operation_type,
1631 operand),
1632 modulus);
1633
1634 /* For the nonbinary negate case, return zero for a zero operand,
1635 else return the modulus minus the operand. If the modulus
1636 is a power of two minus one, we can do the subtraction
1637 as an XOR since it is equivalent and faster on most machines. */
1638 else if (op_code == NEGATE_EXPR && !mod_pow2)
1639 {
1640 if (integer_pow2p (fold_build2 (PLUS_EXPR, operation_type,
1641 modulus,
1642 build_int_cst (operation_type,
1643 1))))
1644 result = fold_build2 (BIT_XOR_EXPR, operation_type,
1645 operand, modulus);
1646 else
1647 result = fold_build2 (MINUS_EXPR, operation_type,
1648 modulus, operand);
1649
1650 result = fold_build3 (COND_EXPR, operation_type,
1651 fold_build2 (NE_EXPR,
1652 boolean_type_node,
1653 operand,
1654 build_int_cst
1655 (operation_type, 0)),
1656 result, operand);
1657 }
1658 else
1659 {
1660 /* For the NOT cases, we need a constant equal to
1661 the modulus minus one; for a binary modulus, we XOR
1662 the operand against this constant, and for a nonbinary
1663 modulus, we subtract the operand from it. */
1664
1665 tree cnst = fold_build2 (MINUS_EXPR, operation_type, modulus,
1666 build_int_cst (operation_type, 1));
1667
1668 if (mod_pow2)
1669 result = fold_build2 (BIT_XOR_EXPR, operation_type,
1670 operand, cnst);
1671 else
1672 result = fold_build2 (MINUS_EXPR, operation_type,
1673 cnst, operand);
1674 }
1675
1676 break;
1677 }
1678 }
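/* As a worked illustration of the modular cases above: with a nonbinary
   modulus of 10, negating 3 yields 10 - 3 = 7 (guarded so that 0 stays 0)
   and "not 3" yields (10 - 1) - 3 = 6; with a binary modulus of 8, "not 3"
   is 3 xor 7 = 4 and negation reduces to a masked NEGATE_EXPR, so negating
   3 yields 5. */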
1679
1680 /* ... fall through ... */
1681
1682 default:
1683 gcc_assert (operation_type == base_type);
1684 result = fold_build1 (op_code, operation_type,
1685 convert (operation_type, operand));
1686 }
1687
1688 if (result_type && TREE_TYPE (result) != result_type)
1689 result = convert (result_type, result);
1690
1691 return result;
1692 }
1693
1694 /* Similar, but for COND_EXPR. */
1695
1696 tree
1697 build_cond_expr (tree result_type, tree condition_operand,
1698 tree true_operand, tree false_operand)
1699 {
1700 bool addr_p = false;
1701 tree result;
1702
1703 /* The front-end verified that the result, true and false operands have
1704 the same base type. Convert everything to the result type. */
1705 true_operand = convert (result_type, true_operand);
1706 false_operand = convert (result_type, false_operand);
1707
1708 /* If the result type is unconstrained, take the address of the operands and
1709 then dereference the result. Likewise if the result type is passed by
1710 reference, because creating a temporary of this type is not allowed. */
1711 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1712 || TYPE_IS_BY_REFERENCE_P (result_type)
1713 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1714 {
1715 result_type = build_pointer_type (result_type);
1716 true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
1717 false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
1718 addr_p = true;
1719 }
1720
1721 result = fold_build3 (COND_EXPR, result_type, condition_operand,
1722 true_operand, false_operand);
1723
1724 /* If we have a common SAVE_EXPR (possibly surrounded by arithmetics)
1725 in both arms, make sure it gets evaluated by moving it ahead of the
1726 conditional expression. This is necessary because it is evaluated
1727 in only one place at run time and would otherwise be uninitialized
1728 in one of the arms. */
1729 true_operand = skip_simple_arithmetic (true_operand);
1730 false_operand = skip_simple_arithmetic (false_operand);
1731
1732 if (true_operand == false_operand && TREE_CODE (true_operand) == SAVE_EXPR)
1733 result = build2 (COMPOUND_EXPR, result_type, true_operand, result);
1734
1735 if (addr_p)
1736 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1737
1738 return result;
1739 }
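/* A minimal sketch of the effect of the function above for a result type
   that cannot be copied into a temporary (unconstrained, by-reference or
   of self-referential size): instead of building "cond ? a : b" directly,
   it effectively builds "*(cond ? &a : &b)". */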
1740
1741 /* Similar, but for COMPOUND_EXPR. */
1742
1743 tree
1744 build_compound_expr (tree result_type, tree stmt_operand, tree expr_operand)
1745 {
1746 bool addr_p = false;
1747 tree result;
1748
1749 /* If the result type is unconstrained, take the address of the operand and
1750 then dereference the result. Likewise if the result type is passed by
1751 reference, but this is natively handled in the gimplifier. */
1752 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1753 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1754 {
1755 result_type = build_pointer_type (result_type);
1756 expr_operand = build_unary_op (ADDR_EXPR, result_type, expr_operand);
1757 addr_p = true;
1758 }
1759
1760 result = fold_build2 (COMPOUND_EXPR, result_type, stmt_operand,
1761 expr_operand);
1762
1763 if (addr_p)
1764 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1765
1766 return result;
1767 }
1768
1769 /* Conveniently construct a function call expression. FNDECL names the
1770 function to be called, N is the number of arguments, and the "..."
1771 parameters are the argument expressions. Unlike build_call_expr,
1772 this doesn't fold the call, so it always returns a CALL_EXPR. */
1773
1774 tree
1775 build_call_n_expr (tree fndecl, int n, ...)
1776 {
1777 va_list ap;
1778 tree fntype = TREE_TYPE (fndecl);
1779 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
1780
1781 va_start (ap, n);
1782 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
1783 va_end (ap);
1784 return fn;
1785 }
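/* A typical use, mirroring calls made later in this file:

     gnu_call = build_call_n_expr (gnu_proc, 2, gnu_obj, gnu_size);

   which yields an unfolded CALL_EXPR invoking GNU_PROC on the two
   argument expressions. */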
1786
1787 /* Build a goto to LABEL for a raise, with an optional call to Local_Raise.
1788 MSG gives the exception's identity for the call to Local_Raise, if any. */
1789
1790 static tree
1791 build_goto_raise (Entity_Id gnat_label, int msg)
1792 {
1793 tree gnu_label = gnat_to_gnu_entity (gnat_label, NULL_TREE, false);
1794 tree gnu_result = build1 (GOTO_EXPR, void_type_node, gnu_label);
1795 Entity_Id local_raise = Get_Local_Raise_Call_Entity ();
1796
1797 /* If Local_Raise is present, build Local_Raise (Exception'Identity). */
1798 if (Present (local_raise))
1799 {
1800 tree gnu_local_raise
1801 = gnat_to_gnu_entity (local_raise, NULL_TREE, false);
1802 tree gnu_exception_entity
1803 = gnat_to_gnu_entity (Get_RT_Exception_Entity (msg), NULL_TREE, false);
1804 tree gnu_call
1805 = build_call_n_expr (gnu_local_raise, 1,
1806 build_unary_op (ADDR_EXPR, NULL_TREE,
1807 gnu_exception_entity));
1808 gnu_result
1809 = build2 (COMPOUND_EXPR, void_type_node, gnu_call, gnu_result);
1810 }
1811
1812 TREE_USED (gnu_label) = 1;
1813 return gnu_result;
1814 }
1815
1816 /* Expand the SLOC of GNAT_NODE, if present, into tree location information
1817 pointed to by FILENAME, LINE and COL. Fall back to the current location
1818 if GNAT_NODE is absent or has no SLOC. */
1819
1820 static void
1821 expand_sloc (Node_Id gnat_node, tree *filename, tree *line, tree *col)
1822 {
1823 const char *str;
1824 int line_number, column_number;
1825
1826 if (Debug_Flag_NN || Exception_Locations_Suppressed)
1827 {
1828 str = "";
1829 line_number = 0;
1830 column_number = 0;
1831 }
1832 else if (Present (gnat_node) && Sloc (gnat_node) != No_Location)
1833 {
1834 str = Get_Name_String
1835 (Debug_Source_Name (Get_Source_File_Index (Sloc (gnat_node))));
1836 line_number = Get_Logical_Line_Number (Sloc (gnat_node));
1837 column_number = Get_Column_Number (Sloc (gnat_node));
1838 }
1839 else
1840 {
1841 str = lbasename (LOCATION_FILE (input_location));
1842 line_number = LOCATION_LINE (input_location);
1843 column_number = LOCATION_COLUMN (input_location);
1844 }
1845
1846 const int len = strlen (str);
1847 *filename = build_string (len, str);
1848 TREE_TYPE (*filename) = build_array_type (char_type_node,
1849 build_index_type (size_int (len)));
1850 *line = build_int_cst (NULL_TREE, line_number);
1851 if (col)
1852 *col = build_int_cst (NULL_TREE, column_number);
1853 }
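/* For instance, for a node whose Sloc points at line 10, column 5 of a
   hypothetical unit pack.adb, *FILENAME is the string "pack.adb", *LINE
   is 10 and, if COL is non-null, *COL is 5. */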
1854
1855 /* Build a call to a function that raises an exception and passes file name
1856 and line number, if requested. MSG says which exception function to call.
1857 GNAT_NODE is the node conveying the source location for which the error
1858 should be signaled, or Empty in which case the error is signaled for the
1859 current location. KIND says which kind of exception node this is for,
1860 among N_Raise_{Constraint,Storage,Program}_Error. */
1861
1862 tree
1863 build_call_raise (int msg, Node_Id gnat_node, char kind)
1864 {
1865 Entity_Id gnat_label = get_exception_label (kind);
1866 tree fndecl = gnat_raise_decls[msg];
1867 tree filename, line;
1868
1869 /* If this is to be done as a goto, handle that case. */
1870 if (Present (gnat_label))
1871 return build_goto_raise (gnat_label, msg);
1872
1873 expand_sloc (gnat_node, &filename, &line, NULL);
1874
1875 return
1876 build_call_n_expr (fndecl, 2,
1877 build1 (ADDR_EXPR,
1878 build_pointer_type (char_type_node),
1879 filename),
1880 line);
1881 }
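/* Conceptually, in the non-goto case this returns a call of the form
   RAISE_FN (FILENAME, LINE), where RAISE_FN is gnat_raise_decls[MSG] and
   FILENAME and LINE come from expand_sloc above; the column variant below
   additionally passes the column number. */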
1882
1883 /* Similar to build_call_raise, with extra information about the column
1884 where the check failed. */
1885
1886 tree
1887 build_call_raise_column (int msg, Node_Id gnat_node, char kind)
1888 {
1889 Entity_Id gnat_label = get_exception_label (kind);
1890 tree fndecl = gnat_raise_decls_ext[msg];
1891 tree filename, line, col;
1892
1893 /* If this is to be done as a goto, handle that case. */
1894 if (Present (gnat_label))
1895 return build_goto_raise (gnat_label, msg);
1896
1897 expand_sloc (gnat_node, &filename, &line, &col);
1898
1899 return
1900 build_call_n_expr (fndecl, 3,
1901 build1 (ADDR_EXPR,
1902 build_pointer_type (char_type_node),
1903 filename),
1904 line, col);
1905 }
1906
1907 /* Similar to build_call_raise_column, for an index or range check exception,
1908 with extra information of the form "INDEX out of range FIRST..LAST". */
1909
1910 tree
1911 build_call_raise_range (int msg, Node_Id gnat_node, char kind,
1912 tree index, tree first, tree last)
1913 {
1914 Entity_Id gnat_label = get_exception_label (kind);
1915 tree fndecl = gnat_raise_decls_ext[msg];
1916 tree filename, line, col;
1917
1918 /* If this is to be done as a goto, handle that case. */
1919 if (Present (gnat_label))
1920 return build_goto_raise (gnat_label, msg);
1921
1922 expand_sloc (gnat_node, &filename, &line, &col);
1923
1924 return
1925 build_call_n_expr (fndecl, 6,
1926 build1 (ADDR_EXPR,
1927 build_pointer_type (char_type_node),
1928 filename),
1929 line, col,
1930 convert (integer_type_node, index),
1931 convert (integer_type_node, first),
1932 convert (integer_type_node, last));
1933 }
1934
1935 /* qsort comparer for the bit positions of two constructor elements
1936 for record components. */
1937
1938 static int
1939 compare_elmt_bitpos (const PTR rt1, const PTR rt2)
1940 {
1941 const constructor_elt * const elmt1 = (const constructor_elt *) rt1;
1942 const constructor_elt * const elmt2 = (const constructor_elt *) rt2;
1943 const_tree const field1 = elmt1->index;
1944 const_tree const field2 = elmt2->index;
1945 const int ret
1946 = tree_int_cst_compare (bit_position (field1), bit_position (field2));
1947
1948 return ret ? ret : (int) (DECL_UID (field1) - DECL_UID (field2));
1949 }
1950
1951 /* Return a CONSTRUCTOR of TYPE whose elements are V. */
1952
1953 tree
1954 gnat_build_constructor (tree type, vec<constructor_elt, va_gc> *v)
1955 {
1956 bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
1957 bool read_only = true;
1958 bool side_effects = false;
1959 tree result, obj, val;
1960 unsigned int n_elmts;
1961
1962 /* Scan the elements to see if they are all constant or if any has side
1963 effects, to let us set global flags on the resulting constructor. Count
1964 the elements along the way for possible sorting purposes below. */
1965 FOR_EACH_CONSTRUCTOR_ELT (v, n_elmts, obj, val)
1966 {
1967 /* The predicate must be in keeping with output_constructor and, unlike
1968 initializer_constant_valid_p, we accept "&{...}" because we'll put
1969 the CONSTRUCTOR into the constant pool during gimplification. */
1970 if ((!TREE_CONSTANT (val) && !TREE_STATIC (val))
1971 || (TREE_CODE (type) == RECORD_TYPE
1972 && CONSTRUCTOR_BITFIELD_P (obj)
1973 && !initializer_constant_valid_for_bitfield_p (val))
1974 || (!initializer_constant_valid_p (val,
1975 TREE_TYPE (val),
1976 TYPE_REVERSE_STORAGE_ORDER (type))
1977 && !(TREE_CODE (val) == ADDR_EXPR
1978 && TREE_CODE (TREE_OPERAND (val, 0)) == CONSTRUCTOR
1979 && TREE_CONSTANT (TREE_OPERAND (val, 0)))))
1980 allconstant = false;
1981
1982 if (!TREE_READONLY (val))
1983 read_only = false;
1984
1985 if (TREE_SIDE_EFFECTS (val))
1986 side_effects = true;
1987 }
1988
1989 /* For record types with constant components only, sort field list
1990 by increasing bit position. This is necessary to ensure the
1991 constructor can be output as static data. */
1992 if (allconstant && TREE_CODE (type) == RECORD_TYPE && n_elmts > 1)
1993 v->qsort (compare_elmt_bitpos);
1994
1995 result = build_constructor (type, v);
1996 CONSTRUCTOR_NO_CLEARING (result) = 1;
1997 TREE_CONSTANT (result) = TREE_STATIC (result) = allconstant;
1998 TREE_SIDE_EFFECTS (result) = side_effects;
1999 TREE_READONLY (result) = TYPE_READONLY (type) || read_only || allconstant;
2000 return result;
2001 }
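/* For example, if a RECORD_TYPE constructor lists a component at bit
   position 64 before one at bit position 0, the qsort above reorders the
   two elements so that the constructor can be output as static data. */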
2002
2003 /* Return a COMPONENT_REF to access FIELD in RECORD, or NULL_TREE if the field
2004 is not found in the record. Don't fold the result if NO_FOLD is true. */
2005
2006 static tree
2007 build_simple_component_ref (tree record, tree field, bool no_fold)
2008 {
2009 tree type = TYPE_MAIN_VARIANT (TREE_TYPE (record));
2010 tree ref;
2011
2012 /* The failure of this assertion will very likely come from a missing
2013 insertion of an explicit dereference. */
2014 gcc_assert (RECORD_OR_UNION_TYPE_P (type) && COMPLETE_TYPE_P (type));
2015
2016 /* Try to fold a conversion from another record or union type unless the type
2017 contains a placeholder as it might be needed for a later substitution. */
2018 if (TREE_CODE (record) == VIEW_CONVERT_EXPR
2019 && RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (record, 0)))
2020 && !type_contains_placeholder_p (type))
2021 {
2022 tree op = TREE_OPERAND (record, 0);
2023
2024 /* If this is an unpadding operation, convert the underlying object to
2025 the unpadded type directly. */
2026 if (TYPE_IS_PADDING_P (type) && field == TYPE_FIELDS (type))
2027 return convert (TREE_TYPE (field), op);
2028
2029 /* Otherwise try to access FIELD directly in the underlying type, but
2030 make sure that the form of the reference doesn't change too much;
2031 this can happen for an unconstrained bit-packed array type whose
2032 constrained form can be an integer type. */
2033 ref = build_simple_component_ref (op, field, no_fold);
2034 if (ref && TREE_CODE (TREE_TYPE (ref)) == TREE_CODE (TREE_TYPE (field)))
2035 return ref;
2036 }
2037
2038 /* If this field is not in the specified record, see if we can find a field
2039 in the specified record whose original field is the same as this one. */
2040 if (DECL_CONTEXT (field) != type)
2041 {
2042 tree new_field;
2043
2044 /* First loop through normal components. */
2045 for (new_field = TYPE_FIELDS (type);
2046 new_field;
2047 new_field = DECL_CHAIN (new_field))
2048 if (SAME_FIELD_P (field, new_field))
2049 break;
2050
2051 /* Next, loop through DECL_INTERNAL_P components if we haven't found the
2052 component in the first search. Doing this search in two steps is
2053 required to avoid hidden homonymous fields in the _Parent field. */
2054 if (!new_field)
2055 for (new_field = TYPE_FIELDS (type);
2056 new_field;
2057 new_field = DECL_CHAIN (new_field))
2058 if (DECL_INTERNAL_P (new_field)
2059 && RECORD_OR_UNION_TYPE_P (TREE_TYPE (new_field)))
2060 {
2061 tree field_ref
2062 = build_simple_component_ref (record, new_field, no_fold);
2063 ref = build_simple_component_ref (field_ref, field, no_fold);
2064 if (ref)
2065 return ref;
2066 }
2067
2068 field = new_field;
2069 }
2070
2071 if (!field)
2072 return NULL_TREE;
2073
2074 /* If the field's offset has overflowed, do not try to access it, as doing
2075 so may trigger sanity checks deeper in the back-end. Note that we don't
2076 need to warn since this will be done on trying to declare the object. */
2077 if (TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST
2078 && TREE_OVERFLOW (DECL_FIELD_OFFSET (field)))
2079 return build1 (NULL_EXPR, TREE_TYPE (field),
2080 build_call_raise (SE_Object_Too_Large, Empty,
2081 N_Raise_Storage_Error));
2082
2083 ref = build3 (COMPONENT_REF, TREE_TYPE (field), record, field, NULL_TREE);
2084
2085 if (TREE_READONLY (record)
2086 || TREE_READONLY (field)
2087 || TYPE_READONLY (type))
2088 TREE_READONLY (ref) = 1;
2089
2090 if (TREE_THIS_VOLATILE (record)
2091 || TREE_THIS_VOLATILE (field)
2092 || TYPE_VOLATILE (type))
2093 TREE_THIS_VOLATILE (ref) = 1;
2094
2095 if (no_fold)
2096 return ref;
2097
2098 /* The generic folder may punt in this case because the inner array type
2099 can be self-referential, but folding is in fact not problematic. */
2100 if (TREE_CODE (record) == CONSTRUCTOR
2101 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (record)))
2102 {
2103 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (record);
2104 unsigned HOST_WIDE_INT idx;
2105 tree index, value;
2106 FOR_EACH_CONSTRUCTOR_ELT (elts, idx, index, value)
2107 if (index == field)
2108 return value;
2109 return ref;
2110 }
2111
2112 return fold (ref);
2113 }
2114
2115 /* Likewise, but return NULL_EXPR and generate a Program_Error if the
2116 field is not found in the record. */
2117
2118 tree
2119 build_component_ref (tree record, tree field, bool no_fold)
2120 {
2121 tree ref = build_simple_component_ref (record, field, no_fold);
2122 if (ref)
2123 return ref;
2124
2125 /* The missing field should have been detected in the front-end. */
2126 gigi_checking_assert (false);
2127
2128 /* Assume this is an invalid user field so raise Program_Error. */
2129 return build1 (NULL_EXPR, TREE_TYPE (field),
2130 build_call_raise (PE_Explicit_Raise, Empty,
2131 N_Raise_Program_Error));
2132 }
2133
2134 /* Helper for build_call_alloc_dealloc, with arguments to be interpreted
2135 identically. Process the case where a GNAT_PROC to call is provided. */
2136
2137 static inline tree
2138 build_call_alloc_dealloc_proc (tree gnu_obj, tree gnu_size, tree gnu_type,
2139 Entity_Id gnat_proc, Entity_Id gnat_pool)
2140 {
2141 tree gnu_proc = gnat_to_gnu (gnat_proc);
2142 tree gnu_call;
2143
2144 /* A storage pool's underlying type is a record type (for both predefined
2145 storage pools and GNAT simple storage pools). The secondary stack uses
2146 the same mechanism, but its pool object (SS_Pool) is an integer. */
2147 if (Is_Record_Type (Underlying_Type (Etype (gnat_pool))))
2148 {
2149 /* The size is the third parameter; the alignment parameter
2150 has the same type. */
2151 Entity_Id gnat_size_type
2152 = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
2153 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2154
2155 tree gnu_pool = gnat_to_gnu (gnat_pool);
2156 tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
2157 tree gnu_align = size_int (TYPE_ALIGN (gnu_type) / BITS_PER_UNIT);
2158
2159 gnu_size = convert (gnu_size_type, gnu_size);
2160 gnu_align = convert (gnu_size_type, gnu_align);
2161
2162 /* The first arg is always the address of the storage pool; next
2163 comes the address of the object, for a deallocator, then the
2164 size and alignment. */
2165 if (gnu_obj)
2166 gnu_call = build_call_n_expr (gnu_proc, 4, gnu_pool_addr, gnu_obj,
2167 gnu_size, gnu_align);
2168 else
2169 gnu_call = build_call_n_expr (gnu_proc, 3, gnu_pool_addr,
2170 gnu_size, gnu_align);
2171 }
2172
2173 /* Secondary stack case. */
2174 else
2175 {
2176 /* The size is the second parameter. */
2177 Entity_Id gnat_size_type
2178 = Etype (Next_Formal (First_Formal (gnat_proc)));
2179 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2180
2181 gnu_size = convert (gnu_size_type, gnu_size);
2182
2183 /* For a deallocator, the first arg is the address of the object and
2184 the size comes next; for an allocator, the size is the only arg. */
2185 if (gnu_obj)
2186 gnu_call = build_call_n_expr (gnu_proc, 2, gnu_obj, gnu_size);
2187 else
2188 gnu_call = build_call_n_expr (gnu_proc, 1, gnu_size);
2189 }
2190
2191 return gnu_call;
2192 }
2193
2194 /* Helper for build_call_alloc_dealloc, to build and return an allocator for
2195 DATA_SIZE bytes aimed at containing a DATA_TYPE object, using the default
2196 __gnat_malloc allocator. Honor DATA_TYPE alignments greater than what the
2197 latter offers. */
2198
2199 static inline tree
2200 maybe_wrap_malloc (tree data_size, tree data_type, Node_Id gnat_node)
2201 {
2202 /* When the DATA_TYPE alignment is stricter than what malloc offers
2203 (super-aligned case), we allocate an "aligning" wrapper type and return
2204 the address of its single data field with the malloc's return value
2205 stored just in front. */
2206
2207 unsigned int data_align = TYPE_ALIGN (data_type);
2208 unsigned int system_allocator_alignment
2209 = get_target_system_allocator_alignment () * BITS_PER_UNIT;
2210
2211 tree aligning_type
2212 = ((data_align > system_allocator_alignment)
2213 ? make_aligning_type (data_type, data_align, data_size,
2214 system_allocator_alignment,
2215 POINTER_SIZE / BITS_PER_UNIT,
2216 gnat_node)
2217 : NULL_TREE);
2218
2219 tree size_to_malloc
2220 = aligning_type ? TYPE_SIZE_UNIT (aligning_type) : data_size;
2221
2222 tree malloc_ptr = build_call_n_expr (malloc_decl, 1, size_to_malloc);
2223
2224 if (aligning_type)
2225 {
2226 /* Latch malloc's return value and get a pointer to the aligning field
2227 first. */
2228 tree storage_ptr = gnat_protect_expr (malloc_ptr);
2229
2230 tree aligning_record_addr
2231 = convert (build_pointer_type (aligning_type), storage_ptr);
2232
2233 tree aligning_record
2234 = build_unary_op (INDIRECT_REF, NULL_TREE, aligning_record_addr);
2235
2236 tree aligning_field
2237 = build_component_ref (aligning_record, TYPE_FIELDS (aligning_type),
2238 false);
2239
2240 tree aligning_field_addr
2241 = build_unary_op (ADDR_EXPR, NULL_TREE, aligning_field);
2242
2243 /* Then arrange to store the allocator's return value ahead
2244 and return. */
2245 tree storage_ptr_slot_addr
2246 = build_binary_op (POINTER_PLUS_EXPR, ptr_type_node,
2247 convert (ptr_type_node, aligning_field_addr),
2248 size_int (-(HOST_WIDE_INT) POINTER_SIZE
2249 / BITS_PER_UNIT));
2250
2251 tree storage_ptr_slot
2252 = build_unary_op (INDIRECT_REF, NULL_TREE,
2253 convert (build_pointer_type (ptr_type_node),
2254 storage_ptr_slot_addr));
2255
2256 return
2257 build2 (COMPOUND_EXPR, TREE_TYPE (aligning_field_addr),
2258 build_binary_op (INIT_EXPR, NULL_TREE,
2259 storage_ptr_slot, storage_ptr),
2260 aligning_field_addr);
2261 }
2262 else
2263 return malloc_ptr;
2264 }
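/* Layout sketch for the super-aligned case handled above, with the block
   returned by malloc starting on the left and the value returned to the
   caller being the address of the data field:

       +-----------------+---------------------+------------------------+
       | padding         | saved malloc return | data field aligned on  |
       |                 | value (one pointer) | DATA_TYPE's alignment  |
       +-----------------+---------------------+------------------------+
                                               ^
                                               returned address

   maybe_wrap_free below fetches the saved pointer from just in front of
   the data field in order to release the whole block. */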
2265
2266 /* Helper for build_call_alloc_dealloc, to release a DATA_TYPE object
2267 designated by DATA_PTR using the __gnat_free entry point. */
2268
2269 static inline tree
2270 maybe_wrap_free (tree data_ptr, tree data_type)
2271 {
2272 /* In the regular alignment case, we pass the data pointer straight to free.
2273 In the super-aligned case, we need to retrieve the initial allocator
2274 return value, stored in front of the data block at allocation time. */
2275
2276 unsigned int data_align = TYPE_ALIGN (data_type);
2277 unsigned int system_allocator_alignment
2278 = get_target_system_allocator_alignment () * BITS_PER_UNIT;
2279
2280 tree free_ptr;
2281
2282 if (data_align > system_allocator_alignment)
2283 {
2284 /* DATA_FRONT_PTR (void *)
2285 = (void *)DATA_PTR - sizeof (void *) */
2286 tree data_front_ptr
2287 = build_binary_op
2288 (POINTER_PLUS_EXPR, ptr_type_node,
2289 convert (ptr_type_node, data_ptr),
2290 size_int (-(HOST_WIDE_INT) POINTER_SIZE / BITS_PER_UNIT));
2291
2292 /* FREE_PTR (void *) = *(void **)DATA_FRONT_PTR */
2293 free_ptr
2294 = build_unary_op
2295 (INDIRECT_REF, NULL_TREE,
2296 convert (build_pointer_type (ptr_type_node), data_front_ptr));
2297 }
2298 else
2299 free_ptr = data_ptr;
2300
2301 return build_call_n_expr (free_decl, 1, free_ptr);
2302 }
2303
2304 /* Build a GCC tree to call an allocation or deallocation function.
2305 If GNU_OBJ is nonzero, it is an object to deallocate. Otherwise,
2306 generate an allocator.
2307
2308 GNU_SIZE is the number of bytes to allocate and GNU_TYPE is the contained
2309 object type, used to determine the to-be-honored address alignment.
2310 GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the storage
2311 pool to use. If not present, malloc and free are used. GNAT_NODE is used
2312 to provide an error location for restriction violation messages. */
2313
2314 tree
2315 build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, tree gnu_type,
2316 Entity_Id gnat_proc, Entity_Id gnat_pool,
2317 Node_Id gnat_node)
2318 {
2319 /* Explicit proc to call? This one is assumed to deal with the type
2320 alignment constraints. */
2321 if (Present (gnat_proc))
2322 return build_call_alloc_dealloc_proc (gnu_obj, gnu_size, gnu_type,
2323 gnat_proc, gnat_pool);
2324
2325 /* Otherwise, object to "free" or "malloc" with possible special processing
2326 for alignments stricter than what the default allocator honors. */
2327 else if (gnu_obj)
2328 return maybe_wrap_free (gnu_obj, gnu_type);
2329 else
2330 {
2331 /* Assert that we can no longer be called with this special pool. */
2332 gcc_assert (gnat_pool != -1);
2333
2334 /* Check that we aren't violating the associated restriction. */
2335 if (!(Nkind (gnat_node) == N_Allocator && Comes_From_Source (gnat_node)))
2336 {
2337 Check_No_Implicit_Heap_Alloc (gnat_node);
2338 if (Has_Task (Etype (gnat_node)))
2339 Check_No_Implicit_Task_Alloc (gnat_node);
2340 if (Has_Protected (Etype (gnat_node)))
2341 Check_No_Implicit_Protected_Alloc (gnat_node);
2342 }
2343 return maybe_wrap_malloc (gnu_size, gnu_type, gnat_node);
2344 }
2345 }
2346
2347 /* Build a GCC tree that corresponds to allocating an object of TYPE whose
2348 initial value is INIT, if INIT is nonzero. Convert the expression to
2349 RESULT_TYPE, which must be some pointer type, and return the result.
2350
2351 GNAT_PROC and GNAT_POOL optionally give the procedure to call and
2352 the storage pool to use. GNAT_NODE is used to provide an error
2353 location for restriction violation messages. If IGNORE_INIT_TYPE is
2354 true, ignore the type of INIT for the purpose of determining the size;
2355 this will cause the maximum size to be allocated if TYPE is of
2356 self-referential size. */
2357
2358 tree
2359 build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
2360 Entity_Id gnat_pool, Node_Id gnat_node, bool ignore_init_type)
2361 {
2362 tree size, storage, storage_deref, storage_init;
2363
2364 /* If the initializer, if present, is a NULL_EXPR, just return a new one. */
2365 if (init && TREE_CODE (init) == NULL_EXPR)
2366 return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));
2367
2368 /* If we are just annotating types, also return a NULL_EXPR. */
2369 else if (type_annotate_only)
2370 return build1 (NULL_EXPR, result_type,
2371 build_call_raise (CE_Range_Check_Failed, gnat_node,
2372 N_Raise_Constraint_Error));
2373
2374 /* If the initializer, if present, is a COND_EXPR, deal with each branch. */
2375 else if (init && TREE_CODE (init) == COND_EXPR)
2376 return build3 (COND_EXPR, result_type, TREE_OPERAND (init, 0),
2377 build_allocator (type, TREE_OPERAND (init, 1), result_type,
2378 gnat_proc, gnat_pool, gnat_node,
2379 ignore_init_type),
2380 build_allocator (type, TREE_OPERAND (init, 2), result_type,
2381 gnat_proc, gnat_pool, gnat_node,
2382 ignore_init_type));
2383
2384 /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
2385 sizes of the object and its template. Allocate the whole thing and
2386 fill in the parts that are known. */
2387 else if (TYPE_IS_FAT_OR_THIN_POINTER_P (result_type))
2388 {
2389 tree storage_type
2390 = build_unc_object_type_from_ptr (result_type, type,
2391 get_identifier ("ALLOC"), false);
2392 tree template_type = TREE_TYPE (TYPE_FIELDS (storage_type));
2393 tree storage_ptr_type = build_pointer_type (storage_type);
2394
2395 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
2396 init);
2397
2398 /* If the size overflows, pass -1 so Storage_Error will be raised. */
2399 if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
2400 size = size_int (-1);
2401
2402 storage = build_call_alloc_dealloc (NULL_TREE, size, storage_type,
2403 gnat_proc, gnat_pool, gnat_node);
2404 storage = convert (storage_ptr_type, gnat_protect_expr (storage));
2405 storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2406 TREE_THIS_NOTRAP (storage_deref) = 1;
2407
2408 /* If there is an initializing expression, then make a constructor for
2409 the entire object including the bounds and copy it into the object.
2410 If there is no initializing expression, just set the bounds. */
2411 if (init)
2412 {
2413 vec<constructor_elt, va_gc> *v;
2414 vec_alloc (v, 2);
2415
2416 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (storage_type),
2417 build_template (template_type, type, init));
2418 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (storage_type)),
2419 init);
2420 storage_init
2421 = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref,
2422 gnat_build_constructor (storage_type, v));
2423 }
2424 else
2425 storage_init
2426 = build_binary_op (INIT_EXPR, NULL_TREE,
2427 build_component_ref (storage_deref,
2428 TYPE_FIELDS (storage_type),
2429 false),
2430 build_template (template_type, type, NULL_TREE));
2431
2432 return build2 (COMPOUND_EXPR, result_type,
2433 storage_init, convert (result_type, storage));
2434 }
2435
2436 size = TYPE_SIZE_UNIT (type);
2437
2438 /* If we have an initializing expression, see if its size is simpler
2439 than the size from the type. */
2440 if (!ignore_init_type && init && TYPE_SIZE_UNIT (TREE_TYPE (init))
2441 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
2442 || CONTAINS_PLACEHOLDER_P (size)))
2443 size = TYPE_SIZE_UNIT (TREE_TYPE (init));
2444
2445 /* If the size is still self-referential, reference the initializing
2446 expression, if it is present. If not, this must have been a call
2447 to allocate a library-level object, in which case we just use the
2448 maximum size. */
2449 if (!ignore_init_type && init)
2450 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, init);
2451 else if (CONTAINS_PLACEHOLDER_P (size))
2452 size = max_size (size, true);
2453
2454 /* If the size overflows, pass -1 so Storage_Error will be raised. */
2455 if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
2456 size = size_int (-1);
2457
2458 storage = convert (result_type,
2459 build_call_alloc_dealloc (NULL_TREE, size, type,
2460 gnat_proc, gnat_pool,
2461 gnat_node));
2462
2463 /* If we have an initial value, protect the new address, assign the value
2464 and return the address with a COMPOUND_EXPR. */
2465 if (init)
2466 {
2467 storage = gnat_protect_expr (storage);
2468 storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2469 TREE_THIS_NOTRAP (storage_deref) = 1;
2470 storage_init
2471 = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref, init);
2472 return build2 (COMPOUND_EXPR, result_type, storage_init, storage);
2473 }
2474
2475 return storage;
2476 }
2477
2478 /* Indicate that we need to take the address of T and that it therefore
2479 should not be allocated in a register. Return true if successful. */
2480
2481 bool
2482 gnat_mark_addressable (tree t)
2483 {
2484 while (true)
2485 switch (TREE_CODE (t))
2486 {
2487 case ADDR_EXPR:
2488 case COMPONENT_REF:
2489 case ARRAY_REF:
2490 case ARRAY_RANGE_REF:
2491 case REALPART_EXPR:
2492 case IMAGPART_EXPR:
2493 case VIEW_CONVERT_EXPR:
2494 case NON_LVALUE_EXPR:
2495 CASE_CONVERT:
2496 t = TREE_OPERAND (t, 0);
2497 break;
2498
2499 case COMPOUND_EXPR:
2500 t = TREE_OPERAND (t, 1);
2501 break;
2502
2503 case CONSTRUCTOR:
2504 TREE_ADDRESSABLE (t) = 1;
2505 return true;
2506
2507 case VAR_DECL:
2508 case PARM_DECL:
2509 case RESULT_DECL:
2510 TREE_ADDRESSABLE (t) = 1;
2511 return true;
2512
2513 case FUNCTION_DECL:
2514 TREE_ADDRESSABLE (t) = 1;
2515 return true;
2516
2517 case CONST_DECL:
2518 return DECL_CONST_CORRESPONDING_VAR (t)
2519 && gnat_mark_addressable (DECL_CONST_CORRESPONDING_VAR (t));
2520
2521 default:
2522 return true;
2523 }
2524 }
2525
2526 /* Return true if EXP is a stable expression for the purpose of the functions
2527 below and, therefore, can be returned unmodified by them. We accept things
2528 that are actual constants or that have already been handled. */
2529
2530 static bool
2531 gnat_stable_expr_p (tree exp)
2532 {
2533 enum tree_code code = TREE_CODE (exp);
2534 return TREE_CONSTANT (exp) || code == NULL_EXPR || code == SAVE_EXPR;
2535 }
2536
2537 /* Save EXP for later use or reuse. This is equivalent to save_expr in tree.cc
2538 but we know how to handle our own nodes. */
2539
2540 tree
2541 gnat_save_expr (tree exp)
2542 {
2543 tree type = TREE_TYPE (exp);
2544 enum tree_code code = TREE_CODE (exp);
2545
2546 if (gnat_stable_expr_p (exp))
2547 return exp;
2548
2549 if (code == UNCONSTRAINED_ARRAY_REF)
2550 {
2551 tree t = build1 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)));
2552 TREE_READONLY (t) = TYPE_READONLY (type);
2553 return t;
2554 }
2555
2556 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2557 This may be more efficient, but will also allow us to more easily find
2558 the match for the PLACEHOLDER_EXPR. */
2559 if (code == COMPONENT_REF
2560 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2561 return build3 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)),
2562 TREE_OPERAND (exp, 1), NULL_TREE);
2563
2564 return save_expr (exp);
2565 }
2566
2567 /* Protect EXP for immediate reuse. This is a variant of gnat_save_expr that
2568 is optimized under the assumption that EXP's value doesn't change before
2569 its subsequent reuse(s) except through its potential reevaluation. */
2570
2571 tree
2572 gnat_protect_expr (tree exp)
2573 {
2574 tree type = TREE_TYPE (exp);
2575 enum tree_code code = TREE_CODE (exp);
2576
2577 if (gnat_stable_expr_p (exp))
2578 return exp;
2579
2580 /* If EXP has no side effects, we theoretically don't need to do anything.
2581 However, we may be recursively passed more and more complex expressions
2582 involving checks which will be reused multiple times and eventually be
2583 unshared for gimplification; in order to avoid a complexity explosion
2584 at that point, we protect any expressions more complex than a simple
2585 arithmetic expression. */
2586 if (!TREE_SIDE_EFFECTS (exp))
2587 {
2588 tree inner = skip_simple_arithmetic (exp);
2589 if (!EXPR_P (inner) || REFERENCE_CLASS_P (inner))
2590 return exp;
2591 }
2592
2593 /* If this is a conversion, protect what's inside the conversion. */
2594 if (code == NON_LVALUE_EXPR
2595 || CONVERT_EXPR_CODE_P (code)
2596 || code == VIEW_CONVERT_EXPR)
2597 return build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2598
2599 /* If we're indirectly referencing something, we only need to protect the
2600 address since the data itself can't change in these situations. */
2601 if (code == INDIRECT_REF || code == UNCONSTRAINED_ARRAY_REF)
2602 {
2603 tree t = build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2604 TREE_READONLY (t) = TYPE_READONLY (type);
2605 return t;
2606 }
2607
2608 /* Likewise if we're indirectly referencing part of something. */
2609 if (code == COMPONENT_REF
2610 && TREE_CODE (TREE_OPERAND (exp, 0)) == INDIRECT_REF)
2611 return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
2612 TREE_OPERAND (exp, 1), NULL_TREE);
2613
2614 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2615 This may be more efficient, but will also allow us to more easily find
2616 the match for the PLACEHOLDER_EXPR. */
2617 if (code == COMPONENT_REF
2618 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2619 return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
2620 TREE_OPERAND (exp, 1), NULL_TREE);
2621
2622 /* If this is a fat pointer or a scalar, just make a SAVE_EXPR. Likewise
2623 for a CALL_EXPR as large objects are returned via invisible reference
2624 in most ABIs so the temporary will directly be filled by the callee. */
2625 if (TYPE_IS_FAT_POINTER_P (type)
2626 || !AGGREGATE_TYPE_P (type)
2627 || code == CALL_EXPR)
2628 return save_expr (exp);
2629
2630 /* Otherwise, take the address, protect it and dereference the result. */
2631 return
2632 build_unary_op (INDIRECT_REF, type,
2633 save_expr (build_unary_op (ADDR_EXPR, NULL_TREE, exp)));
2634 }
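/* As an illustration of the last case above: for an aggregate expression
   EXP with side effects that doesn't fall into the previous cases, the
   function returns *SAVE_EXPR (&EXP), so that later reuses of the
   protected expression denote the same object without copying the
   aggregate. */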
2635
2636 /* This is equivalent to stabilize_reference_1 in tree.cc but we take an extra
2637 argument to force evaluation of everything. */
2638
2639 static tree
2640 gnat_stabilize_reference_1 (tree e, void *data)
2641 {
2642 const bool force = *(bool *)data;
2643 enum tree_code code = TREE_CODE (e);
2644 tree type = TREE_TYPE (e);
2645 tree result;
2646
2647 if (gnat_stable_expr_p (e))
2648 return e;
2649
2650 switch (TREE_CODE_CLASS (code))
2651 {
2652 case tcc_exceptional:
2653 case tcc_declaration:
2654 case tcc_comparison:
2655 case tcc_expression:
2656 case tcc_reference:
2657 case tcc_vl_exp:
2658 /* If this is a COMPONENT_REF of a fat pointer, save the entire
2659 fat pointer. This may be more efficient, but will also allow
2660 us to more easily find the match for the PLACEHOLDER_EXPR. */
2661 if (code == COMPONENT_REF
2662 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (e, 0))))
2663 result
2664 = build3 (code, type,
2665 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data),
2666 TREE_OPERAND (e, 1), NULL_TREE);
2667 /* If the expression has side-effects, then encase it in a SAVE_EXPR
2668 so that it will only be evaluated once. */
2669 /* The tcc_reference and tcc_comparison classes could be handled as
2670 below, but it is generally faster to only evaluate them once. */
2671 else if (TREE_SIDE_EFFECTS (e) || force)
2672 return save_expr (e);
2673 else
2674 return e;
2675 break;
2676
2677 case tcc_binary:
2678 /* Recursively stabilize each operand. */
2679 result
2680 = build2 (code, type,
2681 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data),
2682 gnat_stabilize_reference_1 (TREE_OPERAND (e, 1), data));
2683 break;
2684
2685 case tcc_unary:
2686 /* Recursively stabilize each operand. */
2687 result
2688 = build1 (code, type,
2689 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data));
2690 break;
2691
2692 default:
2693 gcc_unreachable ();
2694 }
2695
2696 /* See gnat_rewrite_reference below for the rationale. */
2697 TREE_READONLY (result) = TREE_READONLY (e);
2698 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
2699
2700 if (TREE_SIDE_EFFECTS (e))
2701 TREE_SIDE_EFFECTS (result) = 1;
2702
2703 return result;
2704 }
2705
2706 /* This is equivalent to stabilize_reference in tree.cc but we know how to
2707 handle our own nodes and we take extra arguments. FORCE says whether to
2708 force evaluation of everything in REF. INIT is set to the first arm of
2709 a COMPOUND_EXPR present in REF, if any. */
2710
2711 tree
2712 gnat_stabilize_reference (tree ref, bool force, tree *init)
2713 {
2714 return
2715 gnat_rewrite_reference (ref, gnat_stabilize_reference_1, &force, init);
2716 }
2717
2718 /* Rewrite reference REF and call FUNC on each expression within REF in the
2719 process. DATA is passed unmodified to FUNC. INIT is set to the first
2720 arm of a COMPOUND_EXPR present in REF, if any. */
2721
2722 tree
2723 gnat_rewrite_reference (tree ref, rewrite_fn func, void *data, tree *init)
2724 {
2725 tree type = TREE_TYPE (ref);
2726 enum tree_code code = TREE_CODE (ref);
2727 tree result;
2728
2729 switch (code)
2730 {
2731 case CONST_DECL:
2732 case VAR_DECL:
2733 case PARM_DECL:
2734 case RESULT_DECL:
2735 /* No action is needed in this case. */
2736 return ref;
2737
2738 CASE_CONVERT:
2739 case FLOAT_EXPR:
2740 case FIX_TRUNC_EXPR:
2741 case REALPART_EXPR:
2742 case IMAGPART_EXPR:
2743 case VIEW_CONVERT_EXPR:
2744 result
2745 = build1 (code, type,
2746 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func, data,
2747 init));
2748 break;
2749
2750 case INDIRECT_REF:
2751 case UNCONSTRAINED_ARRAY_REF:
2752 result = build1 (code, type, func (TREE_OPERAND (ref, 0), data));
2753 break;
2754
2755 case COMPONENT_REF:
2756 result = build3 (COMPONENT_REF, type,
2757 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func,
2758 data, init),
2759 TREE_OPERAND (ref, 1), NULL_TREE);
2760 break;
2761
2762 case BIT_FIELD_REF:
2763 result = build3 (BIT_FIELD_REF, type,
2764 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func,
2765 data, init),
2766 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
2767 REF_REVERSE_STORAGE_ORDER (result) = REF_REVERSE_STORAGE_ORDER (ref);
2768 break;
2769
2770 case ARRAY_REF:
2771 case ARRAY_RANGE_REF:
2772 result
2773 = build4 (code, type,
2774 gnat_rewrite_reference (TREE_OPERAND (ref, 0), func, data,
2775 init),
2776 func (TREE_OPERAND (ref, 1), data),
2777 TREE_OPERAND (ref, 2), NULL_TREE);
2778 break;
2779
2780 case COMPOUND_EXPR:
2781 gcc_assert (!*init);
2782 *init = TREE_OPERAND (ref, 0);
2783 /* We expect only the pattern built in Call_to_gnu. */
2784 gcc_assert (DECL_P (TREE_OPERAND (ref, 1))
2785 || (TREE_CODE (TREE_OPERAND (ref, 1)) == COMPONENT_REF
2786 && DECL_P (TREE_OPERAND (TREE_OPERAND (ref, 1), 0))));
2787 return TREE_OPERAND (ref, 1);
2788
2789 case CALL_EXPR:
2790 {
2791 /* This can only be an atomic load. */
2792 gcc_assert (call_is_atomic_load (ref));
2793
2794 /* An atomic load is an INDIRECT_REF of its first argument. */
2795 tree t = CALL_EXPR_ARG (ref, 0);
2796 if (TREE_CODE (t) == NOP_EXPR)
2797 t = TREE_OPERAND (t, 0);
2798 if (TREE_CODE (t) == ADDR_EXPR)
2799 t = build1 (ADDR_EXPR, TREE_TYPE (t),
2800 gnat_rewrite_reference (TREE_OPERAND (t, 0), func, data,
2801 init));
2802 else
2803 t = func (t, data);
2804 t = fold_convert (TREE_TYPE (CALL_EXPR_ARG (ref, 0)), t);
2805
2806 result = build_call_expr (TREE_OPERAND (CALL_EXPR_FN (ref), 0), 2,
2807 t, CALL_EXPR_ARG (ref, 1));
2808 }
2809 break;
2810
2811 case ERROR_MARK:
2812 case NULL_EXPR:
2813 return ref;
2814
2815 default:
2816 gcc_unreachable ();
2817 }
2818
2819 /* TREE_READONLY and TREE_THIS_VOLATILE set on the initial expression may
2820 not be sustained across some paths, such as the one for INDIRECT_REF.
2821
2822 Special care should be taken regarding TREE_SIDE_EFFECTS, because some
2823 paths introduce side-effects where there was none initially (e.g. if a
2824 SAVE_EXPR is built) and we also want to keep track of that. */
2825 TREE_READONLY (result) = TREE_READONLY (ref);
2826 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
2827
2828 if (TREE_SIDE_EFFECTS (ref))
2829 TREE_SIDE_EFFECTS (result) = 1;
2830
2831 if (code == INDIRECT_REF
2832 || code == UNCONSTRAINED_ARRAY_REF
2833 || code == ARRAY_REF
2834 || code == ARRAY_RANGE_REF)
2835 TREE_THIS_NOTRAP (result) = TREE_THIS_NOTRAP (ref);
2836
2837 return result;
2838 }
2839
2840 /* This is equivalent to get_inner_reference in expr.cc but it returns the
2841 ultimate containing object only if the reference (lvalue) is constant,
2842 i.e. if it doesn't depend on the context in which it is evaluated. */
2843
2844 tree
2845 get_inner_constant_reference (tree exp)
2846 {
2847 while (true)
2848 {
2849 switch (TREE_CODE (exp))
2850 {
2851 case BIT_FIELD_REF:
2852 break;
2853
2854 case COMPONENT_REF:
2855 if (!TREE_CONSTANT (DECL_FIELD_OFFSET (TREE_OPERAND (exp, 1))))
2856 return NULL_TREE;
2857 break;
2858
2859 case ARRAY_REF:
2860 case ARRAY_RANGE_REF:
2861 {
2862 if (TREE_OPERAND (exp, 2))
2863 return NULL_TREE;
2864
2865 tree array_type = TREE_TYPE (TREE_OPERAND (exp, 0));
2866 if (!TREE_CONSTANT (TREE_OPERAND (exp, 1))
2867 || !TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (array_type)))
2868 || !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (array_type))))
2869 return NULL_TREE;
2870 }
2871 break;
2872
2873 case REALPART_EXPR:
2874 case IMAGPART_EXPR:
2875 case VIEW_CONVERT_EXPR:
2876 break;
2877
2878 default:
2879 goto done;
2880 }
2881
2882 exp = TREE_OPERAND (exp, 0);
2883 }
2884
2885 done:
2886 return exp;
2887 }
2888
2889 /* Return true if EXPR is the addition or the subtraction of a constant and,
2890 if so, set *ADD to the addend, *CST to the constant and *MINUS_P to true
2891 if this is a subtraction. */
2892
2893 bool
2894 is_simple_additive_expression (tree expr, tree *add, tree *cst, bool *minus_p)
2895 {
2896 /* Skip overflow checks. */
2897 if (TREE_CODE (expr) == COND_EXPR
2898 && TREE_CODE (COND_EXPR_THEN (expr)) == COMPOUND_EXPR
2899 && TREE_CODE (TREE_OPERAND (COND_EXPR_THEN (expr), 0)) == CALL_EXPR
2900 && get_callee_fndecl (TREE_OPERAND (COND_EXPR_THEN (expr), 0))
2901 == gnat_raise_decls[CE_Overflow_Check_Failed])
2902 expr = COND_EXPR_ELSE (expr);
2903
2904 if (TREE_CODE (expr) == PLUS_EXPR)
2905 {
2906 if (TREE_CONSTANT (TREE_OPERAND (expr, 0)))
2907 {
2908 *add = TREE_OPERAND (expr, 1);
2909 *cst = TREE_OPERAND (expr, 0);
2910 *minus_p = false;
2911 return true;
2912 }
2913 else if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
2914 {
2915 *add = TREE_OPERAND (expr, 0);
2916 *cst = TREE_OPERAND (expr, 1);
2917 *minus_p = false;
2918 return true;
2919 }
2920 }
2921 else if (TREE_CODE (expr) == MINUS_EXPR)
2922 {
2923 if (TREE_CONSTANT (TREE_OPERAND (expr, 1)))
2924 {
2925 *add = TREE_OPERAND (expr, 0);
2926 *cst = TREE_OPERAND (expr, 1);
2927 *minus_p = true;
2928 return true;
2929 }
2930 }
2931
2932 return false;
2933 }
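/* For example, given EXPR = N + 4 this sets *ADD to N, *CST to 4 and
   *MINUS_P to false; given EXPR = N - 4 it sets *MINUS_P to true instead;
   and given EXPR = N * 4 it returns false. */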
2934
2935 /* If EXPR is an expression that is invariant in the current function, in the
2936 sense that it can be evaluated anywhere in the function and any number of
2937 times, return EXPR or an equivalent expression. Otherwise return NULL. */
2938
2939 tree
2940 gnat_invariant_expr (tree expr)
2941 {
2942 tree type = TREE_TYPE (expr);
2943 tree add, cst;
2944 bool minus_p;
2945
2946 expr = remove_conversions (expr, false);
2947
2948 /* Look through temporaries created to capture values. */
2949 while ((TREE_CODE (expr) == CONST_DECL
2950 || (TREE_CODE (expr) == VAR_DECL && TREE_READONLY (expr)))
2951 && decl_function_context (expr) == current_function_decl
2952 && DECL_INITIAL (expr))
2953 {
2954 expr = DECL_INITIAL (expr);
2955 /* Look into CONSTRUCTORs built to initialize padded types. */
2956 expr = maybe_padded_object (expr);
2957 expr = remove_conversions (expr, false);
2958 }
2959
2960 /* We are only interested in scalar types at the moment and, even if we may
2961 have gone through padding types in the above loop, we must be back to a
2962 scalar value at this point. */
2963 if (AGGREGATE_TYPE_P (TREE_TYPE (expr)))
2964 return NULL_TREE;
2965
2966 if (TREE_CONSTANT (expr))
2967 return fold_convert (type, expr);
2968
2969 /* Deal with aligning patterns. */
2970 if (TREE_CODE (expr) == BIT_AND_EXPR
2971 && TREE_CONSTANT (TREE_OPERAND (expr, 1)))
2972 {
2973 tree op0 = gnat_invariant_expr (TREE_OPERAND (expr, 0));
2974 if (op0)
2975 return fold_build2 (BIT_AND_EXPR, type, op0, TREE_OPERAND (expr, 1));
2976 else
2977 return NULL_TREE;
2978 }
2979
2980 /* Deal with addition or subtraction of constants. */
2981 if (is_simple_additive_expression (expr, &add, &cst, &minus_p))
2982 {
2983 add = gnat_invariant_expr (add);
2984 if (add)
2985 return
2986 fold_build2 (minus_p ? MINUS_EXPR : PLUS_EXPR, type,
2987 fold_convert (type, add), fold_convert (type, cst));
2988 else
2989 return NULL_TREE;
2990 }
2991
2992 bool invariant_p = false;
2993 tree t = expr;
2994
2995 while (true)
2996 {
2997 switch (TREE_CODE (t))
2998 {
2999 case COMPONENT_REF:
3000 invariant_p |= DECL_INVARIANT_P (TREE_OPERAND (t, 1));
3001 break;
3002
3003 case ARRAY_REF:
3004 case ARRAY_RANGE_REF:
3005 if (!TREE_CONSTANT (TREE_OPERAND (t, 1)) || TREE_OPERAND (t, 2))
3006 return NULL_TREE;
3007 break;
3008
3009 case BIT_FIELD_REF:
3010 case REALPART_EXPR:
3011 case IMAGPART_EXPR:
3012 case VIEW_CONVERT_EXPR:
3013 CASE_CONVERT:
3014 break;
3015
3016 case INDIRECT_REF:
3017 if ((!invariant_p && !TREE_READONLY (t)) || TREE_SIDE_EFFECTS (t))
3018 return NULL_TREE;
3019 invariant_p = false;
3020 break;
3021
3022 default:
3023 goto object;
3024 }
3025
3026 t = TREE_OPERAND (t, 0);
3027 }
3028
3029 object:
3030 if (TREE_SIDE_EFFECTS (t))
3031 return NULL_TREE;
3032
3033 if (TREE_CODE (t) == CONST_DECL
3034 && (DECL_EXTERNAL (t)
3035 || decl_function_context (t) != current_function_decl))
3036 return fold_convert (type, expr);
3037
3038 if (!invariant_p && !TREE_READONLY (t))
3039 return NULL_TREE;
3040
3041 if (TREE_CODE (t) == PARM_DECL)
3042 return fold_convert (type, expr);
3043
3044 if (TREE_CODE (t) == VAR_DECL
3045 && (DECL_EXTERNAL (t)
3046 || decl_function_context (t) != current_function_decl))
3047 return fold_convert (type, expr);
3048
3049 return NULL_TREE;
3050 }