/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                               U T I L S 2                                *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2014, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software;  you can  redistribute it  and/or modify it under *
 * terms of the  GNU General Public License as published  by the Free Soft- *
 * ware  Foundation;  either version 3,  or (at your option) any later ver- *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY;  without even the  implied warranty of MERCHANTABILITY *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for  more details.  You should have received a copy of the GNU General   *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed  by the GNAT team at  New York University. *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "stringpool.h"
#include "varasm.h"
#include "flags.h"
#include "toplev.h"
#include "ggc.h"
#include "tree-inline.h"

#include "ada.h"
#include "types.h"
#include "atree.h"
#include "elists.h"
#include "namet.h"
#include "nlists.h"
#include "snames.h"
#include "stringt.h"
#include "uintp.h"
#include "fe.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"

/* Return the base type of TYPE.  */

tree
get_base_type (tree type)
{
  if (TREE_CODE (type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (type))
    type = TREE_TYPE (TYPE_FIELDS (type));

  while (TREE_TYPE (type)
         && (TREE_CODE (type) == INTEGER_TYPE
             || TREE_CODE (type) == REAL_TYPE))
    type = TREE_TYPE (type);

  return type;
}
\f
/* EXP is a GCC tree representing an address.  See if we can find how
   strictly the object at that address is aligned.  Return that alignment
   in bits.  If we don't know anything about the alignment, return 0.  */

unsigned int
known_alignment (tree exp)
{
  unsigned int this_alignment;
  unsigned int lhs, rhs;

  switch (TREE_CODE (exp))
    {
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Conversions between pointers and integers don't change the alignment
         of the underlying object.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 0));
      break;

    case COMPOUND_EXPR:
      /* The value of a COMPOUND_EXPR is that of its second operand.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* If two addresses are added, the alignment of the result is the
         minimum of the two alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));
      this_alignment = MIN (lhs, rhs);
      break;

    case POINTER_PLUS_EXPR:
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));
      /* If we don't know the alignment of the offset, we assume that
         of the base.  */
      if (rhs == 0)
        this_alignment = lhs;
      else
        this_alignment = MIN (lhs, rhs);
      break;

    case COND_EXPR:
      /* If there is a choice between two values, use the smallest one.  */
      lhs = known_alignment (TREE_OPERAND (exp, 1));
      rhs = known_alignment (TREE_OPERAND (exp, 2));
      this_alignment = MIN (lhs, rhs);
      break;

    case INTEGER_CST:
      {
        unsigned HOST_WIDE_INT c = TREE_INT_CST_LOW (exp);
        /* The first part of this represents the lowest bit in the constant,
           but it is originally in bytes, not bits.  */
        this_alignment = MIN (BITS_PER_UNIT * (c & -c), BIGGEST_ALIGNMENT);
      }
      break;

    case MULT_EXPR:
      /* If we know the alignment of just one side, use it.  Otherwise,
         use the product of the alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));

      if (lhs == 0)
        this_alignment = rhs;
      else if (rhs == 0)
        this_alignment = lhs;
      else
        this_alignment = MIN (lhs * rhs, BIGGEST_ALIGNMENT);
      break;

    case BIT_AND_EXPR:
      /* A bit-and expression is as aligned as the maximum alignment of the
         operands.  We typically get here for a complex lhs and a constant
         negative power of two on the rhs to force an explicit alignment, so
         don't bother looking at the lhs.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case ADDR_EXPR:
      this_alignment = expr_align (TREE_OPERAND (exp, 0));
      break;

    case CALL_EXPR:
      {
        tree t = maybe_inline_call_in_expr (exp);
        if (t)
          return known_alignment (t);
      }

      /* Fall through... */

    default:
      /* For other pointer expressions, we assume that the pointed-to object
         is at least as aligned as the pointed-to type.  Beware that we can
         have a dummy type here (e.g. a Taft Amendment type), for which the
         alignment is meaningless and should be ignored.  */
      if (POINTER_TYPE_P (TREE_TYPE (exp))
          && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp))))
        this_alignment = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
      else
        this_alignment = 0;
      break;
    }

  return this_alignment;
}
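
/* The INTEGER_CST case above relies on the classic two's-complement
   identity that C & -C isolates the lowest set bit of C.  A minimal
   standalone sketch of that computation, outside of GCC trees; the 8
   and 128 below stand in for BITS_PER_UNIT and BIGGEST_ALIGNMENT and
   are illustrative assumptions only.  */
#if 0
static unsigned int
constant_address_alignment (unsigned long long c)
{
  unsigned long long lowest_set_byte = c & -c;   /* lowest set bit, in bytes */

  /* BITS_PER_UNIT * (c & -c), capped at BIGGEST_ALIGNMENT as in the MIN.  */
  if (lowest_set_byte >= 128 / 8)
    return 128;
  return (unsigned int) (8 * lowest_set_byte);
}
#endif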
\f
/* We have a comparison or assignment operation on two types, T1 and T2, which
   are either both array types or both record types.  T1 is assumed to be for
   the left-hand side operand, and T2 for the right-hand side.  Return the
   type that both operands should be converted to for the operation, if any.
   Otherwise return zero.  */

static tree
find_common_type (tree t1, tree t2)
{
  /* ??? As of today, various constructs lead to here with types of different
     sizes even when both are constant (e.g. tagged types, packable vs regular
     component types, padded vs unpadded types, ...).  While some of these
     would better be handled upstream (types should be made consistent before
     calling into build_binary_op), some others are really expected and we
     have to be careful.  */

  /* We must avoid writing more than what the target can hold if this is for
     an assignment, and the case of tagged types is handled in build_binary_op,
     so we use the lhs type if it is known to be smaller or of constant size
     and the rhs type is not, whatever the modes.  We also force t1 in case of
     constant size equality to minimize occurrences of view conversions on the
     lhs of an assignment, except for the case of record types with a variant
     part on the lhs but not on the rhs, to make the conversion simpler.  */
  if (TREE_CONSTANT (TYPE_SIZE (t1))
      && (!TREE_CONSTANT (TYPE_SIZE (t2))
          || tree_int_cst_lt (TYPE_SIZE (t1), TYPE_SIZE (t2))
          || (TYPE_SIZE (t1) == TYPE_SIZE (t2)
              && !(TREE_CODE (t1) == RECORD_TYPE
                   && TREE_CODE (t2) == RECORD_TYPE
                   && get_variant_part (t1) != NULL_TREE
                   && get_variant_part (t2) == NULL_TREE))))
    return t1;

  /* Otherwise, if the lhs type is non-BLKmode, use it.  Note that we know
     that we will not have any alignment problems since, if we did, the
     non-BLKmode type could not have been used.  */
  if (TYPE_MODE (t1) != BLKmode)
    return t1;

  /* If the rhs type is of constant size, use it whatever the modes.  At
     this point it is known to be smaller, or of constant size and the
     lhs type is not.  */
  if (TREE_CONSTANT (TYPE_SIZE (t2)))
    return t2;

  /* Otherwise, if the rhs type is non-BLKmode, use it.  */
  if (TYPE_MODE (t2) != BLKmode)
    return t2;

  /* In this case, both types have variable size and BLKmode.  It's
     probably best to leave the "type mismatch" because changing it
     could cause a bad self-referential reference.  */
  return NULL_TREE;
}
\f
/* Return an expression tree representing an equality comparison of A1 and A2,
   two objects of type ARRAY_TYPE.  The result should be of type RESULT_TYPE.

   Two arrays are equal in one of two ways: (1) if both have zero length in
   some dimension (not necessarily the same dimension) or (2) if the lengths
   in each dimension are equal and the data is equal.  We perform the length
   tests in as efficient a manner as possible.  */

static tree
compare_arrays (location_t loc, tree result_type, tree a1, tree a2)
{
  tree result = convert (result_type, boolean_true_node);
  tree a1_is_null = convert (result_type, boolean_false_node);
  tree a2_is_null = convert (result_type, boolean_false_node);
  tree t1 = TREE_TYPE (a1);
  tree t2 = TREE_TYPE (a2);
  bool a1_side_effects_p = TREE_SIDE_EFFECTS (a1);
  bool a2_side_effects_p = TREE_SIDE_EFFECTS (a2);
  bool length_zero_p = false;

  /* If the operands have side-effects, they need to be evaluated only once
     in spite of the multiple references in the comparison.  */
  if (a1_side_effects_p)
    a1 = gnat_protect_expr (a1);

  if (a2_side_effects_p)
    a2 = gnat_protect_expr (a2);

  /* Process each dimension separately and compare the lengths.  If any
     dimension has a length known to be zero, set LENGTH_ZERO_P to true
     in order to suppress the comparison of the data at the end.  */
  while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
    {
      tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
      tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
      tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
      tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
      tree length1 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub1, lb1),
                                 size_one_node);
      tree length2 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub2, lb2),
                                 size_one_node);
      tree comparison, this_a1_is_null, this_a2_is_null;

      /* If the length of the first array is a constant, swap our operands
         unless the length of the second array is the constant zero.  */
      if (TREE_CODE (length1) == INTEGER_CST && !integer_zerop (length2))
        {
          tree tem;
          bool btem;

          tem = a1, a1 = a2, a2 = tem;
          tem = t1, t1 = t2, t2 = tem;
          tem = lb1, lb1 = lb2, lb2 = tem;
          tem = ub1, ub1 = ub2, ub2 = tem;
          tem = length1, length1 = length2, length2 = tem;
          tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
          btem = a1_side_effects_p, a1_side_effects_p = a2_side_effects_p,
          a2_side_effects_p = btem;
        }

      /* If the length of the second array is the constant zero, we can just
         use the original stored bounds for the first array and see whether
         last < first holds.  */
      if (integer_zerop (length2))
        {
          tree b = get_base_type (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));

          length_zero_p = true;

          ub1
            = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
          lb1
            = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));

          comparison = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
          comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
          if (EXPR_P (comparison))
            SET_EXPR_LOCATION (comparison, loc);

          this_a1_is_null = comparison;
          this_a2_is_null = convert (result_type, boolean_true_node);
        }

      /* Otherwise, if the length is some other constant value, we know that
         this dimension in the second array cannot be superflat, so we can
         just use its length computed from the actual stored bounds.  */
      else if (TREE_CODE (length2) == INTEGER_CST)
        {
          tree b = get_base_type (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));

          ub1
            = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
          lb1
            = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
          /* Note that we know that UB2 and LB2 are constant and hence
             cannot contain a PLACEHOLDER_EXPR.  */
          ub2
            = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2))));
          lb2
            = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2))));

          comparison
            = fold_build2_loc (loc, EQ_EXPR, result_type,
                               build_binary_op (MINUS_EXPR, b, ub1, lb1),
                               build_binary_op (MINUS_EXPR, b, ub2, lb2));
          comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
          if (EXPR_P (comparison))
            SET_EXPR_LOCATION (comparison, loc);

          this_a1_is_null
            = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);

          this_a2_is_null = convert (result_type, boolean_false_node);
        }

      /* Otherwise, compare the computed lengths.  */
      else
        {
          length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
          length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);

          comparison
            = fold_build2_loc (loc, EQ_EXPR, result_type, length1, length2);

          /* If the length expression is of the form (cond ? val : 0), assume
             that cond is equivalent to (length != 0).  That's guaranteed by
             construction of the array types in gnat_to_gnu_entity.  */
          if (TREE_CODE (length1) == COND_EXPR
              && integer_zerop (TREE_OPERAND (length1, 2)))
            this_a1_is_null
              = invert_truthvalue_loc (loc, TREE_OPERAND (length1, 0));
          else
            this_a1_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
                                               length1, size_zero_node);

          /* Likewise for the second array.  */
          if (TREE_CODE (length2) == COND_EXPR
              && integer_zerop (TREE_OPERAND (length2, 2)))
            this_a2_is_null
              = invert_truthvalue_loc (loc, TREE_OPERAND (length2, 0));
          else
            this_a2_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
                                               length2, size_zero_node);
        }

      /* Append expressions for this dimension to the final expressions.  */
      result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
                                result, comparison);

      a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                    this_a1_is_null, a1_is_null);

      a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                    this_a2_is_null, a2_is_null);

      t1 = TREE_TYPE (t1);
      t2 = TREE_TYPE (t2);
    }

  /* Unless the length of some dimension is known to be zero, compare the
     data in the array.  */
  if (!length_zero_p)
    {
      tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));
      tree comparison;

      if (type)
        {
          a1 = convert (type, a1);
          a2 = convert (type, a2);
        }

      comparison = fold_build2_loc (loc, EQ_EXPR, result_type, a1, a2);

      result
        = build_binary_op (TRUTH_ANDIF_EXPR, result_type, result, comparison);
    }

  /* The result is also true if both sizes are zero.  */
  result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
                            build_binary_op (TRUTH_ANDIF_EXPR, result_type,
                                             a1_is_null, a2_is_null),
                            result);

  /* If the operands have side-effects, they need to be evaluated before
     doing the tests above since the place they otherwise would end up
     being evaluated at run time could be wrong.  */
  if (a1_side_effects_p)
    result = build2 (COMPOUND_EXPR, result_type, a1, result);

  if (a2_side_effects_p)
    result = build2 (COMPOUND_EXPR, result_type, a2, result);

  return result;
}
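
/* A minimal standalone sketch (not part of GCC) of the comparison shape
   built above, for a single dimension of int elements.  The struct below
   is a hypothetical flat descriptor, not the GNAT fat pointer layout.  */
#if 0
struct int_array { int lb, ub; const int *data; };

static int
array_equal_1d (struct int_array a1, struct int_array a2)
{
  long len1 = (long) a1.ub - a1.lb + 1;
  long len2 = (long) a2.ub - a2.lb + 1;

  /* Equal if both are empty, or if lengths match and the data matches.  */
  if (len1 <= 0 && len2 <= 0)
    return 1;
  if (len1 != len2)
    return 0;
  for (long i = 0; i < len1; i++)
    if (a1.data[i] != a2.data[i])
      return 0;
  return 1;
}
#endif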

/* Return an expression tree representing an equality comparison of P1 and P2,
   two objects of fat pointer type.  The result should be of type RESULT_TYPE.

   Two fat pointers are equal in one of two ways: (1) if both have a null
   pointer to the array or (2) if they contain the same pair of pointers.
   We perform the comparison in as efficient a manner as possible.  */

static tree
compare_fat_pointers (location_t loc, tree result_type, tree p1, tree p2)
{
  tree p1_array, p2_array, p1_bounds, p2_bounds, same_array, same_bounds;
  tree p1_array_is_null, p2_array_is_null;

  /* If either operand has side-effects, they have to be evaluated only once
     in spite of the multiple references to the operand in the comparison.  */
  p1 = gnat_protect_expr (p1);
  p2 = gnat_protect_expr (p2);

  /* The constant folder doesn't fold fat pointer types so we do it here.  */
  if (TREE_CODE (p1) == CONSTRUCTOR)
    p1_array = (*CONSTRUCTOR_ELTS (p1))[0].value;
  else
    p1_array = build_component_ref (p1, NULL_TREE,
                                    TYPE_FIELDS (TREE_TYPE (p1)), true);

  p1_array_is_null
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array,
                       fold_convert_loc (loc, TREE_TYPE (p1_array),
                                         null_pointer_node));

  if (TREE_CODE (p2) == CONSTRUCTOR)
    p2_array = (*CONSTRUCTOR_ELTS (p2))[0].value;
  else
    p2_array = build_component_ref (p2, NULL_TREE,
                                    TYPE_FIELDS (TREE_TYPE (p2)), true);

  p2_array_is_null
    = fold_build2_loc (loc, EQ_EXPR, result_type, p2_array,
                       fold_convert_loc (loc, TREE_TYPE (p2_array),
                                         null_pointer_node));

  /* If one of the pointers to the array is null, just compare the other.  */
  if (integer_zerop (p1_array))
    return p2_array_is_null;
  else if (integer_zerop (p2_array))
    return p1_array_is_null;

  /* Otherwise, do the fully-fledged comparison.  */
  same_array
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array, p2_array);

  if (TREE_CODE (p1) == CONSTRUCTOR)
    p1_bounds = (*CONSTRUCTOR_ELTS (p1))[1].value;
  else
    p1_bounds
      = build_component_ref (p1, NULL_TREE,
                             DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p1))), true);

  if (TREE_CODE (p2) == CONSTRUCTOR)
    p2_bounds = (*CONSTRUCTOR_ELTS (p2))[1].value;
  else
    p2_bounds
      = build_component_ref (p2, NULL_TREE,
                             DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p2))), true);

  same_bounds
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_bounds, p2_bounds);

  /* P1_ARRAY == P2_ARRAY && (P1_ARRAY == NULL || P1_BOUNDS == P2_BOUNDS).  */
  return build_binary_op (TRUTH_ANDIF_EXPR, result_type, same_array,
                          build_binary_op (TRUTH_ORIF_EXPR, result_type,
                                           p1_array_is_null, same_bounds));
}
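
/* A sketch (not part of GCC) of the final formula above, with a
   hypothetical two-field fat pointer laid out as plain C.  */
#if 0
struct fat_ptr { void *array; void *bounds; };

static int
fat_ptr_equal (struct fat_ptr p1, struct fat_ptr p2)
{
  /* Bounds only matter when the array pointers are non-null and equal.  */
  return p1.array == p2.array
         && (p1.array == 0 || p1.bounds == p2.bounds);
}
#endif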
\f
/* Compute the result of applying OP_CODE to LHS and RHS, where both are of
   type TYPE.  We know that TYPE is a modular type with a nonbinary
   modulus.  */

static tree
nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
                             tree rhs)
{
  tree modulus = TYPE_MODULUS (type);
  unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
  unsigned int precision;
  bool unsignedp = true;
  tree op_type = type;
  tree result;

  /* If this is an addition of a constant, convert it to a subtraction
     of a constant since we can do that faster.  */
  if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
    {
      rhs = fold_build2 (MINUS_EXPR, type, modulus, rhs);
      op_code = MINUS_EXPR;
    }

  /* For the logical operations, we only need PRECISION bits.  For
     addition and subtraction, we need one more and for multiplication we
     need twice as many.  But we never want to make a size smaller than
     our size.  */
  if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
    needed_precision += 1;
  else if (op_code == MULT_EXPR)
    needed_precision *= 2;

  precision = MAX (needed_precision, TYPE_PRECISION (op_type));

  /* Unsigned will do for everything but subtraction.  */
  if (op_code == MINUS_EXPR)
    unsignedp = false;

  /* If our type is the wrong signedness or isn't wide enough, make a new
     type and convert both our operands to it.  */
  if (TYPE_PRECISION (op_type) < precision
      || TYPE_UNSIGNED (op_type) != unsignedp)
    {
      /* Copy the node so we ensure it can be modified to make it modular.  */
      op_type = copy_node (gnat_type_for_size (precision, unsignedp));
      modulus = convert (op_type, modulus);
      SET_TYPE_MODULUS (op_type, modulus);
      TYPE_MODULAR_P (op_type) = 1;
      lhs = convert (op_type, lhs);
      rhs = convert (op_type, rhs);
    }

  /* Do the operation, then we'll fix it up.  */
  result = fold_build2 (op_code, op_type, lhs, rhs);

  /* For multiplication, we have no choice but to do a full modulus
     operation.  However, we want to do this in the narrowest
     possible size.  */
  if (op_code == MULT_EXPR)
    {
      tree div_type = copy_node (gnat_type_for_size (needed_precision, 1));
      modulus = convert (div_type, modulus);
      SET_TYPE_MODULUS (div_type, modulus);
      TYPE_MODULAR_P (div_type) = 1;
      result = convert (op_type,
                        fold_build2 (TRUNC_MOD_EXPR, div_type,
                                     convert (div_type, result), modulus));
    }

  /* For subtraction, add the modulus back if we are negative.  */
  else if (op_code == MINUS_EXPR)
    {
      result = gnat_protect_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
                            fold_build2 (LT_EXPR, boolean_type_node, result,
                                         convert (op_type, integer_zero_node)),
                            fold_build2 (PLUS_EXPR, op_type, result, modulus),
                            result);
    }

  /* For the other operations, subtract the modulus if we are >= it.  */
  else
    {
      result = gnat_protect_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
                            fold_build2 (GE_EXPR, boolean_type_node,
                                         result, modulus),
                            fold_build2 (MINUS_EXPR, op_type,
                                         result, modulus),
                            result);
    }

  return convert (type, result);
}
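
/* A standalone sketch (not part of GCC) of the widen/operate/correct
   scheme above, for addition with a nonbinary modulus M: one extra bit
   of precision holds the intermediate sum, and a single conditional
   subtraction brings the result back into [0, M).  */
#if 0
static unsigned int
nonbinary_mod_add (unsigned int x, unsigned int y, unsigned int m)
{
  unsigned long long sum = (unsigned long long) x + y;  /* widened */
  return (unsigned int) (sum >= m ? sum - m : sum);
}
#endif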
\f
/* This page contains routines that implement the Ada semantics with regard
   to atomic objects.  They are fully piggybacked on the middle-end support
   for atomic loads and stores.

   *** Memory barriers and volatile objects ***

   We implement the weakened form of the C.6(16) clause that was introduced
   in Ada 2012 (AI05-117).  Earlier forms of this clause wouldn't have been
   implementable without significant performance hits on modern platforms.

   We also take advantage of the requirements imposed on shared variables by
   9.10 (conditions for sequential actions) to have non-erroneous execution
   and consider that C.6(16) and C.6(17) only prescribe a uniform order of
   volatile updates with regard to sequential actions, i.e. with regard to
   reads or updates of atomic objects.

   As such, an update of an atomic object by a task requires that all earlier
   accesses to volatile objects have completed.  Similarly, later accesses to
   volatile objects cannot be reordered before the update of the atomic object.
   So, memory barriers both before and after the atomic update are needed.

   For a read of an atomic object, to avoid seeing writes of volatile objects
   by a task earlier than by the other tasks, a memory barrier is needed before
   the atomic read.  Finally, to avoid reordering later reads or updates of
   volatile objects to before the atomic read, a barrier is needed after the
   atomic read.

   So, memory barriers are needed before and after atomic reads and updates.
   And, in order to simplify the implementation, we use full memory barriers
   in all cases, i.e. we enforce sequential consistency for atomic accesses.  */

/* Return the size of TYPE, which must be a positive power of 2.  */

static unsigned int
resolve_atomic_size (tree type)
{
  unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE_UNIT (type));

  if (size == 1 || size == 2 || size == 4 || size == 8 || size == 16)
    return size;

  /* We shouldn't reach here without having already detected that the size
     isn't compatible with an atomic access.  */
  gcc_assert (Serious_Errors_Detected);

  return 0;
}

/* Build an atomic load for the underlying atomic object in SRC.  */

tree
build_atomic_load (tree src)
{
  tree ptr_type
    = build_pointer_type
      (build_qualified_type (void_type_node, TYPE_QUAL_VOLATILE));
  tree mem_model = build_int_cst (integer_type_node, MEMMODEL_SEQ_CST);
  tree orig_src = src;
  tree t, addr, val;
  unsigned int size;
  int fncode;

  /* Remove conversions to get the address of the underlying object.  */
  src = remove_conversions (src, false);
  size = resolve_atomic_size (TREE_TYPE (src));
  if (size == 0)
    return orig_src;

  fncode = (int) BUILT_IN_ATOMIC_LOAD_N + exact_log2 (size) + 1;
  t = builtin_decl_implicit ((enum built_in_function) fncode);

  addr = build_unary_op (ADDR_EXPR, ptr_type, src);
  val = build_call_expr (t, 2, addr, mem_model);

  /* First reinterpret the loaded bits in the original type of the load,
     then convert to the expected result type.  */
  t = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (src), val);
  return convert (TREE_TYPE (orig_src), t);
}
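
/* At the source level, the call built above amounts to one of the sized
   __atomic_load_N builtins with a SEQ_CST memory model.  A sketch (not
   part of GCC) of what gets emitted for a 4-byte atomic object: */
#if 0
static int
atomic_load_example (volatile int *p)
{
  /* Full barrier semantics on both sides, per the page comment above.  */
  return __atomic_load_n (p, __ATOMIC_SEQ_CST);
}
#endif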

/* Build an atomic store from SRC to the underlying atomic object in DEST.  */

tree
build_atomic_store (tree dest, tree src)
{
  tree ptr_type
    = build_pointer_type
      (build_qualified_type (void_type_node, TYPE_QUAL_VOLATILE));
  tree mem_model = build_int_cst (integer_type_node, MEMMODEL_SEQ_CST);
  tree orig_dest = dest;
  tree t, int_type, addr;
  unsigned int size;
  int fncode;

  /* Remove conversions to get the address of the underlying object.  */
  dest = remove_conversions (dest, false);
  size = resolve_atomic_size (TREE_TYPE (dest));
  if (size == 0)
    return build_binary_op (MODIFY_EXPR, NULL_TREE, orig_dest, src);

  fncode = (int) BUILT_IN_ATOMIC_STORE_N + exact_log2 (size) + 1;
  t = builtin_decl_implicit ((enum built_in_function) fncode);
  int_type = gnat_type_for_size (BITS_PER_UNIT * size, 1);

  /* First convert the bits to be stored to the original type of the store,
     then reinterpret them in the effective type.  But if the original type
     is a padded type with the same size, convert to the inner type instead,
     as we don't want to artificially introduce a CONSTRUCTOR here.  */
  if (TYPE_IS_PADDING_P (TREE_TYPE (dest))
      && TYPE_SIZE (TREE_TYPE (dest))
         == TYPE_SIZE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (dest)))))
    src = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (dest))), src);
  else
    src = convert (TREE_TYPE (dest), src);
  src = fold_build1 (VIEW_CONVERT_EXPR, int_type, src);
  addr = build_unary_op (ADDR_EXPR, ptr_type, dest);

  return build_call_expr (t, 3, addr, src, mem_model);
}
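
/* Companion sketch (not part of GCC): the store side reduces to the
   matching sized builtin, again with SEQ_CST ordering.  */
#if 0
static void
atomic_store_example (volatile int *p, int v)
{
  __atomic_store_n (p, v, __ATOMIC_SEQ_CST);
}
#endif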
\f
/* Make a binary operation of kind OP_CODE.  RESULT_TYPE is the type
   desired for the result.  Usually the operation is to be performed
   in that type.  For INIT_EXPR and MODIFY_EXPR, RESULT_TYPE must be
   NULL_TREE.  For ARRAY_REF, RESULT_TYPE may be NULL_TREE, in which
   case the type to be used will be derived from the operands.

   This function is very much unlike the ones for C and C++ since we
   have already done any type conversion and matching required.  All we
   have to do here is validate the work done by SEM and handle subtypes.  */

tree
build_binary_op (enum tree_code op_code, tree result_type,
                 tree left_operand, tree right_operand)
{
  tree left_type = TREE_TYPE (left_operand);
  tree right_type = TREE_TYPE (right_operand);
  tree left_base_type = get_base_type (left_type);
  tree right_base_type = get_base_type (right_type);
  tree operation_type = result_type;
  tree best_type = NULL_TREE;
  tree modulus, result;
  bool has_side_effects = false;

  if (operation_type
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  if (operation_type
      && TREE_CODE (operation_type) == INTEGER_TYPE
      && TYPE_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  modulus = (operation_type
             && TREE_CODE (operation_type) == INTEGER_TYPE
             && TYPE_MODULAR_P (operation_type)
             ? TYPE_MODULUS (operation_type) : NULL_TREE);

  switch (op_code)
    {
    case INIT_EXPR:
    case MODIFY_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (result_type == NULL_TREE);
#endif
      /* If there were integral or pointer conversions on the LHS, remove
         them; we'll be putting them back below if needed.  Likewise for
         conversions between array and record types, except for justified
         modular types.  But don't do this if the right operand is not
         BLKmode (for packed arrays) unless we are not changing the mode.  */
      while ((CONVERT_EXPR_P (left_operand)
              || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
             && (((INTEGRAL_TYPE_P (left_type)
                   || POINTER_TYPE_P (left_type))
                  && (INTEGRAL_TYPE_P (TREE_TYPE
                                       (TREE_OPERAND (left_operand, 0)))
                      || POINTER_TYPE_P (TREE_TYPE
                                         (TREE_OPERAND (left_operand, 0)))))
                 || (((TREE_CODE (left_type) == RECORD_TYPE
                       && !TYPE_JUSTIFIED_MODULAR_P (left_type))
                      || TREE_CODE (left_type) == ARRAY_TYPE)
                     && ((TREE_CODE (TREE_TYPE
                                     (TREE_OPERAND (left_operand, 0)))
                          == RECORD_TYPE)
                         || (TREE_CODE (TREE_TYPE
                                        (TREE_OPERAND (left_operand, 0)))
                             == ARRAY_TYPE))
                     && (TYPE_MODE (right_type) == BLKmode
                         || (TYPE_MODE (left_type)
                             == TYPE_MODE (TREE_TYPE
                                           (TREE_OPERAND
                                            (left_operand, 0))))))))
        {
          left_operand = TREE_OPERAND (left_operand, 0);
          left_type = TREE_TYPE (left_operand);
        }

      /* If a class-wide type may be involved, force use of the RHS type.  */
      if ((TREE_CODE (right_type) == RECORD_TYPE
           || TREE_CODE (right_type) == UNION_TYPE)
          && TYPE_ALIGN_OK (right_type))
        operation_type = right_type;

      /* If we are copying between padded objects with compatible types, use
         the padded view of the objects; this is very likely more efficient.
         Likewise for a padded object that is assigned a constructor, if we
         can convert the constructor to the inner type, to avoid putting a
         VIEW_CONVERT_EXPR on the LHS.  But don't do so if we wouldn't have
         actually copied anything.  */
      else if (TYPE_IS_PADDING_P (left_type)
               && TREE_CONSTANT (TYPE_SIZE (left_type))
               && ((TREE_CODE (right_operand) == COMPONENT_REF
                    && TYPE_MAIN_VARIANT (left_type)
                       == TYPE_MAIN_VARIANT
                          (TREE_TYPE (TREE_OPERAND (right_operand, 0))))
                   || (TREE_CODE (right_operand) == CONSTRUCTOR
                       && !CONTAINS_PLACEHOLDER_P
                           (DECL_SIZE (TYPE_FIELDS (left_type)))))
               && !integer_zerop (TYPE_SIZE (right_type)))
        {
          /* We make an exception for a BLKmode type padding a non-BLKmode
             inner type and do the conversion of the LHS right away, since
             unchecked_convert wouldn't do it properly.  */
          if (TYPE_MODE (left_type) == BLKmode
              && TYPE_MODE (right_type) != BLKmode
              && TREE_CODE (right_operand) != CONSTRUCTOR)
            {
              operation_type = right_type;
              left_operand = convert (operation_type, left_operand);
              left_type = operation_type;
            }
          else
            operation_type = left_type;
        }

      /* If we have a call to a function that returns an unconstrained type
         with default discriminant on the RHS, use the RHS type (which is
         padded) as we cannot compute the size of the actual assignment.  */
      else if (TREE_CODE (right_operand) == CALL_EXPR
               && TYPE_IS_PADDING_P (right_type)
               && CONTAINS_PLACEHOLDER_P
                  (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS (right_type)))))
        operation_type = right_type;

      /* Find the best type to use for copying between aggregate types.  */
      else if (((TREE_CODE (left_type) == ARRAY_TYPE
                 && TREE_CODE (right_type) == ARRAY_TYPE)
                || (TREE_CODE (left_type) == RECORD_TYPE
                    && TREE_CODE (right_type) == RECORD_TYPE))
               && (best_type = find_common_type (left_type, right_type)))
        operation_type = best_type;

      /* Otherwise use the LHS type.  */
      else
        operation_type = left_type;

      /* Ensure everything on the LHS is valid.  If we have a field reference,
         strip anything that get_inner_reference can handle.  Then remove any
         conversions between types having the same code and mode.  And mark
         VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE.  When done, we must have
         either an INDIRECT_REF, a NULL_EXPR or a DECL node.  */
      result = left_operand;
      while (true)
        {
          tree restype = TREE_TYPE (result);

          if (TREE_CODE (result) == COMPONENT_REF
              || TREE_CODE (result) == ARRAY_REF
              || TREE_CODE (result) == ARRAY_RANGE_REF)
            while (handled_component_p (result))
              result = TREE_OPERAND (result, 0);
          else if (TREE_CODE (result) == REALPART_EXPR
                   || TREE_CODE (result) == IMAGPART_EXPR
                   || (CONVERT_EXPR_P (result)
                       && (((TREE_CODE (restype)
                             == TREE_CODE (TREE_TYPE
                                           (TREE_OPERAND (result, 0))))
                            && (TYPE_MODE (TREE_TYPE
                                           (TREE_OPERAND (result, 0)))
                                == TYPE_MODE (restype)))
                           || TYPE_ALIGN_OK (restype))))
            result = TREE_OPERAND (result, 0);
          else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
            {
              TREE_ADDRESSABLE (result) = 1;
              result = TREE_OPERAND (result, 0);
            }
          else
            break;
        }

      gcc_assert (TREE_CODE (result) == INDIRECT_REF
                  || TREE_CODE (result) == NULL_EXPR
                  || DECL_P (result));

      /* Convert the right operand to the operation type unless it is
         either already of the correct type or if the type involves a
         placeholder, since the RHS may not have the same record type.  */
      if (operation_type != right_type
          && !CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type)))
        {
          right_operand = convert (operation_type, right_operand);
          right_type = operation_type;
        }

      /* If the left operand is not of the same type as the operation
         type, wrap it up in a VIEW_CONVERT_EXPR.  */
      if (left_type != operation_type)
        left_operand = unchecked_convert (operation_type, left_operand, false);

      has_side_effects = true;
      modulus = NULL_TREE;
      break;

    case ARRAY_REF:
      if (!operation_type)
        operation_type = TREE_TYPE (left_type);

      /* ... fall through ... */

    case ARRAY_RANGE_REF:
      /* First look through conversion between type variants.  Note that
         this changes neither the operation type nor the type domain.  */
      if (TREE_CODE (left_operand) == VIEW_CONVERT_EXPR
          && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (left_operand, 0)))
             == TYPE_MAIN_VARIANT (left_type))
        {
          left_operand = TREE_OPERAND (left_operand, 0);
          left_type = TREE_TYPE (left_operand);
        }

      /* For a range, make sure the element type is consistent.  */
      if (op_code == ARRAY_RANGE_REF
          && TREE_TYPE (operation_type) != TREE_TYPE (left_type))
        operation_type = build_array_type (TREE_TYPE (left_type),
                                           TYPE_DOMAIN (operation_type));

      /* Then convert the right operand to its base type.  This will prevent
         unneeded sign conversions when sizetype is wider than integer.  */
      right_operand = convert (right_base_type, right_operand);
      right_operand = convert_to_index_type (right_operand);
      modulus = NULL_TREE;
      break;

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
#endif
      operation_type = left_base_type;
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);
      break;

    case GE_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
#endif
      /* If either operand is a NULL_EXPR, just return a new one.  */
      if (TREE_CODE (left_operand) == NULL_EXPR)
        return build2 (op_code, result_type,
                       build1 (NULL_EXPR, integer_type_node,
                               TREE_OPERAND (left_operand, 0)),
                       integer_zero_node);

      else if (TREE_CODE (right_operand) == NULL_EXPR)
        return build2 (op_code, result_type,
                       build1 (NULL_EXPR, integer_type_node,
                               TREE_OPERAND (right_operand, 0)),
                       integer_zero_node);

      /* If either object is a justified modular type, get the
         fields from within.  */
      if (TREE_CODE (left_type) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (left_type))
        {
          left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
                                  left_operand);
          left_type = TREE_TYPE (left_operand);
          left_base_type = get_base_type (left_type);
        }

      if (TREE_CODE (right_type) == RECORD_TYPE
          && TYPE_JUSTIFIED_MODULAR_P (right_type))
        {
          right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
                                   right_operand);
          right_type = TREE_TYPE (right_operand);
          right_base_type = get_base_type (right_type);
        }

      /* If both objects are arrays, compare them specially.  */
      if ((TREE_CODE (left_type) == ARRAY_TYPE
           || (TREE_CODE (left_type) == INTEGER_TYPE
               && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
          && (TREE_CODE (right_type) == ARRAY_TYPE
              || (TREE_CODE (right_type) == INTEGER_TYPE
                  && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
        {
          result = compare_arrays (input_location,
                                   result_type, left_operand, right_operand);
          if (op_code == NE_EXPR)
            result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
          else
            gcc_assert (op_code == EQ_EXPR);

          return result;
        }

      /* Otherwise, the base types must be the same, unless they are both fat
         pointer types or record types.  In the latter case, use the best type
         and convert both operands to that type.  */
      if (left_base_type != right_base_type)
        {
          if (TYPE_IS_FAT_POINTER_P (left_base_type)
              && TYPE_IS_FAT_POINTER_P (right_base_type))
            {
              gcc_assert (TYPE_MAIN_VARIANT (left_base_type)
                          == TYPE_MAIN_VARIANT (right_base_type));
              best_type = left_base_type;
            }

          else if (TREE_CODE (left_base_type) == RECORD_TYPE
                   && TREE_CODE (right_base_type) == RECORD_TYPE)
            {
              /* The only way this is permitted is if both types have the same
                 name.  In that case, one of them must not be self-referential.
                 Use it as the best type.  Even better with a fixed size.  */
              gcc_assert (TYPE_NAME (left_base_type)
                          && TYPE_NAME (left_base_type)
                             == TYPE_NAME (right_base_type));

              if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
                best_type = left_base_type;
              else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
                best_type = right_base_type;
              else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
                best_type = left_base_type;
              else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
                best_type = right_base_type;
              else
                gcc_unreachable ();
            }

          else
            gcc_unreachable ();

          left_operand = convert (best_type, left_operand);
          right_operand = convert (best_type, right_operand);
        }
      else
        {
          left_operand = convert (left_base_type, left_operand);
          right_operand = convert (right_base_type, right_operand);
        }

      /* If both objects are fat pointers, compare them specially.  */
      if (TYPE_IS_FAT_POINTER_P (left_base_type))
        {
          result
            = compare_fat_pointers (input_location,
                                    result_type, left_operand, right_operand);
          if (op_code == NE_EXPR)
            result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
          else
            gcc_assert (op_code == EQ_EXPR);

          return result;
        }

      modulus = NULL_TREE;
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* The RHS of a shift can be any type.  Also, ignore any modulus
         (we used to abort, but this is needed for unchecked conversion
         to modular types).  Otherwise, processing is the same as normal.  */
      gcc_assert (operation_type == left_base_type);
      modulus = NULL_TREE;
      left_operand = convert (operation_type, left_operand);
      break;

    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      /* For binary modulus, if the inputs are in range, so are the
         outputs.  */
      if (modulus && integer_pow2p (modulus))
        modulus = NULL_TREE;
      goto common;

    case COMPLEX_EXPR:
      gcc_assert (TREE_TYPE (result_type) == left_base_type
                  && TREE_TYPE (result_type) == right_base_type);
      left_operand = convert (left_base_type, left_operand);
      right_operand = convert (right_base_type, right_operand);
      break;

    case TRUNC_DIV_EXPR:   case TRUNC_MOD_EXPR:
    case CEIL_DIV_EXPR:    case CEIL_MOD_EXPR:
    case FLOOR_DIV_EXPR:   case FLOOR_MOD_EXPR:
    case ROUND_DIV_EXPR:   case ROUND_MOD_EXPR:
      /* These always produce results lower than either operand.  */
      modulus = NULL_TREE;
      goto common;

    case POINTER_PLUS_EXPR:
      gcc_assert (operation_type == left_base_type
                  && sizetype == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (sizetype, right_operand);
      break;

    case PLUS_NOMOD_EXPR:
    case MINUS_NOMOD_EXPR:
      if (op_code == PLUS_NOMOD_EXPR)
        op_code = PLUS_EXPR;
      else
        op_code = MINUS_EXPR;
      modulus = NULL_TREE;

      /* ... fall through ... */

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* Avoid doing arithmetic in ENUMERAL_TYPE or BOOLEAN_TYPE like the
         other compilers.  Contrary to C, Ada doesn't allow arithmetic in
         these types but can generate addition/subtraction for Succ/Pred.  */
      if (operation_type
          && (TREE_CODE (operation_type) == ENUMERAL_TYPE
              || TREE_CODE (operation_type) == BOOLEAN_TYPE))
        operation_type = left_base_type = right_base_type
          = gnat_type_for_mode (TYPE_MODE (operation_type),
                                TYPE_UNSIGNED (operation_type));

      /* ... fall through ... */

    default:
    common:
      /* The result type should be the same as the base types of both
         operands (and they should be the same).  Convert everything to
         the result type.  */
      gcc_assert (operation_type == left_base_type
                  && left_base_type == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);
    }

  if (modulus && !integer_pow2p (modulus))
    {
      result = nonbinary_modular_operation (op_code, operation_type,
                                            left_operand, right_operand);
      modulus = NULL_TREE;
    }
  /* If either operand is a NULL_EXPR, just return a new one.  */
  else if (TREE_CODE (left_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
  else if (TREE_CODE (right_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
  else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
    result = fold (build4 (op_code, operation_type, left_operand,
                           right_operand, NULL_TREE, NULL_TREE));
  else if (op_code == INIT_EXPR || op_code == MODIFY_EXPR)
    result = build2 (op_code, void_type_node, left_operand, right_operand);
  else
    result
      = fold_build2 (op_code, operation_type, left_operand, right_operand);

  if (TREE_CONSTANT (result))
    ;
  else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
    {
      if (TYPE_VOLATILE (operation_type))
        TREE_THIS_VOLATILE (result) = 1;
    }
  else
    TREE_CONSTANT (result)
      |= (TREE_CONSTANT (left_operand) && TREE_CONSTANT (right_operand));

  TREE_SIDE_EFFECTS (result) |= has_side_effects;

  /* If we are working with modular types, perform the MOD operation
     if something above hasn't eliminated the need for it.  */
  if (modulus)
    result = fold_build2 (FLOOR_MOD_EXPR, operation_type, result,
                          convert (operation_type, modulus));

  if (result_type && result_type != operation_type)
    result = convert (result_type, result);

  return result;
}
\f
/* Similar, but for unary operations.  */

tree
build_unary_op (enum tree_code op_code, tree result_type, tree operand)
{
  tree type = TREE_TYPE (operand);
  tree base_type = get_base_type (type);
  tree operation_type = result_type;
  tree result;

  if (operation_type
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  if (operation_type
      && TREE_CODE (operation_type) == INTEGER_TYPE
      && TYPE_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  switch (op_code)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      if (!operation_type)
        result_type = operation_type = TREE_TYPE (type);
      else
        gcc_assert (result_type == TREE_TYPE (type));

      result = fold_build1 (op_code, operation_type, operand);
      break;

    case TRUTH_NOT_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
#endif
      result = invert_truthvalue_loc (EXPR_LOCATION (operand), operand);
      /* When not optimizing, fold the result as invert_truthvalue_loc
         doesn't fold the result of comparisons.  This is intended to undo
         the trick used for boolean rvalues in gnat_to_gnu.  */
      if (!optimize)
        result = fold (result);
      break;

    case ATTR_ADDR_EXPR:
    case ADDR_EXPR:
      switch (TREE_CODE (operand))
        {
        case INDIRECT_REF:
        case UNCONSTRAINED_ARRAY_REF:
          result = TREE_OPERAND (operand, 0);

          /* Make sure the type here is a pointer, not a reference.
             GCC wants pointer types for function addresses.  */
          if (!result_type)
            result_type = build_pointer_type (type);

          /* If the underlying object can alias everything, propagate the
             property since we are effectively retrieving the object.  */
          if (POINTER_TYPE_P (TREE_TYPE (result))
              && TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (result)))
            {
              if (TREE_CODE (result_type) == POINTER_TYPE
                  && !TYPE_REF_CAN_ALIAS_ALL (result_type))
                result_type
                  = build_pointer_type_for_mode (TREE_TYPE (result_type),
                                                 TYPE_MODE (result_type),
                                                 true);
              else if (TREE_CODE (result_type) == REFERENCE_TYPE
                       && !TYPE_REF_CAN_ALIAS_ALL (result_type))
                result_type
                  = build_reference_type_for_mode (TREE_TYPE (result_type),
                                                   TYPE_MODE (result_type),
                                                   true);
            }
          break;

        case NULL_EXPR:
          result = operand;
          TREE_TYPE (result) = type = build_pointer_type (type);
          break;

        case COMPOUND_EXPR:
          /* Fold a compound expression if it has unconstrained array type
             since the middle-end cannot handle it.  But we don't do it in
             the general case because it may introduce aliasing issues if the
             first operand is an indirect assignment and the second operand
             the corresponding address, e.g. for an allocator.  */
          if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
            {
              result = build_unary_op (ADDR_EXPR, result_type,
                                       TREE_OPERAND (operand, 1));
              result = build2 (COMPOUND_EXPR, TREE_TYPE (result),
                               TREE_OPERAND (operand, 0), result);
              break;
            }
          goto common;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
        case COMPONENT_REF:
        case BIT_FIELD_REF:
          /* If this is for 'Address, find the address of the prefix and add
             the offset to the field.  Otherwise, do this the normal way.  */
          if (op_code == ATTR_ADDR_EXPR)
            {
              HOST_WIDE_INT bitsize;
              HOST_WIDE_INT bitpos;
              tree offset, inner;
              machine_mode mode;
              int unsignedp, volatilep;

              inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
                                           &mode, &unsignedp, &volatilep,
                                           false);

              /* If INNER is a padding type whose field has a self-referential
                 size, convert to that inner type.  We know the offset is zero
                 and we need to have that type visible.  */
              if (TYPE_IS_PADDING_P (TREE_TYPE (inner))
                  && CONTAINS_PLACEHOLDER_P
                     (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS
                                            (TREE_TYPE (inner))))))
                inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
                                 inner);

              /* Compute the offset as a byte offset from INNER.  */
              if (!offset)
                offset = size_zero_node;

              offset = size_binop (PLUS_EXPR, offset,
                                   size_int (bitpos / BITS_PER_UNIT));

              /* Take the address of INNER, convert the offset to void *, and
                 add them.  It will later be converted to the desired result
                 type, if any.  */
              inner = build_unary_op (ADDR_EXPR, NULL_TREE, inner);
              inner = convert (ptr_void_type_node, inner);
              result = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
                                        inner, offset);
              result = convert (build_pointer_type (TREE_TYPE (operand)),
                                result);
              break;
            }
          goto common;

        case CONSTRUCTOR:
          /* If this is just a constructor for a padded record, we can
             just take the address of the single field and convert it to
             a pointer to our type.  */
          if (TYPE_IS_PADDING_P (type))
            {
              result = (*CONSTRUCTOR_ELTS (operand))[0].value;
              result = convert (build_pointer_type (TREE_TYPE (operand)),
                                build_unary_op (ADDR_EXPR, NULL_TREE, result));
              break;
            }

          goto common;

        case NOP_EXPR:
          if (AGGREGATE_TYPE_P (type)
              && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
            return build_unary_op (ADDR_EXPR, result_type,
                                   TREE_OPERAND (operand, 0));

          /* ... fall through ... */

        case VIEW_CONVERT_EXPR:
          /* If this is just a variant conversion or if the conversion doesn't
             change the mode, get the result type from this type and go down.
             This is needed for conversions of CONST_DECLs, to eventually get
             to the address of their CORRESPONDING_VARs.  */
          if ((TYPE_MAIN_VARIANT (type)
               == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (operand, 0))))
              || (TYPE_MODE (type) != BLKmode
                  && (TYPE_MODE (type)
                      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0))))))
            return build_unary_op (ADDR_EXPR,
                                   (result_type ? result_type
                                    : build_pointer_type (type)),
                                   TREE_OPERAND (operand, 0));
          goto common;

        case CONST_DECL:
          operand = DECL_CONST_CORRESPONDING_VAR (operand);

          /* ... fall through ... */

        default:
        common:

          /* If we are taking the address of a padded record whose field
             contains a template, take the address of the field.  */
          if (TYPE_IS_PADDING_P (type)
              && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
              && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
            {
              type = TREE_TYPE (TYPE_FIELDS (type));
              operand = convert (type, operand);
            }

          gnat_mark_addressable (operand);
          result = build_fold_addr_expr (operand);
        }

      TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
      break;

    case INDIRECT_REF:
      {
        tree t = remove_conversions (operand, false);
        bool can_never_be_null = DECL_P (t) && DECL_CAN_NEVER_BE_NULL_P (t);

        /* If TYPE is a thin pointer, either first retrieve the base if this
           is an expression with an offset built for the initialization of an
           object with an unconstrained nominal subtype, or else convert to
           the fat pointer.  */
        if (TYPE_IS_THIN_POINTER_P (type))
          {
            tree rec_type = TREE_TYPE (type);

            if (TREE_CODE (operand) == POINTER_PLUS_EXPR
                && TREE_OPERAND (operand, 1)
                   == byte_position (DECL_CHAIN (TYPE_FIELDS (rec_type)))
                && TREE_CODE (TREE_OPERAND (operand, 0)) == NOP_EXPR)
              {
                operand = TREE_OPERAND (TREE_OPERAND (operand, 0), 0);
                type = TREE_TYPE (operand);
              }
            else if (TYPE_UNCONSTRAINED_ARRAY (rec_type))
              {
                operand
                  = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (rec_type)),
                             operand);
                type = TREE_TYPE (operand);
              }
          }

        /* If we want to refer to an unconstrained array, use the appropriate
           expression.  But this will never survive down to the back-end.  */
        if (TYPE_IS_FAT_POINTER_P (type))
          {
            result = build1 (UNCONSTRAINED_ARRAY_REF,
                             TYPE_UNCONSTRAINED_ARRAY (type), operand);
            TREE_READONLY (result)
              = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
          }

        /* If we are dereferencing an ADDR_EXPR, return its operand.  */
        else if (TREE_CODE (operand) == ADDR_EXPR)
          result = TREE_OPERAND (operand, 0);

        /* Otherwise, build and fold the indirect reference.  */
        else
          {
            result = build_fold_indirect_ref (operand);
            TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
          }

        if (!TYPE_IS_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)))
          {
            TREE_SIDE_EFFECTS (result) = 1;
            if (TREE_CODE (result) == INDIRECT_REF)
              TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
          }

        if ((TREE_CODE (result) == INDIRECT_REF
             || TREE_CODE (result) == UNCONSTRAINED_ARRAY_REF)
            && can_never_be_null)
          TREE_THIS_NOTRAP (result) = 1;

        break;
      }


    case NEGATE_EXPR:
    case BIT_NOT_EXPR:
      {
        tree modulus = ((operation_type
                         && TREE_CODE (operation_type) == INTEGER_TYPE
                         && TYPE_MODULAR_P (operation_type))
                        ? TYPE_MODULUS (operation_type) : NULL_TREE);
        int mod_pow2 = modulus && integer_pow2p (modulus);

        /* If this is a modular type, there are various possibilities
           depending on the operation and whether the modulus is a
           power of two or not.  */

        if (modulus)
          {
            gcc_assert (operation_type == base_type);
            operand = convert (operation_type, operand);

            /* The fastest in the negate case for binary modulus is
               the straightforward code; the TRUNC_MOD_EXPR below
               is an AND operation.  */
            if (op_code == NEGATE_EXPR && mod_pow2)
              result = fold_build2 (TRUNC_MOD_EXPR, operation_type,
                                    fold_build1 (NEGATE_EXPR, operation_type,
                                                 operand),
                                    modulus);

            /* For the nonbinary negate case, return zero for a zero operand,
               else return the modulus minus the operand.  If the modulus
               is a power of two minus one, we can do the subtraction
               as an XOR since it is equivalent and faster on most machines.  */
            else if (op_code == NEGATE_EXPR && !mod_pow2)
              {
                if (integer_pow2p (fold_build2 (PLUS_EXPR, operation_type,
                                                modulus,
                                                convert (operation_type,
                                                         integer_one_node))))
                  result = fold_build2 (BIT_XOR_EXPR, operation_type,
                                        operand, modulus);
                else
                  result = fold_build2 (MINUS_EXPR, operation_type,
                                        modulus, operand);

                result = fold_build3 (COND_EXPR, operation_type,
                                      fold_build2 (NE_EXPR,
                                                   boolean_type_node,
                                                   operand,
                                                   convert
                                                   (operation_type,
                                                    integer_zero_node)),
                                      result, operand);
              }
            else
              {
                /* For the NOT cases, we need a constant equal to
                   the modulus minus one.  For a binary modulus, we
                   XOR against that constant; for a nonbinary modulus,
                   we subtract the operand from it.  */
                tree cnst = fold_build2 (MINUS_EXPR, operation_type, modulus,
                                         convert (operation_type,
                                                  integer_one_node));

                if (mod_pow2)
                  result = fold_build2 (BIT_XOR_EXPR, operation_type,
                                        operand, cnst);
                else
                  result = fold_build2 (MINUS_EXPR, operation_type,
                                        cnst, operand);
              }

            break;
          }
      }

      /* ... fall through ... */

    default:
      gcc_assert (operation_type == base_type);
      result = fold_build1 (op_code, operation_type,
                            convert (operation_type, operand));
    }

  if (result_type && TREE_TYPE (result) != result_type)
    result = convert (result_type, result);

  return result;
}
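
/* A standalone sketch (not part of GCC) of the nonbinary negate case
   above: zero maps to zero, anything else to M - X, with the XOR
   shortcut when M + 1 is a power of 2 (so M is a mask of low bits).  */
#if 0
static unsigned int
nonbinary_mod_negate (unsigned int x, unsigned int m)
{
  unsigned int r = (((m + 1) & m) == 0) ? (m ^ x) : (m - x);
  return x != 0 ? r : x;
}
#endif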
1581 \f
1582 /* Similar, but for COND_EXPR. */
1583
1584 tree
1585 build_cond_expr (tree result_type, tree condition_operand,
1586 tree true_operand, tree false_operand)
1587 {
1588 bool addr_p = false;
1589 tree result;
1590
1591 /* The front-end verified that result, true and false operands have
1592 same base type. Convert everything to the result type. */
1593 true_operand = convert (result_type, true_operand);
1594 false_operand = convert (result_type, false_operand);
1595
1596 /* If the result type is unconstrained, take the address of the operands and
1597 then dereference the result. Likewise if the result type is passed by
1598 reference, because creating a temporary of this type is not allowed. */
1599 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1600 || TYPE_IS_BY_REFERENCE_P (result_type)
1601 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1602 {
1603 result_type = build_pointer_type (result_type);
1604 true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
1605 false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
1606 addr_p = true;
1607 }
1608
1609 result = fold_build3 (COND_EXPR, result_type, condition_operand,
1610 true_operand, false_operand);
1611
1612 /* If we have a common SAVE_EXPR (possibly surrounded by arithmetic)
1613 in both arms, make sure it gets evaluated by moving it ahead of the
1614 conditional expression. This is necessary because it is evaluated
1615 in only one place at run time and would otherwise be uninitialized
1616 in one of the arms. */
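/* E.g. for COND ? SAVE_EXPR <E> + 1 : SAVE_EXPR <E> - 1, we build
   COMPOUND_EXPR <SAVE_EXPR <E>, COND_EXPR <...>> so that E is
   evaluated before the branch is taken (an illustrative shape, not
   a dump from the sources).  */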
1617 true_operand = skip_simple_arithmetic (true_operand);
1618 false_operand = skip_simple_arithmetic (false_operand);
1619
1620 if (true_operand == false_operand && TREE_CODE (true_operand) == SAVE_EXPR)
1621 result = build2 (COMPOUND_EXPR, result_type, true_operand, result);
1622
1623 if (addr_p)
1624 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1625
1626 return result;
1627 }
1628
1629 /* Similar, but for COMPOUND_EXPR. */
1630
1631 tree
1632 build_compound_expr (tree result_type, tree stmt_operand, tree expr_operand)
1633 {
1634 bool addr_p = false;
1635 tree result;
1636
1637 /* If the result type is unconstrained, take the address of the operand and
1638 then dereference the result. Likewise if the result type is passed by
1639 reference, but this is natively handled in the gimplifier. */
1640 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1641 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
1642 {
1643 result_type = build_pointer_type (result_type);
1644 expr_operand = build_unary_op (ADDR_EXPR, result_type, expr_operand);
1645 addr_p = true;
1646 }
1647
1648 result = fold_build2 (COMPOUND_EXPR, result_type, stmt_operand,
1649 expr_operand);
1650
1651 if (addr_p)
1652 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1653
1654 return result;
1655 }
1656 \f
1657 /* Conveniently construct a function call expression. FNDECL names the
1658 function to be called, N is the number of arguments, and the "..."
1659 parameters are the argument expressions. Unlike build_call_expr,
1660 this doesn't fold the call, so it always returns a CALL_EXPR. */
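/* E.g. build_call_n_expr (free_decl, 1, free_ptr) yields an unfolded
   call to the free entry point, as done in maybe_wrap_free below.  */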
1661
1662 tree
1663 build_call_n_expr (tree fndecl, int n, ...)
1664 {
1665 va_list ap;
1666 tree fntype = TREE_TYPE (fndecl);
1667 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
1668
1669 va_start (ap, n);
1670 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
1671 va_end (ap);
1672 return fn;
1673 }
1674 \f
1675 /* Call a function that raises an exception and pass the line number and file
1676 name, if requested. MSG says which exception function to call.
1677
1678 GNAT_NODE is the gnat node conveying the source location for which the
1679 error should be signaled, or Empty, in which case the error is signaled
1680 on the current ref_file_name/input_line.
1681
1682 KIND says which kind of exception this is for
1683 (N_Raise_{Constraint,Storage,Program}_Error). */
1684
1685 tree
1686 build_call_raise (int msg, Node_Id gnat_node, char kind)
1687 {
1688 tree fndecl = gnat_raise_decls[msg];
1689 tree label = get_exception_label (kind);
1690 tree filename;
1691 int line_number;
1692 const char *str;
1693 int len;
1694
1695 /* If this is to be done as a goto, handle that case. */
1696 if (label)
1697 {
1698 Entity_Id local_raise = Get_Local_Raise_Call_Entity ();
1699 tree gnu_result = build1 (GOTO_EXPR, void_type_node, label);
1700
1701 /* If Local_Raise is present, generate
1702 Local_Raise (exception'Identity); */
1703 if (Present (local_raise))
1704 {
1705 tree gnu_local_raise
1706 = gnat_to_gnu_entity (local_raise, NULL_TREE, 0);
1707 tree gnu_exception_entity
1708 = gnat_to_gnu_entity (Get_RT_Exception_Entity (msg), NULL_TREE, 0);
1709 tree gnu_call
1710 = build_call_n_expr (gnu_local_raise, 1,
1711 build_unary_op (ADDR_EXPR, NULL_TREE,
1712 gnu_exception_entity));
1713
1714 gnu_result = build2 (COMPOUND_EXPR, void_type_node,
1715 gnu_call, gnu_result);
1716 }
1717 return gnu_result;
1718 }
1719
1720 str
1721 = (Debug_Flag_NN || Exception_Locations_Suppressed)
1722 ? ""
1723 : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1724 ? IDENTIFIER_POINTER
1725 (get_identifier (Get_Name_String
1726 (Debug_Source_Name
1727 (Get_Source_File_Index (Sloc (gnat_node))))))
1728 : ref_filename;
1729
1730 len = strlen (str);
1731 filename = build_string (len, str);
1732 line_number
1733 = (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1734 ? Get_Logical_Line_Number (Sloc (gnat_node))
1735 : LOCATION_LINE (input_location);
1736
1737 TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
1738 build_index_type (size_int (len)));
1739
1740 return
1741 build_call_n_expr (fndecl, 2,
1742 build1 (ADDR_EXPR,
1743 build_pointer_type (unsigned_char_type_node),
1744 filename),
1745 build_int_cst (NULL_TREE, line_number));
1746 }
1747
1748 /* Similar to build_call_raise, for an index or range check exception as
1749 determined by MSG, with extra information generated of the form
1750 "INDEX out of range FIRST..LAST". */
1751
1752 tree
1753 build_call_raise_range (int msg, Node_Id gnat_node,
1754 tree index, tree first, tree last)
1755 {
1756 tree fndecl = gnat_raise_decls_ext[msg];
1757 tree filename;
1758 int line_number, column_number;
1759 const char *str;
1760 int len;
1761
1762 str
1763 = (Debug_Flag_NN || Exception_Locations_Suppressed)
1764 ? ""
1765 : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1766 ? IDENTIFIER_POINTER
1767 (get_identifier (Get_Name_String
1768 (Debug_Source_Name
1769 (Get_Source_File_Index (Sloc (gnat_node))))))
1770 : ref_filename;
1771
1772 len = strlen (str);
1773 filename = build_string (len, str);
1774 if (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1775 {
1776 line_number = Get_Logical_Line_Number (Sloc (gnat_node));
1777 column_number = Get_Column_Number (Sloc (gnat_node));
1778 }
1779 else
1780 {
1781 line_number = LOCATION_LINE (input_location);
1782 column_number = 0;
1783 }
1784
1785 TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
1786 build_index_type (size_int (len)));
1787
1788 return
1789 build_call_n_expr (fndecl, 6,
1790 build1 (ADDR_EXPR,
1791 build_pointer_type (unsigned_char_type_node),
1792 filename),
1793 build_int_cst (NULL_TREE, line_number),
1794 build_int_cst (NULL_TREE, column_number),
1795 convert (integer_type_node, index),
1796 convert (integer_type_node, first),
1797 convert (integer_type_node, last));
1798 }
1799
1800 /* Similar to build_call_raise, with extra information about the column
1801 where the check failed. */
1802
1803 tree
1804 build_call_raise_column (int msg, Node_Id gnat_node)
1805 {
1806 tree fndecl = gnat_raise_decls_ext[msg];
1807 tree filename;
1808 int line_number, column_number;
1809 const char *str;
1810 int len;
1811
1812 str
1813 = (Debug_Flag_NN || Exception_Locations_Suppressed)
1814 ? ""
1815 : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1816 ? IDENTIFIER_POINTER
1817 (get_identifier (Get_Name_String
1818 (Debug_Source_Name
1819 (Get_Source_File_Index (Sloc (gnat_node))))))
1820 : ref_filename;
1821
1822 len = strlen (str);
1823 filename = build_string (len, str);
1824 if (gnat_node != Empty && Sloc (gnat_node) != No_Location)
1825 {
1826 line_number = Get_Logical_Line_Number (Sloc (gnat_node));
1827 column_number = Get_Column_Number (Sloc (gnat_node));
1828 }
1829 else
1830 {
1831 line_number = LOCATION_LINE (input_location);
1832 column_number = 0;
1833 }
1834
1835 TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
1836 build_index_type (size_int (len)));
1837
1838 return
1839 build_call_n_expr (fndecl, 3,
1840 build1 (ADDR_EXPR,
1841 build_pointer_type (unsigned_char_type_node),
1842 filename),
1843 build_int_cst (NULL_TREE, line_number),
1844 build_int_cst (NULL_TREE, column_number));
1845 }
1846 \f
1847 /* qsort comparator ordering two constructor elements of record
1848 components by increasing bit position. */
1849
1850 static int
1851 compare_elmt_bitpos (const PTR rt1, const PTR rt2)
1852 {
1853 const constructor_elt * const elmt1 = (const constructor_elt * const) rt1;
1854 const constructor_elt * const elmt2 = (const constructor_elt * const) rt2;
1855 const_tree const field1 = elmt1->index;
1856 const_tree const field2 = elmt2->index;
1857 const int ret
1858 = tree_int_cst_compare (bit_position (field1), bit_position (field2));
1859
1860 return ret ? ret : (int) (DECL_UID (field1) - DECL_UID (field2));
1861 }
1862
1863 /* Return a CONSTRUCTOR of TYPE whose elements are V. */
1864
1865 tree
1866 gnat_build_constructor (tree type, vec<constructor_elt, va_gc> *v)
1867 {
1868 bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
1869 bool read_only = true;
1870 bool side_effects = false;
1871 tree result, obj, val;
1872 unsigned int n_elmts;
1873
1874 /* Scan the elements to see if they are all constant or if any has side
1875 effects, to let us set global flags on the resulting constructor. Count
1876 the elements along the way for possible sorting purposes below. */
1877 FOR_EACH_CONSTRUCTOR_ELT (v, n_elmts, obj, val)
1878 {
1879 /* The predicate must be in keeping with output_constructor. */
1880 if ((!TREE_CONSTANT (val) && !TREE_STATIC (val))
1881 || (TREE_CODE (type) == RECORD_TYPE
1882 && CONSTRUCTOR_BITFIELD_P (obj)
1883 && !initializer_constant_valid_for_bitfield_p (val))
1884 || !initializer_constant_valid_p (val, TREE_TYPE (val)))
1885 allconstant = false;
1886
1887 if (!TREE_READONLY (val))
1888 read_only = false;
1889
1890 if (TREE_SIDE_EFFECTS (val))
1891 side_effects = true;
1892 }
1893
1894 /* For record types with constant components only, sort field list
1895 by increasing bit position. This is necessary to ensure the
1896 constructor can be output as static data. */
1897 if (allconstant && TREE_CODE (type) == RECORD_TYPE && n_elmts > 1)
1898 v->qsort (compare_elmt_bitpos);
1899
1900 result = build_constructor (type, v);
1901 CONSTRUCTOR_NO_CLEARING (result) = 1;
1902 TREE_CONSTANT (result) = TREE_STATIC (result) = allconstant;
1903 TREE_SIDE_EFFECTS (result) = side_effects;
1904 TREE_READONLY (result) = TYPE_READONLY (type) || read_only || allconstant;
1905 return result;
1906 }
1907 \f
1908 /* Return a COMPONENT_REF to access the field given either by COMPONENT,
1909 an IDENTIFIER_NODE naming the field, or by FIELD, a FIELD_DECL for
1910 the field. Don't fold the result if NO_FOLD_P is true.
1911
1912 We also handle the fact that we might have been passed a pointer to the
1913 actual record and know how to look for fields in variant parts. */
1914
1915 static tree
1916 build_simple_component_ref (tree record_variable, tree component, tree field,
1917 bool no_fold_p)
1918 {
1919 tree record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_variable));
1920 tree base, ref;
1921
1922 gcc_assert (RECORD_OR_UNION_TYPE_P (record_type)
1923 && COMPLETE_TYPE_P (record_type)
1924 && (component == NULL_TREE) != (field == NULL_TREE));
1925
1926 /* If no field was specified, look for a field with the specified name in
1927 the current record only. */
1928 if (!field)
1929 for (field = TYPE_FIELDS (record_type);
1930 field;
1931 field = DECL_CHAIN (field))
1932 if (DECL_NAME (field) == component)
1933 break;
1934
1935 if (!field)
1936 return NULL_TREE;
1937
1938 /* If this field is not in the specified record, see if we can find a field
1939 in the specified record whose original field is the same as this one. */
1940 if (DECL_CONTEXT (field) != record_type)
1941 {
1942 tree new_field;
1943
1944 /* First loop through normal components. */
1945 for (new_field = TYPE_FIELDS (record_type);
1946 new_field;
1947 new_field = DECL_CHAIN (new_field))
1948 if (SAME_FIELD_P (field, new_field))
1949 break;
1950
1951 /* Next, see if we're looking for an inherited component in an extension.
1952 If so, look through the extension directly, unless the type contains
1953 a placeholder, as it might be needed for a later substitution. */
1954 if (!new_field
1955 && TREE_CODE (record_variable) == VIEW_CONVERT_EXPR
1956 && TYPE_ALIGN_OK (record_type)
1957 && !type_contains_placeholder_p (record_type)
1958 && TREE_CODE (TREE_TYPE (TREE_OPERAND (record_variable, 0)))
1959 == RECORD_TYPE
1960 && TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (record_variable, 0))))
1961 {
1962 ref = build_simple_component_ref (TREE_OPERAND (record_variable, 0),
1963 NULL_TREE, field, no_fold_p);
1964 if (ref)
1965 return ref;
1966 }
1967
1968 /* Next, loop through DECL_INTERNAL_P components if we haven't found the
1969 component in the first search. Doing this search in two steps is
1970 required to avoid hidden homonymous fields in the _Parent field. */
1971 if (!new_field)
1972 for (new_field = TYPE_FIELDS (record_type);
1973 new_field;
1974 new_field = DECL_CHAIN (new_field))
1975 if (DECL_INTERNAL_P (new_field))
1976 {
1977 tree field_ref
1978 = build_simple_component_ref (record_variable,
1979 NULL_TREE, new_field, no_fold_p);
1980 ref = build_simple_component_ref (field_ref, NULL_TREE, field,
1981 no_fold_p);
1982 if (ref)
1983 return ref;
1984 }
1985
1986 field = new_field;
1987 }
1988
1989 if (!field)
1990 return NULL_TREE;
1991
1992 /* If the field's offset has overflowed, do not try to access it, as doing
1993 so may trigger sanity checks deeper in the back-end. Note that we don't
1994 need to warn since this will be done on trying to declare the object. */
1995 if (TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST
1996 && TREE_OVERFLOW (DECL_FIELD_OFFSET (field)))
1997 return NULL_TREE;
1998
1999 /* We have found a suitable field. Before building the COMPONENT_REF, get
2000 the base object of the record variable if possible. */
2001 base = record_variable;
2002
2003 if (TREE_CODE (record_variable) == VIEW_CONVERT_EXPR)
2004 {
2005 tree inner_variable = TREE_OPERAND (record_variable, 0);
2006 tree inner_type = TYPE_MAIN_VARIANT (TREE_TYPE (inner_variable));
2007
2008 /* Look through a conversion between type variants. This is transparent
2009 as far as the field is concerned. */
2010 if (inner_type == record_type)
2011 base = inner_variable;
2012
2013 /* Look through a conversion between the original type and its packable
2014 version, but the field needs to be adjusted in this case. */
2015 else if (RECORD_OR_UNION_TYPE_P (inner_type)
2016 && TYPE_NAME (inner_type) == TYPE_NAME (record_type))
2017 {
2018 tree new_field;
2019
2020 for (new_field = TYPE_FIELDS (inner_type);
2021 new_field;
2022 new_field = DECL_CHAIN (new_field))
2023 if (SAME_FIELD_P (field, new_field))
2024 break;
2025 if (new_field)
2026 {
2027 field = new_field;
2028 base = inner_variable;
2029 }
2030 }
2031 }
2032
2033 ref = build3 (COMPONENT_REF, TREE_TYPE (field), base, field, NULL_TREE);
2034
2035 if (TREE_READONLY (record_variable)
2036 || TREE_READONLY (field)
2037 || TYPE_READONLY (record_type))
2038 TREE_READONLY (ref) = 1;
2039
2040 if (TREE_THIS_VOLATILE (record_variable)
2041 || TREE_THIS_VOLATILE (field)
2042 || TYPE_VOLATILE (record_type))
2043 TREE_THIS_VOLATILE (ref) = 1;
2044
2045 if (no_fold_p)
2046 return ref;
2047
2048 /* The generic folder may punt in this case because the inner array type
2049 can be self-referential, but folding is in fact not problematic. */
2050 if (TREE_CODE (base) == CONSTRUCTOR
2051 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (base)))
2052 {
2053 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (base);
2054 unsigned HOST_WIDE_INT idx;
2055 tree index, value;
2056 FOR_EACH_CONSTRUCTOR_ELT (elts, idx, index, value)
2057 if (index == field)
2058 return value;
2059 return ref;
2060 }
2061
2062 return fold (ref);
2063 }
2064 \f
2065 /* Likewise, but generate a Constraint_Error if the reference could not be
2066 found. */
2067
2068 tree
2069 build_component_ref (tree record_variable, tree component, tree field,
2070 bool no_fold_p)
2071 {
2072 tree ref = build_simple_component_ref (record_variable, component, field,
2073 no_fold_p);
2074 if (ref)
2075 return ref;
2076
2077 /* If FIELD was specified, assume this is an invalid user field so raise
2078 Constraint_Error. Otherwise, we have no type to return so abort. */
2079 gcc_assert (field);
2080 return build1 (NULL_EXPR, TREE_TYPE (field),
2081 build_call_raise (CE_Discriminant_Check_Failed, Empty,
2082 N_Raise_Constraint_Error));
2083 }
2084 \f
2085 /* Helper for build_call_alloc_dealloc, with arguments to be interpreted
2086 identically. Process the case where a GNAT_PROC to call is provided. */
2087
2088 static inline tree
2089 build_call_alloc_dealloc_proc (tree gnu_obj, tree gnu_size, tree gnu_type,
2090 Entity_Id gnat_proc, Entity_Id gnat_pool)
2091 {
2092 tree gnu_proc = gnat_to_gnu (gnat_proc);
2093 tree gnu_call;
2094
2095 /* A storage pool's underlying type is a record type (for both predefined
2096 storage pools and GNAT simple storage pools). The secondary stack uses
2097 the same mechanism, but its pool object (SS_Pool) is an integer. */
2098 if (Is_Record_Type (Underlying_Type (Etype (gnat_pool))))
2099 {
2100 /* The size is the third parameter; the alignment parameter has
2101 the same type as the size. */
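/* For reference, this matches the RM 13.11 profile of such procedures,
   e.g.

     procedure Deallocate
       (Pool                     : in out Root_Storage_Pool;
        Storage_Address          : in System.Address;
        Size_In_Storage_Elements : in Storage_Count;
        Alignment                : in Storage_Count);

   where the size is indeed the third formal, counting the pool.  */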
2102 Entity_Id gnat_size_type
2103 = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
2104 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2105
2106 tree gnu_pool = gnat_to_gnu (gnat_pool);
2107 tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
2108 tree gnu_align = size_int (TYPE_ALIGN (gnu_type) / BITS_PER_UNIT);
2109
2110 gnu_size = convert (gnu_size_type, gnu_size);
2111 gnu_align = convert (gnu_size_type, gnu_align);
2112
2113 /* The first arg is always the address of the storage pool; for a
2114 deallocator, the address of the object comes next; then come the
2115 size and alignment. */
2116 if (gnu_obj)
2117 gnu_call = build_call_n_expr (gnu_proc, 4, gnu_pool_addr, gnu_obj,
2118 gnu_size, gnu_align);
2119 else
2120 gnu_call = build_call_n_expr (gnu_proc, 3, gnu_pool_addr,
2121 gnu_size, gnu_align);
2122 }
2123
2124 /* Secondary stack case. */
2125 else
2126 {
2127 /* The size is the second parameter. */
2128 Entity_Id gnat_size_type
2129 = Etype (Next_Formal (First_Formal (gnat_proc)));
2130 tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
2131
2132 gnu_size = convert (gnu_size_type, gnu_size);
2133
2134 /* For a deallocator, the first arg is the address of the object;
2135 then comes the size. */
2136 if (gnu_obj)
2137 gnu_call = build_call_n_expr (gnu_proc, 2, gnu_obj, gnu_size);
2138 else
2139 gnu_call = build_call_n_expr (gnu_proc, 1, gnu_size);
2140 }
2141
2142 return gnu_call;
2143 }
2144
2145 /* Helper for build_call_alloc_dealloc, to build and return an allocator for
2146 DATA_SIZE bytes aimed at containing a DATA_TYPE object, using the default
2147 __gnat_malloc allocator. Honor DATA_TYPE alignment requirements greater
2148 than what the latter offers. */
2149
2150 static inline tree
2151 maybe_wrap_malloc (tree data_size, tree data_type, Node_Id gnat_node)
2152 {
2153 /* When the DATA_TYPE alignment is stricter than what malloc offers
2154 (super-aligned case), we allocate an "aligning" wrapper type and return
2155 the address of its single data field with the malloc's return value
2156 stored just in front. */
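/* A sketch of the resulting layout, with made-up addresses, a 16-byte
   system allocator alignment and a 64-byte DATA_TYPE alignment on a
   64-bit machine:

     0x1000  malloc's return value (storage_ptr)
     ......  padding introduced by the aligning type
     0x1038  storage_ptr saved here (POINTER_SIZE bytes)
     0x1040  data field, 64-byte aligned: the address we return  */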
2157
2158 unsigned int data_align = TYPE_ALIGN (data_type);
2159 unsigned int system_allocator_alignment
2160 = get_target_system_allocator_alignment () * BITS_PER_UNIT;
2161
2162 tree aligning_type
2163 = ((data_align > system_allocator_alignment)
2164 ? make_aligning_type (data_type, data_align, data_size,
2165 system_allocator_alignment,
2166 POINTER_SIZE / BITS_PER_UNIT,
2167 gnat_node)
2168 : NULL_TREE);
2169
2170 tree size_to_malloc
2171 = aligning_type ? TYPE_SIZE_UNIT (aligning_type) : data_size;
2172
2173 tree malloc_ptr = build_call_n_expr (malloc_decl, 1, size_to_malloc);
2174
2175 if (aligning_type)
2176 {
2177 /* Latch malloc's return value and get a pointer to the aligning field
2178 first. */
2179 tree storage_ptr = gnat_protect_expr (malloc_ptr);
2180
2181 tree aligning_record_addr
2182 = convert (build_pointer_type (aligning_type), storage_ptr);
2183
2184 tree aligning_record
2185 = build_unary_op (INDIRECT_REF, NULL_TREE, aligning_record_addr);
2186
2187 tree aligning_field
2188 = build_component_ref (aligning_record, NULL_TREE,
2189 TYPE_FIELDS (aligning_type), false);
2190
2191 tree aligning_field_addr
2192 = build_unary_op (ADDR_EXPR, NULL_TREE, aligning_field);
2193
2194 /* Then arrange to store the allocator's return value ahead
2195 and return. */
2196 tree storage_ptr_slot_addr
2197 = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
2198 convert (ptr_void_type_node, aligning_field_addr),
2199 size_int (-(HOST_WIDE_INT) POINTER_SIZE
2200 / BITS_PER_UNIT));
2201
2202 tree storage_ptr_slot
2203 = build_unary_op (INDIRECT_REF, NULL_TREE,
2204 convert (build_pointer_type (ptr_void_type_node),
2205 storage_ptr_slot_addr));
2206
2207 return
2208 build2 (COMPOUND_EXPR, TREE_TYPE (aligning_field_addr),
2209 build_binary_op (INIT_EXPR, NULL_TREE,
2210 storage_ptr_slot, storage_ptr),
2211 aligning_field_addr);
2212 }
2213 else
2214 return malloc_ptr;
2215 }
2216
2217 /* Helper for build_call_alloc_dealloc, to release a DATA_TYPE object
2218 designated by DATA_PTR using the __gnat_free entry point. */
2219
2220 static inline tree
2221 maybe_wrap_free (tree data_ptr, tree data_type)
2222 {
2223 /* In the regular alignment case, we pass the data pointer straight to free.
2224 In the super-aligned case, we need to retrieve the initial allocator
2225 return value, stored in front of the data block at allocation time. */
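/* With the layout sketched in maybe_wrap_malloc above, DATA_PTR would
   be 0x1040 and the original malloc return value would be fetched from
   0x1038 and handed to free.  */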
2226
2227 unsigned int data_align = TYPE_ALIGN (data_type);
2228 unsigned int system_allocator_alignment
2229 = get_target_system_allocator_alignment () * BITS_PER_UNIT;
2230
2231 tree free_ptr;
2232
2233 if (data_align > system_allocator_alignment)
2234 {
2235 /* DATA_FRONT_PTR (void *)
2236 = (void *)DATA_PTR - sizeof (void *) */
2237 tree data_front_ptr
2238 = build_binary_op
2239 (POINTER_PLUS_EXPR, ptr_void_type_node,
2240 convert (ptr_void_type_node, data_ptr),
2241 size_int (-(HOST_WIDE_INT) POINTER_SIZE / BITS_PER_UNIT));
2242
2243 /* FREE_PTR (void *) = *(void **)DATA_FRONT_PTR */
2244 free_ptr
2245 = build_unary_op
2246 (INDIRECT_REF, NULL_TREE,
2247 convert (build_pointer_type (ptr_void_type_node), data_front_ptr));
2248 }
2249 else
2250 free_ptr = data_ptr;
2251
2252 return build_call_n_expr (free_decl, 1, free_ptr);
2253 }
2254
2255 /* Build a GCC tree to call an allocation or deallocation function.
2256 If GNU_OBJ is nonzero, it is an object to deallocate. Otherwise,
2257 generate an allocator.
2258
2259 GNU_SIZE is the number of bytes to allocate and GNU_TYPE is the contained
2260 object type, used to determine the to-be-honored address alignment.
2261 GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the storage
2262 pool to use. If not present, malloc and free are used. GNAT_NODE is used
2263 to provide an error location for restriction violation messages. */
2264
2265 tree
2266 build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, tree gnu_type,
2267 Entity_Id gnat_proc, Entity_Id gnat_pool,
2268 Node_Id gnat_node)
2269 {
2270 gnu_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (gnu_size, gnu_obj);
2271
2272 /* Is there an explicit procedure to call? If so, it is assumed to deal
2273 with the type alignment constraints. */
2274 if (Present (gnat_proc))
2275 return build_call_alloc_dealloc_proc (gnu_obj, gnu_size, gnu_type,
2276 gnat_proc, gnat_pool);
2277
2278 /* Otherwise, object to "free" or "malloc" with possible special processing
2279 for alignments stricter than what the default allocator honors. */
2280 else if (gnu_obj)
2281 return maybe_wrap_free (gnu_obj, gnu_type);
2282 else
2283 {
2284 /* Assert that we can no longer be called with this special pool. */
2285 gcc_assert (gnat_pool != -1);
2286
2287 /* Check that we aren't violating the associated restriction. */
2288 if (!(Nkind (gnat_node) == N_Allocator && Comes_From_Source (gnat_node)))
2289 Check_No_Implicit_Heap_Alloc (gnat_node);
2290
2291 return maybe_wrap_malloc (gnu_size, gnu_type, gnat_node);
2292 }
2293 }
2294 \f
2295 /* Build a GCC tree that corresponds to allocating an object of TYPE whose
2296 initial value is INIT, if INIT is nonzero. Convert the expression to
2297 RESULT_TYPE, which must be some pointer type, and return the result.
2298
2299 GNAT_PROC and GNAT_POOL optionally give the procedure to call and
2300 the storage pool to use. GNAT_NODE is used to provide an error
2301 location for restriction violation messages. If IGNORE_INIT_TYPE is
2302 true, ignore the type of INIT for the purpose of determining the size;
2303 this will cause the maximum size to be allocated if TYPE is of
2304 self-referential size. */
2305
2306 tree
2307 build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
2308 Entity_Id gnat_pool, Node_Id gnat_node, bool ignore_init_type)
2309 {
2310 tree size, storage, storage_deref, storage_init;
2311
2312 /* If the initializer, if present, is a NULL_EXPR, just return a new one. */
2313 if (init && TREE_CODE (init) == NULL_EXPR)
2314 return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));
2315
2316 /* If the initializer, if present, is a COND_EXPR, deal with each branch. */
2317 else if (init && TREE_CODE (init) == COND_EXPR)
2318 return build3 (COND_EXPR, result_type, TREE_OPERAND (init, 0),
2319 build_allocator (type, TREE_OPERAND (init, 1), result_type,
2320 gnat_proc, gnat_pool, gnat_node,
2321 ignore_init_type),
2322 build_allocator (type, TREE_OPERAND (init, 2), result_type,
2323 gnat_proc, gnat_pool, gnat_node,
2324 ignore_init_type));
2325
2326 /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
2327 sizes of the object and its template. Allocate the whole thing and
2328 fill in the parts that are known. */
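/* The storage type built below lays out the template (bounds) first and
   the object right after it, which is reflected by the two-element
   constructor assembled when INIT is present.  */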
2329 else if (TYPE_IS_FAT_OR_THIN_POINTER_P (result_type))
2330 {
2331 tree storage_type
2332 = build_unc_object_type_from_ptr (result_type, type,
2333 get_identifier ("ALLOC"), false);
2334 tree template_type = TREE_TYPE (TYPE_FIELDS (storage_type));
2335 tree storage_ptr_type = build_pointer_type (storage_type);
2336
2337 size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
2338 init);
2339
2340 /* If the size overflows, pass -1 so Storage_Error will be raised. */
2341 if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
2342 size = size_int (-1);
2343
2344 storage = build_call_alloc_dealloc (NULL_TREE, size, storage_type,
2345 gnat_proc, gnat_pool, gnat_node);
2346 storage = convert (storage_ptr_type, gnat_protect_expr (storage));
2347 storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2348 TREE_THIS_NOTRAP (storage_deref) = 1;
2349
2350 /* If there is an initializing expression, then make a constructor for
2351 the entire object including the bounds and copy it into the object.
2352 If there is no initializing expression, just set the bounds. */
2353 if (init)
2354 {
2355 vec<constructor_elt, va_gc> *v;
2356 vec_alloc (v, 2);
2357
2358 CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (storage_type),
2359 build_template (template_type, type, init));
2360 CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (storage_type)),
2361 init);
2362 storage_init
2363 = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref,
2364 gnat_build_constructor (storage_type, v));
2365 }
2366 else
2367 storage_init
2368 = build_binary_op (INIT_EXPR, NULL_TREE,
2369 build_component_ref (storage_deref, NULL_TREE,
2370 TYPE_FIELDS (storage_type),
2371 false),
2372 build_template (template_type, type, NULL_TREE));
2373
2374 return build2 (COMPOUND_EXPR, result_type,
2375 storage_init, convert (result_type, storage));
2376 }
2377
2378 size = TYPE_SIZE_UNIT (type);
2379
2380 /* If we have an initializing expression, see if its size is simpler
2381 than the size from the type. */
2382 if (!ignore_init_type && init && TYPE_SIZE_UNIT (TREE_TYPE (init))
2383 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
2384 || CONTAINS_PLACEHOLDER_P (size)))
2385 size = TYPE_SIZE_UNIT (TREE_TYPE (init));
2386
2387 /* If the size is still self-referential, reference the initializing
2388 expression, if it is present. If not, this must have been a
2389 call to allocate a library-level object, in which case we use
2390 the maximum size. */
2391 if (CONTAINS_PLACEHOLDER_P (size))
2392 {
2393 if (!ignore_init_type && init)
2394 size = substitute_placeholder_in_expr (size, init);
2395 else
2396 size = max_size (size, true);
2397 }
2398
2399 /* If the size overflows, pass -1 so Storage_Error will be raised. */
2400 if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
2401 size = size_int (-1);
2402
2403 storage = convert (result_type,
2404 build_call_alloc_dealloc (NULL_TREE, size, type,
2405 gnat_proc, gnat_pool,
2406 gnat_node));
2407
2408 /* If we have an initial value, protect the new address, assign the value
2409 and return the address with a COMPOUND_EXPR. */
2410 if (init)
2411 {
2412 storage = gnat_protect_expr (storage);
2413 storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
2414 TREE_THIS_NOTRAP (storage_deref) = 1;
2415 storage_init
2416 = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref, init);
2417 return build2 (COMPOUND_EXPR, result_type, storage_init, storage);
2418 }
2419
2420 return storage;
2421 }
2422 \f
2423 /* Indicate that we need to take the address of T and that it therefore
2424 should not be allocated in a register. Returns true if successful. */
2425
2426 bool
2427 gnat_mark_addressable (tree t)
2428 {
2429 while (true)
2430 switch (TREE_CODE (t))
2431 {
2432 case ADDR_EXPR:
2433 case COMPONENT_REF:
2434 case ARRAY_REF:
2435 case ARRAY_RANGE_REF:
2436 case REALPART_EXPR:
2437 case IMAGPART_EXPR:
2438 case VIEW_CONVERT_EXPR:
2439 case NON_LVALUE_EXPR:
2440 CASE_CONVERT:
2441 t = TREE_OPERAND (t, 0);
2442 break;
2443
2444 case COMPOUND_EXPR:
2445 t = TREE_OPERAND (t, 1);
2446 break;
2447
2448 case CONSTRUCTOR:
2449 TREE_ADDRESSABLE (t) = 1;
2450 return true;
2451
2452 case VAR_DECL:
2453 case PARM_DECL:
2454 case RESULT_DECL:
2455 TREE_ADDRESSABLE (t) = 1;
2456 return true;
2457
2458 case FUNCTION_DECL:
2459 TREE_ADDRESSABLE (t) = 1;
2460 return true;
2461
2462 case CONST_DECL:
2463 return DECL_CONST_CORRESPONDING_VAR (t)
2464 && gnat_mark_addressable (DECL_CONST_CORRESPONDING_VAR (t));
2465
2466 default:
2467 return true;
2468 }
2469 }
2470 \f
2471 /* Save EXP for later use or reuse. This is equivalent to save_expr in tree.c
2472 but we know how to handle our own nodes. */
2473
2474 tree
2475 gnat_save_expr (tree exp)
2476 {
2477 tree type = TREE_TYPE (exp);
2478 enum tree_code code = TREE_CODE (exp);
2479
2480 if (TREE_CONSTANT (exp) || code == SAVE_EXPR || code == NULL_EXPR)
2481 return exp;
2482
2483 if (code == UNCONSTRAINED_ARRAY_REF)
2484 {
2485 tree t = build1 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)));
2486 TREE_READONLY (t) = TYPE_READONLY (type);
2487 return t;
2488 }
2489
2490 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2491 This may be more efficient, but will also allow us to more easily find
2492 the match for the PLACEHOLDER_EXPR. */
2493 if (code == COMPONENT_REF
2494 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2495 return build3 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)),
2496 TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));
2497
2498 return save_expr (exp);
2499 }
2500
2501 /* Protect EXP for immediate reuse. This is a variant of gnat_save_expr that
2502 is optimized under the assumption that EXP's value doesn't change before
2503 its subsequent reuse(s) except through its potential reevaluation. */
2504
2505 tree
2506 gnat_protect_expr (tree exp)
2507 {
2508 tree type = TREE_TYPE (exp);
2509 enum tree_code code = TREE_CODE (exp);
2510
2511 if (TREE_CONSTANT (exp) || code == SAVE_EXPR || code == NULL_EXPR)
2512 return exp;
2513
2514 /* If EXP has no side effects, we theoretically don't need to do anything.
2515 However, we may be recursively passed more and more complex expressions
2516 involving checks which will be reused multiple times and eventually be
2517 unshared for gimplification; in order to avoid a complexity explosion
2518 at that point, we protect any expressions more complex than a simple
2519 arithmetic expression. */
2520 if (!TREE_SIDE_EFFECTS (exp))
2521 {
2522 tree inner = skip_simple_arithmetic (exp);
2523 if (!EXPR_P (inner) || REFERENCE_CLASS_P (inner))
2524 return exp;
2525 }
2526
2527 /* If this is a conversion, protect what's inside the conversion. */
2528 if (code == NON_LVALUE_EXPR
2529 || CONVERT_EXPR_CODE_P (code)
2530 || code == VIEW_CONVERT_EXPR)
2531 return build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2532
2533 /* If we're indirectly referencing something, we only need to protect the
2534 address since the data itself can't change in these situations. */
2535 if (code == INDIRECT_REF || code == UNCONSTRAINED_ARRAY_REF)
2536 {
2537 tree t = build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
2538 TREE_READONLY (t) = TYPE_READONLY (type);
2539 return t;
2540 }
2541
2542 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2543 This may be more efficient, but will also allow us to more easily find
2544 the match for the PLACEHOLDER_EXPR. */
2545 if (code == COMPONENT_REF
2546 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2547 return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
2548 TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));
2549
2550 /* If this is a fat pointer or something that can be placed in a register,
2551 just make a SAVE_EXPR. Likewise for a CALL_EXPR, as large objects are
2552 returned via invisible reference in most ABIs, so the temporary will
2553 directly be filled by the callee. */
2554 if (TYPE_IS_FAT_POINTER_P (type)
2555 || TYPE_MODE (type) != BLKmode
2556 || code == CALL_EXPR)
2557 return save_expr (exp);
2558
2559 /* Otherwise, take a reference: protect the address and dereference it. */
2560 return
2561 build_unary_op (INDIRECT_REF, type,
2562 save_expr (build_unary_op (ADDR_EXPR,
2563 build_reference_type (type),
2564 exp)));
2565 }
2566
2567 /* This is equivalent to stabilize_reference_1 in tree.c but we take an extra
2568 argument to force evaluation of everything. */
2569
2570 static tree
2571 gnat_stabilize_reference_1 (tree e, bool force)
2572 {
2573 enum tree_code code = TREE_CODE (e);
2574 tree type = TREE_TYPE (e);
2575 tree result;
2576
2577 /* We cannot ignore const expressions because the expression might be
2578 a reference to a const array whose index contains side effects. But
2579 we can ignore things that are actually constant or that have already
2580 been handled by this function. */
2581 if (TREE_CONSTANT (e) || code == SAVE_EXPR)
2582 return e;
2583
2584 switch (TREE_CODE_CLASS (code))
2585 {
2586 case tcc_exceptional:
2587 case tcc_declaration:
2588 case tcc_comparison:
2589 case tcc_expression:
2590 case tcc_reference:
2591 case tcc_vl_exp:
2592 /* If this is a COMPONENT_REF of a fat pointer, save the entire
2593 fat pointer. This may be more efficient, but will also allow
2594 us to more easily find the match for the PLACEHOLDER_EXPR. */
2595 if (code == COMPONENT_REF
2596 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (e, 0))))
2597 result
2598 = build3 (code, type,
2599 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force),
2600 TREE_OPERAND (e, 1), TREE_OPERAND (e, 2));
2601 /* If the expression has side-effects, then encase it in a SAVE_EXPR
2602 so that it will only be evaluated once. */
2603 /* The tcc_reference and tcc_comparison classes could be handled as
2604 below, but it is generally faster to only evaluate them once. */
2605 else if (TREE_SIDE_EFFECTS (e) || force)
2606 return save_expr (e);
2607 else
2608 return e;
2609 break;
2610
2611 case tcc_binary:
2612 /* Recursively stabilize each operand. */
2613 result
2614 = build2 (code, type,
2615 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force),
2616 gnat_stabilize_reference_1 (TREE_OPERAND (e, 1), force));
2617 break;
2618
2619 case tcc_unary:
2620 /* Recursively stabilize each operand. */
2621 result
2622 = build1 (code, type,
2623 gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), force));
2624 break;
2625
2626 default:
2627 gcc_unreachable ();
2628 }
2629
2630 /* See similar handling in gnat_stabilize_reference. */
2631 TREE_READONLY (result) = TREE_READONLY (e);
2632 TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (e);
2633 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);
2634
2635 if (code == INDIRECT_REF
2636 || code == UNCONSTRAINED_ARRAY_REF
2637 || code == ARRAY_REF
2638 || code == ARRAY_RANGE_REF)
2639 TREE_THIS_NOTRAP (result) = TREE_THIS_NOTRAP (e);
2640
2641 return result;
2642 }
2643
2644 /* This is equivalent to stabilize_reference in tree.c but we know how to
2645 handle our own nodes and we take extra arguments. FORCE says whether to
2646 force evaluation of everything. We set SUCCESS to true unless we walk
2647 through something we don't know how to stabilize. */
2648
2649 tree
2650 gnat_stabilize_reference (tree ref, bool force, bool *success)
2651 {
2652 tree type = TREE_TYPE (ref);
2653 enum tree_code code = TREE_CODE (ref);
2654 tree result;
2655
2656 /* Assume we'll succeed unless proven otherwise. */
2657 if (success)
2658 *success = true;
2659
2660 switch (code)
2661 {
2662 case CONST_DECL:
2663 case VAR_DECL:
2664 case PARM_DECL:
2665 case RESULT_DECL:
2666 /* No action is needed in this case. */
2667 return ref;
2668
2669 case ADDR_EXPR:
2670 CASE_CONVERT:
2671 case FLOAT_EXPR:
2672 case FIX_TRUNC_EXPR:
2673 case VIEW_CONVERT_EXPR:
2674 result
2675 = build1 (code, type,
2676 gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2677 success));
2678 break;
2679
2680 case INDIRECT_REF:
2681 case UNCONSTRAINED_ARRAY_REF:
2682 result = build1 (code, type,
2683 gnat_stabilize_reference_1 (TREE_OPERAND (ref, 0),
2684 force));
2685 break;
2686
2687 case COMPONENT_REF:
2688 result = build3 (COMPONENT_REF, type,
2689 gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2690 success),
2691 TREE_OPERAND (ref, 1), NULL_TREE);
2692 break;
2693
2694 case BIT_FIELD_REF:
2695 result = build3 (BIT_FIELD_REF, type,
2696 gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2697 success),
2698 TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
2699 break;
2700
2701 case ARRAY_REF:
2702 case ARRAY_RANGE_REF:
2703 result = build4 (code, type,
2704 gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2705 success),
2706 gnat_stabilize_reference_1 (TREE_OPERAND (ref, 1),
2707 force),
2708 NULL_TREE, NULL_TREE);
2709 break;
2710
2711 case CALL_EXPR:
2712 result = gnat_stabilize_reference_1 (ref, force);
2713 break;
2714
2715 case COMPOUND_EXPR:
2716 result = build2 (COMPOUND_EXPR, type,
2717 gnat_stabilize_reference (TREE_OPERAND (ref, 0), force,
2718 success),
2719 gnat_stabilize_reference (TREE_OPERAND (ref, 1), force,
2720 success));
2721 break;
2722
2723 case CONSTRUCTOR:
2724 /* Constructors with 1 element are used extensively to formally
2725 convert objects to special wrapping types. */
2726 if (TREE_CODE (type) == RECORD_TYPE
2727 && vec_safe_length (CONSTRUCTOR_ELTS (ref)) == 1)
2728 {
2729 tree index = (*CONSTRUCTOR_ELTS (ref))[0].index;
2730 tree value = (*CONSTRUCTOR_ELTS (ref))[0].value;
2731 result
2732 = build_constructor_single (type, index,
2733 gnat_stabilize_reference_1 (value,
2734 force));
2735 }
2736 else
2737 {
2738 if (success)
2739 *success = false;
2740 return ref;
2741 }
2742 break;
2743
2744 case ERROR_MARK:
2745 ref = error_mark_node;
2746
2747 /* ... fall through to failure ... */
2748
2749 /* If REF isn't a kind of lvalue we recognize, make no change.
2750 The caller should recognize the error for an invalid lvalue. */
2751 default:
2752 if (success)
2753 *success = false;
2754 return ref;
2755 }
2756
2757 /* TREE_THIS_VOLATILE and TREE_SIDE_EFFECTS set on the initial expression
2758 may not be sustained across some paths, such as the way via build1 for
2759 INDIRECT_REF. We reset those flags here in the general case, which is
2760 consistent with the GCC version of this routine.
2761
2762 Special care should be taken regarding TREE_SIDE_EFFECTS, because some
2763 paths introduce side-effects where there was none initially (e.g. if a
2764 SAVE_EXPR is built) and we also want to keep track of that. */
2765 TREE_READONLY (result) = TREE_READONLY (ref);
2766 TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (ref);
2767 TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);
2768
2769 if (code == INDIRECT_REF
2770 || code == UNCONSTRAINED_ARRAY_REF
2771 || code == ARRAY_REF
2772 || code == ARRAY_RANGE_REF)
2773 TREE_THIS_NOTRAP (result) = TREE_THIS_NOTRAP (ref);
2774
2775 return result;
2776 }
2777
2778 /* If EXPR is an expression that is invariant in the current function, in the
2779 sense that it can be evaluated anywhere in the function and any number of
2780 times, return EXPR or an equivalent expression. Otherwise return NULL. */
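/* Typical invariant expressions, per the checks below: a read-only
   PARM_DECL, a read-only VAR_DECL defined outside the current function,
   or a COMPONENT_REF thereof accessed through a read-only, no-trap
   INDIRECT_REF.  */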
2781
2782 tree
2783 gnat_invariant_expr (tree expr)
2784 {
2785 tree type = TREE_TYPE (expr), t;
2786
2787 expr = remove_conversions (expr, false);
2788
2789 while ((TREE_CODE (expr) == CONST_DECL
2790 || (TREE_CODE (expr) == VAR_DECL && TREE_READONLY (expr)))
2791 && decl_function_context (expr) == current_function_decl
2792 && DECL_INITIAL (expr))
2793 {
2794 expr = DECL_INITIAL (expr);
2795 /* Look into CONSTRUCTORs built to initialize padded types. */
2796 if (TYPE_IS_PADDING_P (TREE_TYPE (expr)))
2797 expr = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (expr))), expr);
2798 expr = remove_conversions (expr, false);
2799 }
2800
2801 if (TREE_CONSTANT (expr))
2802 return fold_convert (type, expr);
2803
2804 t = expr;
2805
2806 while (true)
2807 {
2808 switch (TREE_CODE (t))
2809 {
2810 case COMPONENT_REF:
2811 if (TREE_OPERAND (t, 2) != NULL_TREE)
2812 return NULL_TREE;
2813 break;
2814
2815 case ARRAY_REF:
2816 case ARRAY_RANGE_REF:
2817 if (!TREE_CONSTANT (TREE_OPERAND (t, 1))
2818 || TREE_OPERAND (t, 2) != NULL_TREE
2819 || TREE_OPERAND (t, 3) != NULL_TREE)
2820 return NULL_TREE;
2821 break;
2822
2823 case BIT_FIELD_REF:
2824 case VIEW_CONVERT_EXPR:
2825 case REALPART_EXPR:
2826 case IMAGPART_EXPR:
2827 break;
2828
2829 case INDIRECT_REF:
2830 if (!TREE_READONLY (t)
2831 || TREE_SIDE_EFFECTS (t)
2832 || !TREE_THIS_NOTRAP (t))
2833 return NULL_TREE;
2834 break;
2835
2836 default:
2837 goto object;
2838 }
2839
2840 t = TREE_OPERAND (t, 0);
2841 }
2842
2843 object:
2844 if (TREE_SIDE_EFFECTS (t))
2845 return NULL_TREE;
2846
2847 if (TREE_CODE (t) == CONST_DECL
2848 && (DECL_EXTERNAL (t)
2849 || decl_function_context (t) != current_function_decl))
2850 return fold_convert (type, expr);
2851
2852 if (!TREE_READONLY (t))
2853 return NULL_TREE;
2854
2855 if (TREE_CODE (t) == PARM_DECL)
2856 return fold_convert (type, expr);
2857
2858 if (TREE_CODE (t) == VAR_DECL
2859 && (DECL_EXTERNAL (t)
2860 || decl_function_context (t) != current_function_decl))
2861 return fold_convert (type, expr);
2862
2863 return NULL_TREE;
2864 }