/****************************************************************************
 *                                                                          *
 *                         GNAT COMPILER COMPONENTS                         *
 *                                                                          *
 *                               U T I L S 2                                *
 *                                                                          *
 *                          C Implementation File                           *
 *                                                                          *
 *          Copyright (C) 1992-2015, Free Software Foundation, Inc.         *
 *                                                                          *
 * GNAT is free software; you can redistribute it and/or modify it under    *
 * terms of the GNU General Public License as published by the Free Soft-   *
 * ware Foundation; either version 3, or (at your option) any later ver-    *
 * sion.  GNAT is distributed in the hope that it will be useful, but WITH- *
 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY   *
 * or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License *
 * for more details.  You should have received a copy of the GNU General    *
 * Public License along with GCC; see the file COPYING3.  If not see        *
 * <http://www.gnu.org/licenses/>.                                          *
 *                                                                          *
 * GNAT was originally developed by the GNAT team at New York University.   *
 * Extensive contributions were provided by Ada Core Technologies Inc.      *
 *                                                                          *
 ****************************************************************************/

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "vec.h"
#include "alias.h"
#include "tree.h"
#include "inchash.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "stringpool.h"
#include "varasm.h"
#include "flags.h"
#include "toplev.h"
#include "ggc.h"
#include "tree-inline.h"

#include "ada.h"
#include "types.h"
#include "atree.h"
#include "elists.h"
#include "namet.h"
#include "nlists.h"
#include "snames.h"
#include "stringt.h"
#include "uintp.h"
#include "fe.h"
#include "sinfo.h"
#include "einfo.h"
#include "ada-tree.h"
#include "gigi.h"

/* Return the base type of TYPE.  */

tree
get_base_type (tree type)
{
  if (TREE_CODE (type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (type))
    type = TREE_TYPE (TYPE_FIELDS (type));

  while (TREE_TYPE (type)
	 && (TREE_CODE (type) == INTEGER_TYPE
	     || TREE_CODE (type) == REAL_TYPE))
    type = TREE_TYPE (type);

  return type;
}
\f
/* EXP is a GCC tree representing an address.  See if we can find how strictly
   the object at this address is aligned and, if so, return the alignment of
   the object in bits.  Otherwise return 0.  */

unsigned int
known_alignment (tree exp)
{
  unsigned int this_alignment;
  unsigned int lhs, rhs;

  switch (TREE_CODE (exp))
    {
    CASE_CONVERT:
    case VIEW_CONVERT_EXPR:
    case NON_LVALUE_EXPR:
      /* Conversions between pointers and integers don't change the alignment
	 of the underlying object.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 0));
      break;

    case COMPOUND_EXPR:
      /* The value of a COMPOUND_EXPR is that of its second operand.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* If two addresses are added, the alignment of the result is the
	 minimum of the two alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));
      this_alignment = MIN (lhs, rhs);
      break;

    case POINTER_PLUS_EXPR:
      /* If this is the pattern built for aligning types, decode it.  */
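      /* That pattern is P + (-P & MASK) with MASK = ALIGN - 1 (an
	 illustrative note, an assumption about the construction done
	 elsewhere in gigi), so the lowest bit set in ~MASK recovers
	 ALIGN, which is what the BIT_NOT_EXPR below computes.  */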
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 1), 0)) == NEGATE_EXPR)
	{
	  tree op = TREE_OPERAND (TREE_OPERAND (exp, 1), 1);
	  return
	    known_alignment (fold_build1 (BIT_NOT_EXPR, TREE_TYPE (op), op));
	}

      /* If we don't know the alignment of the offset, we assume that
	 of the base.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));

      if (rhs == 0)
	this_alignment = lhs;
      else
	this_alignment = MIN (lhs, rhs);
      break;

    case COND_EXPR:
      /* If there is a choice between two values, use the smaller one.  */
      lhs = known_alignment (TREE_OPERAND (exp, 1));
      rhs = known_alignment (TREE_OPERAND (exp, 2));
      this_alignment = MIN (lhs, rhs);
      break;

    case INTEGER_CST:
      {
	unsigned HOST_WIDE_INT c = TREE_INT_CST_LOW (exp);
	/* The first part of this represents the lowest bit in the constant,
	   but it is originally in bytes, not bits.  */
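	/* For example (purely illustrative): c = 12 has lowest set bit
	   c & -c = 4, so a byte offset of 12 preserves an alignment of
	   4 * BITS_PER_UNIT = 32 bits on a typical 8-bit-byte target.  */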
	this_alignment = (c & -c) * BITS_PER_UNIT;
      }
      break;

    case MULT_EXPR:
      /* If we know the alignment of just one side, use it.  Otherwise,
	 use the product of the alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));

      if (lhs == 0)
	this_alignment = rhs;
      else if (rhs == 0)
	this_alignment = lhs;
      else
	this_alignment = MIN (lhs * rhs, BIGGEST_ALIGNMENT);
      break;

    case BIT_AND_EXPR:
      /* A bit-and expression is as aligned as the maximum alignment of the
	 operands.  We typically get here for a complex lhs and a constant
	 negative power of two on the rhs to force an explicit alignment, so
	 don't bother looking at the lhs.  */
      this_alignment = known_alignment (TREE_OPERAND (exp, 1));
      break;

    case ADDR_EXPR:
      this_alignment = expr_align (TREE_OPERAND (exp, 0));
      break;

    case CALL_EXPR:
      {
	tree t = maybe_inline_call_in_expr (exp);
	if (t)
	  return known_alignment (t);
      }

      /* ... fall through ... */

    default:
      /* For other pointer expressions, we assume that the pointed-to object
	 is at least as aligned as the pointed-to type.  Beware that we can
	 have a dummy type here (e.g. a Taft Amendment type), for which the
	 alignment is meaningless and should be ignored.  */
      if (POINTER_TYPE_P (TREE_TYPE (exp))
	  && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp))))
	this_alignment = TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp)));
      else
	this_alignment = 0;
      break;
    }

  return this_alignment;
}
\f
/* We have a comparison or assignment operation on two types, T1 and T2, which
   are either both array types or both record types.  T1 is assumed to be for
   the left hand side operand, and T2 for the right hand side.  Return the
   type that both operands should be converted to for the operation, if any.
   Otherwise return zero.  */

static tree
find_common_type (tree t1, tree t2)
{
  /* ??? As of today, various constructs lead here with types of different
     sizes even when both are constant (e.g. tagged types, packable vs
     regular component types, padded vs unpadded types, ...).  While some of
     these would better be handled upstream (types should be made consistent
     before calling into build_binary_op), some others are really expected
     and we have to be careful.  */

  /* We must avoid writing more than what the target can hold if this is for
     an assignment, and the case of tagged types is handled in
     build_binary_op, so we use the lhs type if it is known to be smaller or
     of constant size and the rhs type is not, whatever the modes.  We also
     force t1 in case of constant size equality to minimize occurrences of
     view conversions on the lhs of an assignment, except for the case of
     record types with a variant part on the lhs but not on the rhs, to make
     the conversion simpler.  */
  if (TREE_CONSTANT (TYPE_SIZE (t1))
      && (!TREE_CONSTANT (TYPE_SIZE (t2))
	  || tree_int_cst_lt (TYPE_SIZE (t1), TYPE_SIZE (t2))
	  || (TYPE_SIZE (t1) == TYPE_SIZE (t2)
	      && !(TREE_CODE (t1) == RECORD_TYPE
		   && TREE_CODE (t2) == RECORD_TYPE
		   && get_variant_part (t1) != NULL_TREE
		   && get_variant_part (t2) == NULL_TREE))))
    return t1;

  /* Otherwise, if the lhs type is non-BLKmode, use it.  Note that we know
     that we will not have any alignment problems since, if we did, the
     non-BLKmode type could not have been used.  */
  if (TYPE_MODE (t1) != BLKmode)
    return t1;

  /* If the rhs type is of constant size, use it whatever the modes.  At
     this point it is known to be smaller, or of constant size and the
     lhs type is not.  */
  if (TREE_CONSTANT (TYPE_SIZE (t2)))
    return t2;

  /* Otherwise, if the rhs type is non-BLKmode, use it.  */
  if (TYPE_MODE (t2) != BLKmode)
    return t2;

  /* In this case, both types have variable size and BLKmode.  It's
     probably best to leave the "type mismatch" because changing it
     could cause a bad self-referential reference.  */
  return NULL_TREE;
}
\f
/* Return an expression tree representing an equality comparison of A1 and A2,
   two objects of type ARRAY_TYPE.  The result should be of type RESULT_TYPE.

   Two arrays are equal in one of two ways: (1) if both have zero length in
   some dimension (not necessarily the same dimension) or (2) if the lengths
   in each dimension are equal and the data is equal.  We perform the length
   tests in as efficient a manner as possible.  */
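
/* As an illustration (a sketch, not the literal tree built below), for
   one-dimensional arrays this amounts to

     (len (A1) = 0 and then len (A2) = 0)
       or else (len (A1) = len (A2) and then A1's data = A2's data)

   with the length tests folded whenever one of the lengths is constant.  */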

static tree
compare_arrays (location_t loc, tree result_type, tree a1, tree a2)
{
  tree result = convert (result_type, boolean_true_node);
  tree a1_is_null = convert (result_type, boolean_false_node);
  tree a2_is_null = convert (result_type, boolean_false_node);
  tree t1 = TREE_TYPE (a1);
  tree t2 = TREE_TYPE (a2);
  bool a1_side_effects_p = TREE_SIDE_EFFECTS (a1);
  bool a2_side_effects_p = TREE_SIDE_EFFECTS (a2);
  bool length_zero_p = false;

  /* If the operands have side-effects, they need to be evaluated only once
     in spite of the multiple references in the comparison.  */
  if (a1_side_effects_p)
    a1 = gnat_protect_expr (a1);

  if (a2_side_effects_p)
    a2 = gnat_protect_expr (a2);

  /* Process each dimension separately and compare the lengths.  If any
     dimension has a length known to be zero, set LENGTH_ZERO_P to true
     in order to suppress the comparison of the data at the end.  */
  while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
    {
      tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
      tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
      tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
      tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
      tree length1 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub1, lb1),
				 size_one_node);
      tree length2 = size_binop (PLUS_EXPR, size_binop (MINUS_EXPR, ub2, lb2),
				 size_one_node);
      tree comparison, this_a1_is_null, this_a2_is_null;

      /* If the length of the first array is a constant, swap our operands
	 unless the length of the second array is the constant zero.  */
      if (TREE_CODE (length1) == INTEGER_CST && !integer_zerop (length2))
	{
	  tree tem;
	  bool btem;

	  tem = a1, a1 = a2, a2 = tem;
	  tem = t1, t1 = t2, t2 = tem;
	  tem = lb1, lb1 = lb2, lb2 = tem;
	  tem = ub1, ub1 = ub2, ub2 = tem;
	  tem = length1, length1 = length2, length2 = tem;
	  tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
	  btem = a1_side_effects_p, a1_side_effects_p = a2_side_effects_p,
	  a2_side_effects_p = btem;
	}

      /* If the length of the second array is the constant zero, we can just
	 use the original stored bounds for the first array and see whether
	 last < first holds.  */
      if (integer_zerop (length2))
	{
	  tree b = get_base_type (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));

	  length_zero_p = true;

	  ub1
	    = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
	  lb1
	    = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));

	  comparison = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);
	  comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
	  if (EXPR_P (comparison))
	    SET_EXPR_LOCATION (comparison, loc);

	  this_a1_is_null = comparison;
	  this_a2_is_null = convert (result_type, boolean_true_node);
	}

      /* Otherwise, if the length is some other constant value, we know that
	 this dimension in the second array cannot be superflat, so we can
	 just use its length computed from the actual stored bounds.  */
      else if (TREE_CODE (length2) == INTEGER_CST)
	{
	  tree b = get_base_type (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));

	  ub1
	    = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
	  lb1
	    = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1))));
	  /* Note that we know that UB2 and LB2 are constant and hence
	     cannot contain a PLACEHOLDER_EXPR.  */
	  ub2
	    = convert (b, TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2))));
	  lb2
	    = convert (b, TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2))));

	  comparison
	    = fold_build2_loc (loc, EQ_EXPR, result_type,
			       build_binary_op (MINUS_EXPR, b, ub1, lb1),
			       build_binary_op (MINUS_EXPR, b, ub2, lb2));
	  comparison = SUBSTITUTE_PLACEHOLDER_IN_EXPR (comparison, a1);
	  if (EXPR_P (comparison))
	    SET_EXPR_LOCATION (comparison, loc);

	  this_a1_is_null
	    = fold_build2_loc (loc, LT_EXPR, result_type, ub1, lb1);

	  this_a2_is_null = convert (result_type, boolean_false_node);
	}

      /* Otherwise, compare the computed lengths.  */
      else
	{
	  length1 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1, a1);
	  length2 = SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2, a2);

	  comparison
	    = fold_build2_loc (loc, EQ_EXPR, result_type, length1, length2);

	  /* If the length expression is of the form (cond ? val : 0), assume
	     that cond is equivalent to (length != 0).  That's guaranteed by
	     construction of the array types in gnat_to_gnu_entity.  */
	  if (TREE_CODE (length1) == COND_EXPR
	      && integer_zerop (TREE_OPERAND (length1, 2)))
	    this_a1_is_null
	      = invert_truthvalue_loc (loc, TREE_OPERAND (length1, 0));
	  else
	    this_a1_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
					       length1, size_zero_node);

	  /* Likewise for the second array.  */
	  if (TREE_CODE (length2) == COND_EXPR
	      && integer_zerop (TREE_OPERAND (length2, 2)))
	    this_a2_is_null
	      = invert_truthvalue_loc (loc, TREE_OPERAND (length2, 0));
	  else
	    this_a2_is_null = fold_build2_loc (loc, EQ_EXPR, result_type,
					       length2, size_zero_node);
	}

      /* Append expressions for this dimension to the final expressions.  */
      result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
				result, comparison);

      a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
				    this_a1_is_null, a1_is_null);

      a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
				    this_a2_is_null, a2_is_null);

      t1 = TREE_TYPE (t1);
      t2 = TREE_TYPE (t2);
    }

  /* Unless the length of some dimension is known to be zero, compare the
     data in the array.  */
  if (!length_zero_p)
    {
      tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));
      tree comparison;

      if (type)
	{
	  a1 = convert (type, a1);
	  a2 = convert (type, a2);
	}

      comparison = fold_build2_loc (loc, EQ_EXPR, result_type, a1, a2);

      result
	= build_binary_op (TRUTH_ANDIF_EXPR, result_type, result, comparison);
    }

  /* The result is also true if both sizes are zero.  */
  result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
			    build_binary_op (TRUTH_ANDIF_EXPR, result_type,
					     a1_is_null, a2_is_null),
			    result);

  /* If the operands have side-effects, they need to be evaluated before
     doing the tests above since the place they otherwise would end up
     being evaluated at run time could be wrong.  */
  if (a1_side_effects_p)
    result = build2 (COMPOUND_EXPR, result_type, a1, result);

  if (a2_side_effects_p)
    result = build2 (COMPOUND_EXPR, result_type, a2, result);

  return result;
}

/* Return an expression tree representing an equality comparison of P1 and P2,
   two objects of fat pointer type.  The result should be of type RESULT_TYPE.

   Two fat pointers are equal in one of two ways: (1) if both have a null
   pointer to the array or (2) if they contain the same pair of pointers.
   We perform the comparison in as efficient a manner as possible.  */

static tree
compare_fat_pointers (location_t loc, tree result_type, tree p1, tree p2)
{
  tree p1_array, p2_array, p1_bounds, p2_bounds, same_array, same_bounds;
  tree p1_array_is_null, p2_array_is_null;

  /* If either operand has side-effects, it has to be evaluated only once
     in spite of the multiple references to it in the comparison.  */
  p1 = gnat_protect_expr (p1);
  p2 = gnat_protect_expr (p2);

  /* The constant folder doesn't fold fat pointer types so we do it here.  */
  if (TREE_CODE (p1) == CONSTRUCTOR)
    p1_array = CONSTRUCTOR_ELT (p1, 0)->value;
  else
    p1_array = build_component_ref (p1, NULL_TREE,
				    TYPE_FIELDS (TREE_TYPE (p1)), true);

  p1_array_is_null
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array,
		       fold_convert_loc (loc, TREE_TYPE (p1_array),
					 null_pointer_node));

  if (TREE_CODE (p2) == CONSTRUCTOR)
    p2_array = CONSTRUCTOR_ELT (p2, 0)->value;
  else
    p2_array = build_component_ref (p2, NULL_TREE,
				    TYPE_FIELDS (TREE_TYPE (p2)), true);

  p2_array_is_null
    = fold_build2_loc (loc, EQ_EXPR, result_type, p2_array,
		       fold_convert_loc (loc, TREE_TYPE (p2_array),
					 null_pointer_node));

  /* If one of the pointers to the array is null, just compare the other.  */
  if (integer_zerop (p1_array))
    return p2_array_is_null;
  else if (integer_zerop (p2_array))
    return p1_array_is_null;

  /* Otherwise, do the fully-fledged comparison.  */
  same_array
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_array, p2_array);

  if (TREE_CODE (p1) == CONSTRUCTOR)
    p1_bounds = CONSTRUCTOR_ELT (p1, 1)->value;
  else
    p1_bounds
      = build_component_ref (p1, NULL_TREE,
			     DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p1))), true);

  if (TREE_CODE (p2) == CONSTRUCTOR)
    p2_bounds = CONSTRUCTOR_ELT (p2, 1)->value;
  else
    p2_bounds
      = build_component_ref (p2, NULL_TREE,
			     DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p2))), true);

  same_bounds
    = fold_build2_loc (loc, EQ_EXPR, result_type, p1_bounds, p2_bounds);

  /* P1_ARRAY == P2_ARRAY && (P1_ARRAY == NULL || P1_BOUNDS == P2_BOUNDS).  */
  return build_binary_op (TRUTH_ANDIF_EXPR, result_type, same_array,
			  build_binary_op (TRUTH_ORIF_EXPR, result_type,
					   p1_array_is_null, same_bounds));
}
\f
/* Compute the result of applying OP_CODE to LHS and RHS, where both are of
   type TYPE.  We know that TYPE is a modular type with a nonbinary
   modulus.  */
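
/* For example (purely illustrative): for a type with modulus 250 held in
   8 bits, an addition is done in 9 bits and followed by a conditional
   subtraction of 250, whereas a multiplication is done in 16 bits and
   followed by a genuine TRUNC_MOD_EXPR by 250.  */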

static tree
nonbinary_modular_operation (enum tree_code op_code, tree type, tree lhs,
			     tree rhs)
{
  tree modulus = TYPE_MODULUS (type);
  unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
  unsigned int precision;
  bool unsignedp = true;
  tree op_type = type;
  tree result;

  /* If this is an addition of a constant, convert it to a subtraction
     of a constant since we can do that faster.  */
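  /* E.g. (illustrative) with modulus 7: X + 3 becomes X - 4, which is
     then handled by the subtraction path below and its single conditional
     add-back of the modulus.  */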
  if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
    {
      rhs = fold_build2 (MINUS_EXPR, type, modulus, rhs);
      op_code = MINUS_EXPR;
    }

  /* For the logical operations, we only need PRECISION bits.  For addition
     and subtraction, we need one more, and for multiplication we need twice
     as many.  But we never want to use a precision smaller than that of our
     type.  */
  if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
    needed_precision += 1;
  else if (op_code == MULT_EXPR)
    needed_precision *= 2;

  precision = MAX (needed_precision, TYPE_PRECISION (op_type));

  /* Unsigned will do for everything but subtraction.  */
  if (op_code == MINUS_EXPR)
    unsignedp = false;

  /* If our type is the wrong signedness or isn't wide enough, make a new
     type and convert both our operands to it.  */
  if (TYPE_PRECISION (op_type) < precision
      || TYPE_UNSIGNED (op_type) != unsignedp)
    {
      /* Copy the node so we ensure it can be modified to make it modular.  */
      op_type = copy_node (gnat_type_for_size (precision, unsignedp));
      modulus = convert (op_type, modulus);
      SET_TYPE_MODULUS (op_type, modulus);
      TYPE_MODULAR_P (op_type) = 1;
      lhs = convert (op_type, lhs);
      rhs = convert (op_type, rhs);
    }

  /* Do the operation, then we'll fix it up.  */
  result = fold_build2 (op_code, op_type, lhs, rhs);

  /* For multiplication, we have no choice but to do a full modulus
     operation.  However, we want to do this in the narrowest
     possible size.  */
  if (op_code == MULT_EXPR)
    {
      tree div_type = copy_node (gnat_type_for_size (needed_precision, 1));
      modulus = convert (div_type, modulus);
      SET_TYPE_MODULUS (div_type, modulus);
      TYPE_MODULAR_P (div_type) = 1;
      result = convert (op_type,
			fold_build2 (TRUNC_MOD_EXPR, div_type,
				     convert (div_type, result), modulus));
    }

  /* For subtraction, add the modulus back if we are negative.  */
  else if (op_code == MINUS_EXPR)
    {
      result = gnat_protect_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
			    fold_build2 (LT_EXPR, boolean_type_node, result,
					 convert (op_type, integer_zero_node)),
			    fold_build2 (PLUS_EXPR, op_type, result, modulus),
			    result);
    }

  /* For the other operations, subtract the modulus if we are >= it.  */
  else
    {
      result = gnat_protect_expr (result);
      result = fold_build3 (COND_EXPR, op_type,
			    fold_build2 (GE_EXPR, boolean_type_node,
					 result, modulus),
			    fold_build2 (MINUS_EXPR, op_type,
					 result, modulus),
			    result);
    }

  return convert (type, result);
}
\f
/* This page contains routines that implement the Ada semantics with regard
   to atomic objects.  They are fully piggybacked on the middle-end support
   for atomic loads and stores.

   *** Memory barriers and volatile objects ***

   We implement the weakened form of the C.6(16) clause that was introduced
   in Ada 2012 (AI05-117).  Earlier forms of this clause wouldn't have been
   implementable without significant performance hits on modern platforms.

   We also take advantage of the requirements imposed on shared variables by
   9.10 (conditions for sequential actions) to have non-erroneous execution
   and consider that C.6(16) and C.6(17) only prescribe a uniform order of
   volatile updates with regard to sequential actions, i.e. with regard to
   reads or updates of atomic objects.

   As such, an update of an atomic object by a task requires that all earlier
   accesses to volatile objects have completed.  Similarly, later accesses to
   volatile objects cannot be reordered before the update of the atomic object.
   So, memory barriers both before and after the atomic update are needed.

   For a read of an atomic object, to avoid seeing writes of volatile objects
   by a task earlier than by the other tasks, a memory barrier is needed before
   the atomic read.  Finally, to avoid reordering later reads or updates of
   volatile objects to before the atomic read, a barrier is needed after the
   atomic read.

   So, memory barriers are needed before and after atomic reads and updates.
   And, in order to simplify the implementation, we use full memory barriers
   in all cases, i.e. we enforce sequential consistency for atomic accesses.  */
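
/* As an illustration (a sketch of the expansion, not the literal tree),
   an atomic read of a properly sized and aligned 4-byte object X thus
   essentially becomes

     __atomic_load_4 (&X, __ATOMIC_SEQ_CST)

   for which the middle-end emits the required full memory barriers.  */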

/* Return the size of TYPE, which must be a positive power of 2.  */

static unsigned int
resolve_atomic_size (tree type)
{
  unsigned HOST_WIDE_INT size = tree_to_uhwi (TYPE_SIZE_UNIT (type));

  if (size == 1 || size == 2 || size == 4 || size == 8 || size == 16)
    return size;

  /* We shouldn't reach here without having already detected that the size
     isn't compatible with an atomic access.  */
  gcc_assert (Serious_Errors_Detected);

  return 0;
}

/* Build an atomic load for the underlying atomic object in SRC.  SYNC is
   true if the load requires synchronization.  */

tree
build_atomic_load (tree src, bool sync)
{
  tree ptr_type
    = build_pointer_type
      (build_qualified_type (void_type_node,
			     TYPE_QUAL_ATOMIC | TYPE_QUAL_VOLATILE));
  tree mem_model
    = build_int_cst (integer_type_node,
		     sync ? MEMMODEL_SEQ_CST : MEMMODEL_RELAXED);
  tree orig_src = src;
  tree t, addr, val;
  unsigned int size;
  int fncode;

  /* Remove conversions to get the address of the underlying object.  */
  src = remove_conversions (src, false);
  size = resolve_atomic_size (TREE_TYPE (src));
  if (size == 0)
    return orig_src;

  fncode = (int) BUILT_IN_ATOMIC_LOAD_N + exact_log2 (size) + 1;
  t = builtin_decl_implicit ((enum built_in_function) fncode);

  addr = build_unary_op (ADDR_EXPR, ptr_type, src);
  val = build_call_expr (t, 2, addr, mem_model);

  /* First reinterpret the loaded bits in the original type of the load,
     then convert to the expected result type.  */
  t = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (src), val);
  return convert (TREE_TYPE (orig_src), t);
}

/* Build an atomic store from SRC to the underlying atomic object in DEST.
   SYNC is true if the store requires synchronization.  */

tree
build_atomic_store (tree dest, tree src, bool sync)
{
  tree ptr_type
    = build_pointer_type
      (build_qualified_type (void_type_node,
			     TYPE_QUAL_ATOMIC | TYPE_QUAL_VOLATILE));
  tree mem_model
    = build_int_cst (integer_type_node,
		     sync ? MEMMODEL_SEQ_CST : MEMMODEL_RELAXED);
  tree orig_dest = dest;
  tree t, int_type, addr;
  unsigned int size;
  int fncode;

  /* Remove conversions to get the address of the underlying object.  */
  dest = remove_conversions (dest, false);
  size = resolve_atomic_size (TREE_TYPE (dest));
  if (size == 0)
    return build_binary_op (MODIFY_EXPR, NULL_TREE, orig_dest, src);

  fncode = (int) BUILT_IN_ATOMIC_STORE_N + exact_log2 (size) + 1;
  t = builtin_decl_implicit ((enum built_in_function) fncode);
  int_type = gnat_type_for_size (BITS_PER_UNIT * size, 1);

  /* First convert the bits to be stored to the original type of the store,
     then reinterpret them in the effective type.  But if the original type
     is a padded type with the same size, convert to the inner type instead,
     as we don't want to artificially introduce a CONSTRUCTOR here.  */
  if (TYPE_IS_PADDING_P (TREE_TYPE (dest))
      && TYPE_SIZE (TREE_TYPE (dest))
	 == TYPE_SIZE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (dest)))))
    src = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (dest))), src);
  else
    src = convert (TREE_TYPE (dest), src);
  src = fold_build1 (VIEW_CONVERT_EXPR, int_type, src);
  addr = build_unary_op (ADDR_EXPR, ptr_type, dest);

  return build_call_expr (t, 3, addr, src, mem_model);
}

/* Build a load-modify-store sequence from SRC to DEST.  GNAT_NODE is used for
   the location of the sequence.  Note that, even though the load and the store
   are both atomic, the sequence itself is not atomic.  */
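
/* Typical use (an illustrative note): for an assignment X.Y := E where X
   is an atomic object, the whole of X is atomically loaded into a
   temporary, the Y component of the temporary is assigned E, then the
   temporary is atomically stored back to X.  */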

tree
build_load_modify_store (tree dest, tree src, Node_Id gnat_node)
{
  /* We will be modifying DEST below so we build a copy.  */
  dest = copy_node (dest);
  tree ref = dest;

  while (handled_component_p (ref))
    {
      /* The load should already have been generated during the translation
	 of the GNAT destination tree; find it in the GNU tree.  */
      if (TREE_CODE (TREE_OPERAND (ref, 0)) == VIEW_CONVERT_EXPR)
	{
	  tree op = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
	  if (TREE_CODE (op) == CALL_EXPR && call_is_atomic_load (op))
	    {
	      tree type = TREE_TYPE (TREE_OPERAND (ref, 0));
	      tree t = CALL_EXPR_ARG (op, 0);
	      tree obj, temp, stmt;

	      /* Find the loaded object.  */
	      if (TREE_CODE (t) == NOP_EXPR)
		t = TREE_OPERAND (t, 0);
	      if (TREE_CODE (t) == ADDR_EXPR)
		obj = TREE_OPERAND (t, 0);
	      else
		obj = build1 (INDIRECT_REF, type, t);

	      /* Drop atomic and volatile qualifiers for the temporary.  */
	      type = TYPE_MAIN_VARIANT (type);

	      /* And drop BLKmode, if need be, to put it into a register.  */
	      if (TYPE_MODE (type) == BLKmode)
		{
		  unsigned int size = tree_to_uhwi (TYPE_SIZE (type));
		  type = copy_type (type);
		  SET_TYPE_MODE (type, mode_for_size (size, MODE_INT, 0));
		}

	      /* Create the temporary by inserting a SAVE_EXPR.  */
	      temp = build1 (SAVE_EXPR, type,
			     build1 (VIEW_CONVERT_EXPR, type, op));
	      TREE_OPERAND (ref, 0) = temp;

	      start_stmt_group ();

	      /* Build the modify of the temporary.  */
	      stmt = build_binary_op (MODIFY_EXPR, NULL_TREE, dest, src);
	      add_stmt_with_node (stmt, gnat_node);

	      /* Build the store to the object.  */
	      stmt = build_atomic_store (obj, temp, false);
	      add_stmt_with_node (stmt, gnat_node);

	      return end_stmt_group ();
	    }
	}

      TREE_OPERAND (ref, 0) = copy_node (TREE_OPERAND (ref, 0));
      ref = TREE_OPERAND (ref, 0);
    }

  /* Something went wrong earlier if we have not found the atomic load.  */
  gcc_unreachable ();
}
\f
/* Make a binary operation of kind OP_CODE.  RESULT_TYPE is the type
   desired for the result.  Usually the operation is to be performed
   in that type.  For INIT_EXPR and MODIFY_EXPR, RESULT_TYPE must be
   NULL_TREE.  For ARRAY_REF, RESULT_TYPE may be NULL_TREE, in which
   case the type to be used will be derived from the operands.

   This function is very much unlike the ones for C and C++ since we
   have already done any type conversion and matching required.  All we
   have to do here is validate the work done by SEM and handle subtypes.  */
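
/* A typical call (illustrative) is

     build_binary_op (PLUS_EXPR, gnu_type, gnu_lhs, gnu_rhs)

   where both operands have already been converted as described above.  */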

tree
build_binary_op (enum tree_code op_code, tree result_type,
		 tree left_operand, tree right_operand)
{
  tree left_type = TREE_TYPE (left_operand);
  tree right_type = TREE_TYPE (right_operand);
  tree left_base_type = get_base_type (left_type);
  tree right_base_type = get_base_type (right_type);
  tree operation_type = result_type;
  tree best_type = NULL_TREE;
  tree modulus, result;
  bool has_side_effects = false;

  if (operation_type
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  if (operation_type
      && TREE_CODE (operation_type) == INTEGER_TYPE
      && TYPE_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  modulus = (operation_type
	     && TREE_CODE (operation_type) == INTEGER_TYPE
	     && TYPE_MODULAR_P (operation_type)
	     ? TYPE_MODULUS (operation_type) : NULL_TREE);

  switch (op_code)
    {
    case INIT_EXPR:
    case MODIFY_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (result_type == NULL_TREE);
#endif
      /* If there were integral or pointer conversions on the LHS, remove
	 them; we'll be putting them back below if needed.  Likewise for
	 conversions between array and record types, except for justified
	 modular types.  But don't do this if the right operand is not
	 BLKmode (for packed arrays) unless we are not changing the mode.  */
      while ((CONVERT_EXPR_P (left_operand)
	      || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
	     && (((INTEGRAL_TYPE_P (left_type)
		   || POINTER_TYPE_P (left_type))
		  && (INTEGRAL_TYPE_P (TREE_TYPE
				       (TREE_OPERAND (left_operand, 0)))
		      || POINTER_TYPE_P (TREE_TYPE
					 (TREE_OPERAND (left_operand, 0)))))
		 || (((TREE_CODE (left_type) == RECORD_TYPE
		       && !TYPE_JUSTIFIED_MODULAR_P (left_type))
		      || TREE_CODE (left_type) == ARRAY_TYPE)
		     && ((TREE_CODE (TREE_TYPE
				     (TREE_OPERAND (left_operand, 0)))
			  == RECORD_TYPE)
			 || (TREE_CODE (TREE_TYPE
					(TREE_OPERAND (left_operand, 0)))
			     == ARRAY_TYPE))
		     && (TYPE_MODE (right_type) == BLKmode
			 || (TYPE_MODE (left_type)
			     == TYPE_MODE (TREE_TYPE
					   (TREE_OPERAND
					    (left_operand, 0))))))))
	{
	  left_operand = TREE_OPERAND (left_operand, 0);
	  left_type = TREE_TYPE (left_operand);
	}

      /* If a class-wide type may be involved, force use of the RHS type.  */
      if ((TREE_CODE (right_type) == RECORD_TYPE
	   || TREE_CODE (right_type) == UNION_TYPE)
	  && TYPE_ALIGN_OK (right_type))
	operation_type = right_type;

      /* If we are copying between padded objects with compatible types, use
	 the padded view of the objects; this is very likely more efficient.
	 Likewise for a padded object that is assigned a constructor, if we
	 can convert the constructor to the inner type, to avoid putting a
	 VIEW_CONVERT_EXPR on the LHS.  But don't do so if we wouldn't have
	 actually copied anything.  */
      else if (TYPE_IS_PADDING_P (left_type)
	       && TREE_CONSTANT (TYPE_SIZE (left_type))
	       && ((TREE_CODE (right_operand) == COMPONENT_REF
		    && TYPE_MAIN_VARIANT (left_type)
		       == TYPE_MAIN_VARIANT
			  (TREE_TYPE (TREE_OPERAND (right_operand, 0))))
		   || (TREE_CODE (right_operand) == CONSTRUCTOR
		       && !CONTAINS_PLACEHOLDER_P
			   (DECL_SIZE (TYPE_FIELDS (left_type)))))
	       && !integer_zerop (TYPE_SIZE (right_type)))
	{
	  /* We make an exception for a BLKmode type padding a non-BLKmode
	     inner type and do the conversion of the LHS right away, since
	     unchecked_convert wouldn't do it properly.  */
	  if (TYPE_MODE (left_type) == BLKmode
	      && TYPE_MODE (right_type) != BLKmode
	      && TREE_CODE (right_operand) != CONSTRUCTOR)
	    {
	      operation_type = right_type;
	      left_operand = convert (operation_type, left_operand);
	      left_type = operation_type;
	    }
	  else
	    operation_type = left_type;
	}

      /* If we have a call to a function that returns with variable size, use
	 the RHS type in case we want to use the return slot optimization.  */
      else if (TREE_CODE (right_operand) == CALL_EXPR
	       && return_type_with_variable_size_p (right_type))
	operation_type = right_type;

      /* Find the best type to use for copying between aggregate types.  */
      else if (((TREE_CODE (left_type) == ARRAY_TYPE
		 && TREE_CODE (right_type) == ARRAY_TYPE)
		|| (TREE_CODE (left_type) == RECORD_TYPE
		    && TREE_CODE (right_type) == RECORD_TYPE))
	       && (best_type = find_common_type (left_type, right_type)))
	operation_type = best_type;

      /* Otherwise use the LHS type.  */
      else
	operation_type = left_type;

      /* Ensure everything on the LHS is valid.  If we have a field reference,
	 strip anything that get_inner_reference can handle.  Then remove any
	 conversions between types having the same code and mode.  And mark
	 VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE.  When done, we must have
	 either an INDIRECT_REF, a NULL_EXPR, a SAVE_EXPR or a DECL node.  */
      result = left_operand;
      while (true)
	{
	  tree restype = TREE_TYPE (result);

	  if (TREE_CODE (result) == COMPONENT_REF
	      || TREE_CODE (result) == ARRAY_REF
	      || TREE_CODE (result) == ARRAY_RANGE_REF)
	    while (handled_component_p (result))
	      result = TREE_OPERAND (result, 0);
	  else if (TREE_CODE (result) == REALPART_EXPR
		   || TREE_CODE (result) == IMAGPART_EXPR
		   || (CONVERT_EXPR_P (result)
		       && (((TREE_CODE (restype)
			     == TREE_CODE (TREE_TYPE
					   (TREE_OPERAND (result, 0))))
			    && (TYPE_MODE (TREE_TYPE
					   (TREE_OPERAND (result, 0)))
				== TYPE_MODE (restype)))
			   || TYPE_ALIGN_OK (restype))))
	    result = TREE_OPERAND (result, 0);
	  else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
	    {
	      TREE_ADDRESSABLE (result) = 1;
	      result = TREE_OPERAND (result, 0);
	    }
	  else
	    break;
	}

      gcc_assert (TREE_CODE (result) == INDIRECT_REF
		  || TREE_CODE (result) == NULL_EXPR
		  || TREE_CODE (result) == SAVE_EXPR
		  || DECL_P (result));

      /* Convert the right operand to the operation type unless it is
	 already of the correct type or the type involves a placeholder,
	 since the RHS may not have the same record type.  */
      if (operation_type != right_type
	  && !CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type)))
	{
	  right_operand = convert (operation_type, right_operand);
	  right_type = operation_type;
	}

      /* If the left operand is not of the same type as the operation
	 type, wrap it up in a VIEW_CONVERT_EXPR.  */
      if (left_type != operation_type)
	left_operand = unchecked_convert (operation_type, left_operand, false);

      has_side_effects = true;
      modulus = NULL_TREE;
      break;

    case ARRAY_REF:
      if (!operation_type)
	operation_type = TREE_TYPE (left_type);

      /* ... fall through ... */

    case ARRAY_RANGE_REF:
      /* First look through conversion between type variants.  Note that
	 this changes neither the operation type nor the type domain.  */
      if (TREE_CODE (left_operand) == VIEW_CONVERT_EXPR
	  && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (left_operand, 0)))
	     == TYPE_MAIN_VARIANT (left_type))
	{
	  left_operand = TREE_OPERAND (left_operand, 0);
	  left_type = TREE_TYPE (left_operand);
	}

      /* For a range, make sure the element type is consistent.  */
      if (op_code == ARRAY_RANGE_REF
	  && TREE_TYPE (operation_type) != TREE_TYPE (left_type))
	operation_type = build_array_type (TREE_TYPE (left_type),
					   TYPE_DOMAIN (operation_type));

      /* Then convert the right operand to its base type.  This will prevent
	 unneeded sign conversions when sizetype is wider than integer.  */
      right_operand = convert (right_base_type, right_operand);
      right_operand = convert_to_index_type (right_operand);
      modulus = NULL_TREE;
      break;

    case TRUTH_ANDIF_EXPR:
    case TRUTH_ORIF_EXPR:
    case TRUTH_AND_EXPR:
    case TRUTH_OR_EXPR:
    case TRUTH_XOR_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
#endif
      operation_type = left_base_type;
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);
      break;

    case GE_EXPR:
    case LE_EXPR:
    case GT_EXPR:
    case LT_EXPR:
    case EQ_EXPR:
    case NE_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
#endif
      /* If either operand is a NULL_EXPR, just return a new one.  */
      if (TREE_CODE (left_operand) == NULL_EXPR)
	return build2 (op_code, result_type,
		       build1 (NULL_EXPR, integer_type_node,
			       TREE_OPERAND (left_operand, 0)),
		       integer_zero_node);

      else if (TREE_CODE (right_operand) == NULL_EXPR)
	return build2 (op_code, result_type,
		       build1 (NULL_EXPR, integer_type_node,
			       TREE_OPERAND (right_operand, 0)),
		       integer_zero_node);

      /* If either object is of a justified modular type, get the
	 field from within.  */
      if (TREE_CODE (left_type) == RECORD_TYPE
	  && TYPE_JUSTIFIED_MODULAR_P (left_type))
	{
	  left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
				  left_operand);
	  left_type = TREE_TYPE (left_operand);
	  left_base_type = get_base_type (left_type);
	}

      if (TREE_CODE (right_type) == RECORD_TYPE
	  && TYPE_JUSTIFIED_MODULAR_P (right_type))
	{
	  right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
				   right_operand);
	  right_type = TREE_TYPE (right_operand);
	  right_base_type = get_base_type (right_type);
	}

      /* If both objects are arrays, compare them specially.  */
      if ((TREE_CODE (left_type) == ARRAY_TYPE
	   || (TREE_CODE (left_type) == INTEGER_TYPE
	       && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
	  && (TREE_CODE (right_type) == ARRAY_TYPE
	      || (TREE_CODE (right_type) == INTEGER_TYPE
		  && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
	{
	  result = compare_arrays (input_location,
				   result_type, left_operand, right_operand);
	  if (op_code == NE_EXPR)
	    result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
	  else
	    gcc_assert (op_code == EQ_EXPR);

	  return result;
	}

      /* Otherwise, the base types must be the same, unless they are both fat
	 pointer types or record types.  In the latter case, use the best type
	 and convert both operands to that type.  */
      if (left_base_type != right_base_type)
	{
	  if (TYPE_IS_FAT_POINTER_P (left_base_type)
	      && TYPE_IS_FAT_POINTER_P (right_base_type))
	    {
	      gcc_assert (TYPE_MAIN_VARIANT (left_base_type)
			  == TYPE_MAIN_VARIANT (right_base_type));
	      best_type = left_base_type;
	    }

	  else if (TREE_CODE (left_base_type) == RECORD_TYPE
		   && TREE_CODE (right_base_type) == RECORD_TYPE)
	    {
	      /* The only way this is permitted is if both types have the same
		 name.  In that case, one of them must not be self-referential.
		 Use it as the best type, preferably one with a fixed size.  */
	      gcc_assert (TYPE_NAME (left_base_type)
			  && TYPE_NAME (left_base_type)
			     == TYPE_NAME (right_base_type));

	      if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
		best_type = left_base_type;
	      else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
		best_type = right_base_type;
	      else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type)))
		best_type = left_base_type;
	      else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type)))
		best_type = right_base_type;
	      else
		gcc_unreachable ();
	    }

	  else if (POINTER_TYPE_P (left_base_type)
		   && POINTER_TYPE_P (right_base_type))
	    {
	      gcc_assert (TREE_TYPE (left_base_type)
			  == TREE_TYPE (right_base_type));
	      best_type = left_base_type;
	    }
	  else
	    gcc_unreachable ();

	  left_operand = convert (best_type, left_operand);
	  right_operand = convert (best_type, right_operand);
	}
      else
	{
	  left_operand = convert (left_base_type, left_operand);
	  right_operand = convert (right_base_type, right_operand);
	}

      /* If both objects are fat pointers, compare them specially.  */
      if (TYPE_IS_FAT_POINTER_P (left_base_type))
	{
	  result
	    = compare_fat_pointers (input_location,
				    result_type, left_operand, right_operand);
	  if (op_code == NE_EXPR)
	    result = invert_truthvalue_loc (EXPR_LOCATION (result), result);
	  else
	    gcc_assert (op_code == EQ_EXPR);

	  return result;
	}

      modulus = NULL_TREE;
      break;

    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      /* The RHS of a shift can be any type.  Also, ignore any modulus
	 (we used to abort, but this is needed for unchecked conversion
	 to modular types).  Otherwise, processing is the same as normal.  */
      gcc_assert (operation_type == left_base_type);
      modulus = NULL_TREE;
      left_operand = convert (operation_type, left_operand);
      break;

    case BIT_AND_EXPR:
    case BIT_IOR_EXPR:
    case BIT_XOR_EXPR:
      /* For binary modulus, if the inputs are in range, so are the
	 outputs.  */
      if (modulus && integer_pow2p (modulus))
	modulus = NULL_TREE;
      goto common;

    case COMPLEX_EXPR:
      gcc_assert (TREE_TYPE (result_type) == left_base_type
		  && TREE_TYPE (result_type) == right_base_type);
      left_operand = convert (left_base_type, left_operand);
      right_operand = convert (right_base_type, right_operand);
      break;

    case TRUNC_DIV_EXPR:  case TRUNC_MOD_EXPR:
    case CEIL_DIV_EXPR:   case CEIL_MOD_EXPR:
    case FLOOR_DIV_EXPR:  case FLOOR_MOD_EXPR:
    case ROUND_DIV_EXPR:  case ROUND_MOD_EXPR:
      /* These always produce results lower than either operand.  */
      modulus = NULL_TREE;
      goto common;

    case POINTER_PLUS_EXPR:
      gcc_assert (operation_type == left_base_type
		  && sizetype == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (sizetype, right_operand);
      break;

    case PLUS_NOMOD_EXPR:
    case MINUS_NOMOD_EXPR:
      if (op_code == PLUS_NOMOD_EXPR)
	op_code = PLUS_EXPR;
      else
	op_code = MINUS_EXPR;
      modulus = NULL_TREE;

      /* ... fall through ... */

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* Avoid doing arithmetic in ENUMERAL_TYPE or BOOLEAN_TYPE like the
	 other compilers.  Unlike C, Ada doesn't allow arithmetic in these
	 types but can generate addition/subtraction for Succ/Pred.  */
      if (operation_type
	  && (TREE_CODE (operation_type) == ENUMERAL_TYPE
	      || TREE_CODE (operation_type) == BOOLEAN_TYPE))
	operation_type = left_base_type = right_base_type
	  = gnat_type_for_mode (TYPE_MODE (operation_type),
				TYPE_UNSIGNED (operation_type));

      /* ... fall through ... */

    default:
    common:
      /* The result type should be the same as the base types of both
	 operands (and they should be the same).  Convert everything to
	 the result type.  */

      gcc_assert (operation_type == left_base_type
		  && left_base_type == right_base_type);
      left_operand = convert (operation_type, left_operand);
      right_operand = convert (operation_type, right_operand);
    }

  if (modulus && !integer_pow2p (modulus))
    {
      result = nonbinary_modular_operation (op_code, operation_type,
					    left_operand, right_operand);
      modulus = NULL_TREE;
    }
  /* If either operand is a NULL_EXPR, just return a new one.  */
  else if (TREE_CODE (left_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
  else if (TREE_CODE (right_operand) == NULL_EXPR)
    return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
  else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
    result = fold (build4 (op_code, operation_type, left_operand,
			   right_operand, NULL_TREE, NULL_TREE));
  else if (op_code == INIT_EXPR || op_code == MODIFY_EXPR)
    result = build2 (op_code, void_type_node, left_operand, right_operand);
  else
    result
      = fold_build2 (op_code, operation_type, left_operand, right_operand);

  if (TREE_CONSTANT (result))
    ;
  else if (op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
    {
      if (TYPE_VOLATILE (operation_type))
	TREE_THIS_VOLATILE (result) = 1;
    }
  else
    TREE_CONSTANT (result)
      |= (TREE_CONSTANT (left_operand) && TREE_CONSTANT (right_operand));

  TREE_SIDE_EFFECTS (result) |= has_side_effects;

  /* If we are working with modular types, perform the MOD operation
     if something above hasn't eliminated the need for it.  */
  if (modulus)
    result = fold_build2 (FLOOR_MOD_EXPR, operation_type, result,
			  convert (operation_type, modulus));

  if (result_type && result_type != operation_type)
    result = convert (result_type, result);

  return result;
}
\f
/* Similar, but for unary operations.  */

tree
build_unary_op (enum tree_code op_code, tree result_type, tree operand)
{
  tree type = TREE_TYPE (operand);
  tree base_type = get_base_type (type);
  tree operation_type = result_type;
  tree result;

  if (operation_type
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  if (operation_type
      && TREE_CODE (operation_type) == INTEGER_TYPE
      && TYPE_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  switch (op_code)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      if (!operation_type)
	result_type = operation_type = TREE_TYPE (type);
      else
	gcc_assert (result_type == TREE_TYPE (type));

      result = fold_build1 (op_code, operation_type, operand);
      break;

    case TRUTH_NOT_EXPR:
#ifdef ENABLE_CHECKING
      gcc_assert (TREE_CODE (get_base_type (result_type)) == BOOLEAN_TYPE);
#endif
      result = invert_truthvalue_loc (EXPR_LOCATION (operand), operand);
      /* When not optimizing, fold the result as invert_truthvalue_loc
	 doesn't fold the result of comparisons.  This is intended to undo
	 the trick used for boolean rvalues in gnat_to_gnu.  */
      if (!optimize)
	result = fold (result);
      break;

    case ATTR_ADDR_EXPR:
    case ADDR_EXPR:
      switch (TREE_CODE (operand))
	{
	case INDIRECT_REF:
	case UNCONSTRAINED_ARRAY_REF:
	  result = TREE_OPERAND (operand, 0);

	  /* Make sure the type here is a pointer, not a reference.
	     GCC wants pointer types for function addresses.  */
	  if (!result_type)
	    result_type = build_pointer_type (type);

	  /* If the underlying object can alias everything, propagate the
	     property since we are effectively retrieving the object.  */
	  if (POINTER_TYPE_P (TREE_TYPE (result))
	      && TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (result)))
	    {
	      if (TREE_CODE (result_type) == POINTER_TYPE
		  && !TYPE_REF_CAN_ALIAS_ALL (result_type))
		result_type
		  = build_pointer_type_for_mode (TREE_TYPE (result_type),
						 TYPE_MODE (result_type),
						 true);
	      else if (TREE_CODE (result_type) == REFERENCE_TYPE
		       && !TYPE_REF_CAN_ALIAS_ALL (result_type))
		result_type
		  = build_reference_type_for_mode (TREE_TYPE (result_type),
						   TYPE_MODE (result_type),
						   true);
	    }
	  break;

	case NULL_EXPR:
	  result = operand;
	  TREE_TYPE (result) = type = build_pointer_type (type);
	  break;

	case COMPOUND_EXPR:
	  /* Fold a compound expression if it has unconstrained array type
	     since the middle-end cannot handle it.  But we don't do it in
	     the general case because it may introduce aliasing issues if the
	     first operand is an indirect assignment and the second operand
	     is the corresponding address, e.g. for an allocator.  */
	  if (TREE_CODE (type) == UNCONSTRAINED_ARRAY_TYPE)
	    {
	      result = build_unary_op (ADDR_EXPR, result_type,
				       TREE_OPERAND (operand, 1));
	      result = build2 (COMPOUND_EXPR, TREE_TYPE (result),
			       TREE_OPERAND (operand, 0), result);
	      break;
	    }
	  goto common;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	case COMPONENT_REF:
	case BIT_FIELD_REF:
	  /* If this is for 'Address, find the address of the prefix and add
	     the offset to the field.  Otherwise, do this the normal way.  */
	  if (op_code == ATTR_ADDR_EXPR)
	    {
	      HOST_WIDE_INT bitsize;
	      HOST_WIDE_INT bitpos;
	      tree offset, inner;
	      machine_mode mode;
	      int unsignedp, volatilep;

	      inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
					   &mode, &unsignedp, &volatilep,
					   false);

	      /* If INNER is a padding type whose field has a self-referential
		 size, convert to that inner type.  We know the offset is zero
		 and we need to have that type visible.  */
	      if (type_is_padding_self_referential (TREE_TYPE (inner)))
		inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
				 inner);

	      /* Compute the offset as a byte offset from INNER.  */
	      if (!offset)
		offset = size_zero_node;

	      offset = size_binop (PLUS_EXPR, offset,
				   size_int (bitpos / BITS_PER_UNIT));

	      /* Take the address of INNER, convert it to a pointer to our type
		 and add the offset.  */
	      inner = build_unary_op (ADDR_EXPR,
				      build_pointer_type (TREE_TYPE (operand)),
				      inner);
	      result = build_binary_op (POINTER_PLUS_EXPR, TREE_TYPE (inner),
					inner, offset);
	      break;
	    }
	  goto common;

	case CONSTRUCTOR:
	  /* If this is just a constructor for a padded record, we can
	     just take the address of the single field and convert it to
	     a pointer to our type.  */
	  if (TYPE_IS_PADDING_P (type))
	    {
	      result
		= build_unary_op (ADDR_EXPR,
				  build_pointer_type (TREE_TYPE (operand)),
				  CONSTRUCTOR_ELT (operand, 0)->value);
	      break;
	    }
	  goto common;

	case NOP_EXPR:
	  if (AGGREGATE_TYPE_P (type)
	      && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
	    return build_unary_op (ADDR_EXPR, result_type,
				   TREE_OPERAND (operand, 0));

	  /* ... fallthru ... */

	case VIEW_CONVERT_EXPR:
	  /* If this is just a variant conversion or if the conversion doesn't
	     change the mode, get the result type from this type and go down.
	     This is needed for conversions of CONST_DECLs, to eventually get
	     to the address of their CORRESPONDING_VARs.  */
	  if ((TYPE_MAIN_VARIANT (type)
	       == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (operand, 0))))
	      || (TYPE_MODE (type) != BLKmode
		  && (TYPE_MODE (type)
		      == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0))))))
	    return build_unary_op (ADDR_EXPR,
				   (result_type ? result_type
				    : build_pointer_type (type)),
				   TREE_OPERAND (operand, 0));
	  goto common;

	case CONST_DECL:
	  operand = DECL_CONST_CORRESPONDING_VAR (operand);

	  /* ... fall through ... */

	default:
	common:

	  /* If we are taking the address of a padded record whose field
	     contains a template, take the address of the field.  */
	  if (TYPE_IS_PADDING_P (type)
	      && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type))) == RECORD_TYPE
	      && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type))))
	    {
	      type = TREE_TYPE (TYPE_FIELDS (type));
	      operand = convert (type, operand);
	    }

	  gnat_mark_addressable (operand);
	  result = build_fold_addr_expr (operand);
	}

      TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
      break;

    case INDIRECT_REF:
      {
	tree t = remove_conversions (operand, false);
	bool can_never_be_null = DECL_P (t) && DECL_CAN_NEVER_BE_NULL_P (t);

	/* If TYPE is a thin pointer, either first retrieve the base if this
	   is an expression with an offset built for the initialization of an
	   object with an unconstrained nominal subtype, or else convert to
	   the fat pointer.  */
	if (TYPE_IS_THIN_POINTER_P (type))
	  {
	    tree rec_type = TREE_TYPE (type);

	    if (TREE_CODE (operand) == POINTER_PLUS_EXPR
		&& TREE_OPERAND (operand, 1)
		   == byte_position (DECL_CHAIN (TYPE_FIELDS (rec_type)))
		&& TREE_CODE (TREE_OPERAND (operand, 0)) == NOP_EXPR)
	      {
		operand = TREE_OPERAND (TREE_OPERAND (operand, 0), 0);
		type = TREE_TYPE (operand);
	      }
	    else if (TYPE_UNCONSTRAINED_ARRAY (rec_type))
	      {
		operand
		  = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (rec_type)),
			     operand);
		type = TREE_TYPE (operand);
	      }
	  }

	/* If we want to refer to an unconstrained array, use the appropriate
	   expression.  But this will never survive down to the back-end.  */
	if (TYPE_IS_FAT_POINTER_P (type))
	  {
	    result = build1 (UNCONSTRAINED_ARRAY_REF,
			     TYPE_UNCONSTRAINED_ARRAY (type), operand);
	    TREE_READONLY (result)
	      = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type));
	  }

	/* If we are dereferencing an ADDR_EXPR, return its operand.  */
	else if (TREE_CODE (operand) == ADDR_EXPR)
	  result = TREE_OPERAND (operand, 0);

	/* Otherwise, build and fold the indirect reference.  */
	else
	  {
	    result = build_fold_indirect_ref (operand);
	    TREE_READONLY (result) = TYPE_READONLY (TREE_TYPE (type));
	  }

	if (!TYPE_IS_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)))
	  {
	    TREE_SIDE_EFFECTS (result) = 1;
	    if (TREE_CODE (result) == INDIRECT_REF)
	      TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
	  }

	if ((TREE_CODE (result) == INDIRECT_REF
	     || TREE_CODE (result) == UNCONSTRAINED_ARRAY_REF)
	    && can_never_be_null)
	  TREE_THIS_NOTRAP (result) = 1;

	break;
      }

    case NEGATE_EXPR:
    case BIT_NOT_EXPR:
      {
	tree modulus = ((operation_type
			 && TREE_CODE (operation_type) == INTEGER_TYPE
			 && TYPE_MODULAR_P (operation_type))
			? TYPE_MODULUS (operation_type) : NULL_TREE);
	int mod_pow2 = modulus && integer_pow2p (modulus);

	/* If this is a modular type, there are various possibilities
	   depending on the operation and whether the modulus is a
	   power of two or not.  */

	if (modulus)
	  {
	    gcc_assert (operation_type == base_type);
	    operand = convert (operation_type, operand);

	    /* The fastest in the negate case for binary modulus is
	       the straightforward code; the TRUNC_MOD_EXPR below
	       is an AND operation.  */
1596 if (op_code == NEGATE_EXPR && mod_pow2)
1597 result = fold_build2 (TRUNC_MOD_EXPR, operation_type,
1598 fold_build1 (NEGATE_EXPR, operation_type,
1599 operand),
1600 modulus);
1601
1602 /* For nonbinary negate case, return zero for zero operand,
1603 else return the modulus minus the operand. If the modulus
1604 is a power of two minus one, we can do the subtraction
1605 as an XOR since it is equivalent and faster on most machines. */
1606 else if (op_code == NEGATE_EXPR && !mod_pow2)
1607 {
1608 if (integer_pow2p (fold_build2 (PLUS_EXPR, operation_type,
1609 modulus,
1610 convert (operation_type,
1611 integer_one_node))))
1612 result = fold_build2 (BIT_XOR_EXPR, operation_type,
1613 operand, modulus);
1614 else
1615 result = fold_build2 (MINUS_EXPR, operation_type,
1616 modulus, operand);
1617
1618 result = fold_build3 (COND_EXPR, operation_type,
1619 fold_build2 (NE_EXPR,
1620 boolean_type_node,
1621 operand,
1622 convert
1623 (operation_type,
1624 integer_zero_node)),
1625 result, operand);
1626 }
1627 else
1628 {
1629 /* For the NOT cases, we need a constant equal to
1630 the modulus minus one. For a binary modulus, we
1631 XOR against the constant and subtract the operand from
1632 that constant for nonbinary modulus. */
1633
1634 tree cnst = fold_build2 (MINUS_EXPR, operation_type, modulus,
1635 convert (operation_type,
1636 integer_one_node));
1637
1638 if (mod_pow2)
1639 result = fold_build2 (BIT_XOR_EXPR, operation_type,
1640 operand, cnst);
1641 else
1642 result = fold_build2 (MINUS_EXPR, operation_type,
1643 cnst, operand);
1644 }
1645
1646 break;
1647 }
1648 }
1649
1650 /* ... fall through ... */
1651
1652 default:
1653 gcc_assert (operation_type == base_type);
1654 result = fold_build1 (op_code, operation_type,
1655 convert (operation_type, operand));
1656 }
1657
1658 if (result_type && TREE_TYPE (result) != result_type)
1659 result = convert (result_type, result);
1660
1661 return result;
1662 }
\f
/* Similar, but for COND_EXPR.  */

tree
build_cond_expr (tree result_type, tree condition_operand,
                 tree true_operand, tree false_operand)
{
  bool addr_p = false;
  tree result;

  /* The front-end verified that the result, true and false operands have
     the same base type.  Convert everything to the result type.  */
  true_operand = convert (result_type, true_operand);
  false_operand = convert (result_type, false_operand);

  /* If the result type is unconstrained, take the address of the operands and
     then dereference the result.  Likewise if the result type is passed by
     reference, because creating a temporary of this type is not allowed.  */
  if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
      || TYPE_IS_BY_REFERENCE_P (result_type)
      || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
    {
      result_type = build_pointer_type (result_type);
      true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
      false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
      addr_p = true;
    }

  result = fold_build3 (COND_EXPR, result_type, condition_operand,
                        true_operand, false_operand);

  /* If we have a common SAVE_EXPR (possibly surrounded by arithmetics)
     in both arms, make sure it gets evaluated by moving it ahead of the
     conditional expression.  This is necessary because it is evaluated
     in only one place at run time and would otherwise be uninitialized
     in one of the arms.  */
  true_operand = skip_simple_arithmetic (true_operand);
  false_operand = skip_simple_arithmetic (false_operand);

  if (true_operand == false_operand && TREE_CODE (true_operand) == SAVE_EXPR)
    result = build2 (COMPOUND_EXPR, result_type, true_operand, result);

  if (addr_p)
    result = build_unary_op (INDIRECT_REF, NULL_TREE, result);

  return result;
}
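
/* Illustrative sketch, added for exposition and not part of the original
   source: for a by-reference result type T, the transformation performed
   above turns

     C ? A : B

   into the equivalent of

     *(C ? &A : &B)

   so that no temporary of type T is ever created; only a pointer is
   selected by the conditional and then dereferenced.  */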

/* Similar, but for COMPOUND_EXPR.  */

tree
build_compound_expr (tree result_type, tree stmt_operand, tree expr_operand)
{
  bool addr_p = false;
  tree result;

  /* If the result type is unconstrained, take the address of the operand and
     then dereference the result.  Likewise if the result type is passed by
     reference, but this is natively handled in the gimplifier.  */
  if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
      || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type)))
    {
      result_type = build_pointer_type (result_type);
      expr_operand = build_unary_op (ADDR_EXPR, result_type, expr_operand);
      addr_p = true;
    }

  result = fold_build2 (COMPOUND_EXPR, result_type, stmt_operand,
                        expr_operand);

  if (addr_p)
    result = build_unary_op (INDIRECT_REF, NULL_TREE, result);

  return result;
}
\f
/* Conveniently construct a function call expression.  FNDECL names the
   function to be called, N is the number of arguments, and the "..."
   parameters are the argument expressions.  Unlike build_call_expr
   this doesn't fold the call, hence it will always return a CALL_EXPR.  */

tree
build_call_n_expr (tree fndecl, int n, ...)
{
  va_list ap;
  tree fntype = TREE_TYPE (fndecl);
  tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);

  va_start (ap, n);
  fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
  va_end (ap);
  return fn;
}
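
/* Illustrative usage, added for exposition and not part of the original
   source: given a FUNCTION_DECL f_decl for a function taking two
   arguments, an unfolded call f (a, b) can be built with

     tree call = build_call_n_expr (f_decl, 2, a_expr, b_expr);

   where f_decl, a_expr and b_expr are hypothetical trees for the callee
   and the actual parameters.  The result is always a CALL_EXPR, which
   callers may rely on when they need to set flags on the call node.  */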
\f
/* Call a function that raises an exception and pass the line number and file
   name, if requested.  MSG says which exception function to call.

   GNAT_NODE is the gnat node conveying the source location for which the
   error should be signaled, or Empty in which case the error is signaled on
   the current ref_file_name/input_line.

   KIND says which kind of exception this is for
   (N_Raise_{Constraint,Storage,Program}_Error).  */

tree
build_call_raise (int msg, Node_Id gnat_node, char kind)
{
  tree fndecl = gnat_raise_decls[msg];
  tree label = get_exception_label (kind);
  tree filename;
  int line_number;
  const char *str;
  int len;

  /* If this is to be done as a goto, handle that case.  */
  if (label)
    {
      Entity_Id local_raise = Get_Local_Raise_Call_Entity ();
      tree gnu_result = build1 (GOTO_EXPR, void_type_node, label);

      /* If Local_Raise is present, generate
         Local_Raise (exception'Identity);  */
      if (Present (local_raise))
        {
          tree gnu_local_raise
            = gnat_to_gnu_entity (local_raise, NULL_TREE, 0);
          tree gnu_exception_entity
            = gnat_to_gnu_entity (Get_RT_Exception_Entity (msg), NULL_TREE, 0);
          tree gnu_call
            = build_call_n_expr (gnu_local_raise, 1,
                                 build_unary_op (ADDR_EXPR, NULL_TREE,
                                                 gnu_exception_entity));

          gnu_result = build2 (COMPOUND_EXPR, void_type_node,
                               gnu_call, gnu_result);
        }

      return gnu_result;
    }

  str
    = (Debug_Flag_NN || Exception_Locations_Suppressed)
      ? ""
      : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
        ? IDENTIFIER_POINTER
          (get_identifier (Get_Name_String
                           (Debug_Source_Name
                            (Get_Source_File_Index (Sloc (gnat_node))))))
        : ref_filename;

  len = strlen (str);
  filename = build_string (len, str);
  line_number
    = (gnat_node != Empty && Sloc (gnat_node) != No_Location)
      ? Get_Logical_Line_Number (Sloc (gnat_node))
      : LOCATION_LINE (input_location);

  TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
                                           build_index_type (size_int (len)));

  return
    build_call_n_expr (fndecl, 2,
                       build1 (ADDR_EXPR,
                               build_pointer_type (unsigned_char_type_node),
                               filename),
                       build_int_cst (NULL_TREE, line_number));
}

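/* Illustrative sketch, added for exposition and not part of the original
   source: when no exception label applies, the tree returned above
   corresponds to a run-time call of the shape

     raise_func ("file.adb", 42);

   where raise_func stands for the routine selected by MSG through
   gnat_raise_decls, "file.adb" for the source file of GNAT_NODE and 42
   for its logical line number; the location collapses to an empty string
   when exception locations are suppressed.  */
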
/* Similar to build_call_raise, for an index or range check exception as
   determined by MSG, with extra information generated of the form
   "INDEX out of range FIRST..LAST".  */

tree
build_call_raise_range (int msg, Node_Id gnat_node,
                        tree index, tree first, tree last)
{
  tree fndecl = gnat_raise_decls_ext[msg];
  tree filename;
  int line_number, column_number;
  const char *str;
  int len;

  str
    = (Debug_Flag_NN || Exception_Locations_Suppressed)
      ? ""
      : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
        ? IDENTIFIER_POINTER
          (get_identifier (Get_Name_String
                           (Debug_Source_Name
                            (Get_Source_File_Index (Sloc (gnat_node))))))
        : ref_filename;

  len = strlen (str);
  filename = build_string (len, str);
  if (gnat_node != Empty && Sloc (gnat_node) != No_Location)
    {
      line_number = Get_Logical_Line_Number (Sloc (gnat_node));
      column_number = Get_Column_Number (Sloc (gnat_node));
    }
  else
    {
      line_number = LOCATION_LINE (input_location);
      column_number = 0;
    }

  TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
                                           build_index_type (size_int (len)));

  return
    build_call_n_expr (fndecl, 6,
                       build1 (ADDR_EXPR,
                               build_pointer_type (unsigned_char_type_node),
                               filename),
                       build_int_cst (NULL_TREE, line_number),
                       build_int_cst (NULL_TREE, column_number),
                       convert (integer_type_node, index),
                       convert (integer_type_node, first),
                       convert (integer_type_node, last));
}

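/* Illustrative sketch, added for exposition and not part of the original
   source: for an array indexed from 1 to 10 that is accessed with the
   value 11, the six arguments passed above would read, schematically,

     ("file.adb", line, column, 11, 1, 10)

   letting the run time format a message along the lines of
   "index 11 out of range 1..10" next to the source location.  */
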
/* Similar to build_call_raise, with extra information about the column
   where the check failed.  */

tree
build_call_raise_column (int msg, Node_Id gnat_node)
{
  tree fndecl = gnat_raise_decls_ext[msg];
  tree filename;
  int line_number, column_number;
  const char *str;
  int len;

  str
    = (Debug_Flag_NN || Exception_Locations_Suppressed)
      ? ""
      : (gnat_node != Empty && Sloc (gnat_node) != No_Location)
        ? IDENTIFIER_POINTER
          (get_identifier (Get_Name_String
                           (Debug_Source_Name
                            (Get_Source_File_Index (Sloc (gnat_node))))))
        : ref_filename;

  len = strlen (str);
  filename = build_string (len, str);
  if (gnat_node != Empty && Sloc (gnat_node) != No_Location)
    {
      line_number = Get_Logical_Line_Number (Sloc (gnat_node));
      column_number = Get_Column_Number (Sloc (gnat_node));
    }
  else
    {
      line_number = LOCATION_LINE (input_location);
      column_number = 0;
    }

  TREE_TYPE (filename) = build_array_type (unsigned_char_type_node,
                                           build_index_type (size_int (len)));

  return
    build_call_n_expr (fndecl, 3,
                       build1 (ADDR_EXPR,
                               build_pointer_type (unsigned_char_type_node),
                               filename),
                       build_int_cst (NULL_TREE, line_number),
                       build_int_cst (NULL_TREE, column_number));
}
\f
/* qsort comparer for the bit positions of two constructor elements
   for record components.  */

static int
compare_elmt_bitpos (const PTR rt1, const PTR rt2)
{
  const constructor_elt * const elmt1 = (const constructor_elt * const) rt1;
  const constructor_elt * const elmt2 = (const constructor_elt * const) rt2;
  const_tree const field1 = elmt1->index;
  const_tree const field2 = elmt2->index;
  const int ret
    = tree_int_cst_compare (bit_position (field1), bit_position (field2));

  return ret ? ret : (int) (DECL_UID (field1) - DECL_UID (field2));
}

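/* Note added for exposition, not part of the original source: the DECL_UID
   tie-break turns the comparison into a total order, so the qsort below is
   deterministic even for fields at equal bit positions.  For hypothetical
   fields A at bit 64, B at bit 0 and C at bit 32, sorting with
   compare_elmt_bitpos yields the order B, C, A.  */
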
/* Return a CONSTRUCTOR of TYPE whose elements are V.  */

tree
gnat_build_constructor (tree type, vec<constructor_elt, va_gc> *v)
{
  bool allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
  bool read_only = true;
  bool side_effects = false;
  tree result, obj, val;
  unsigned int n_elmts;

  /* Scan the elements to see if they are all constant or if any has side
     effects, to let us set global flags on the resulting constructor.  Count
     the elements along the way for possible sorting purposes below.  */
  FOR_EACH_CONSTRUCTOR_ELT (v, n_elmts, obj, val)
    {
      /* The predicate must be in keeping with output_constructor.  */
      if ((!TREE_CONSTANT (val) && !TREE_STATIC (val))
          || (TREE_CODE (type) == RECORD_TYPE
              && CONSTRUCTOR_BITFIELD_P (obj)
              && !initializer_constant_valid_for_bitfield_p (val))
          || !initializer_constant_valid_p (val, TREE_TYPE (val)))
        allconstant = false;

      if (!TREE_READONLY (val))
        read_only = false;

      if (TREE_SIDE_EFFECTS (val))
        side_effects = true;
    }

  /* For record types with constant components only, sort field list
     by increasing bit position.  This is necessary to ensure the
     constructor can be output as static data.  */
  if (allconstant && TREE_CODE (type) == RECORD_TYPE && n_elmts > 1)
    v->qsort (compare_elmt_bitpos);

  result = build_constructor (type, v);
  CONSTRUCTOR_NO_CLEARING (result) = 1;
  TREE_CONSTANT (result) = TREE_STATIC (result) = allconstant;
  TREE_SIDE_EFFECTS (result) = side_effects;
  TREE_READONLY (result) = TYPE_READONLY (type) || read_only || allconstant;
  return result;
}
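
/* Illustrative usage, added for exposition and not part of the original
   source: to build an aggregate for a two-field record type rec_type with
   hypothetical FIELD_DECLs f1_decl and f2_decl:

     vec<constructor_elt, va_gc> *v;
     vec_alloc (v, 2);
     CONSTRUCTOR_APPEND_ELT (v, f1_decl, f1_value);
     CONSTRUCTOR_APPEND_ELT (v, f2_decl, f2_value);
     tree aggr = gnat_build_constructor (rec_type, v);

   If both values are constants valid for static initialization, the
   result is flagged TREE_CONSTANT and TREE_STATIC and the elements are
   sorted by bit position, so the aggregate can be emitted as static
   data.  */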
\f
/* Return a COMPONENT_REF to access a field that is given by COMPONENT,
   an IDENTIFIER_NODE giving the name of the field, or FIELD, a FIELD_DECL,
   for the field.  Don't fold the result if NO_FOLD_P is true.

   We also handle the fact that we might have been passed a pointer to the
   actual record and know how to look for fields in variant parts.  */

tree
build_simple_component_ref (tree record_variable, tree component, tree field,
                            bool no_fold_p)
{
  tree record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_variable));
  tree base, ref;

  gcc_assert (RECORD_OR_UNION_TYPE_P (record_type)
              && COMPLETE_TYPE_P (record_type)
              && (component == NULL_TREE) != (field == NULL_TREE));

  /* If no field was specified, look for a field with the specified name in
     the current record only.  */
  if (!field)
    for (field = TYPE_FIELDS (record_type);
         field;
         field = DECL_CHAIN (field))
      if (DECL_NAME (field) == component)
        break;

  if (!field)
    return NULL_TREE;

  /* If this field is not in the specified record, see if we can find a field
     in the specified record whose original field is the same as this one.  */
  if (DECL_CONTEXT (field) != record_type)
    {
      tree new_field;

      /* First loop through normal components.  */
      for (new_field = TYPE_FIELDS (record_type);
           new_field;
           new_field = DECL_CHAIN (new_field))
        if (SAME_FIELD_P (field, new_field))
          break;

      /* Next, see if we're looking for an inherited component in an extension.
         If so, look through the extension directly, unless the type contains
         a placeholder, as it might be needed for a later substitution.  */
      if (!new_field
          && TREE_CODE (record_variable) == VIEW_CONVERT_EXPR
          && TYPE_ALIGN_OK (record_type)
          && !type_contains_placeholder_p (record_type)
          && TREE_CODE (TREE_TYPE (TREE_OPERAND (record_variable, 0)))
             == RECORD_TYPE
          && TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (record_variable, 0))))
        {
          ref = build_simple_component_ref (TREE_OPERAND (record_variable, 0),
                                            NULL_TREE, field, no_fold_p);
          if (ref)
            return ref;
        }

      /* Next, loop through DECL_INTERNAL_P components if we haven't found the
         component in the first search.  Doing this search in two steps is
         required to avoid hidden homonymous fields in the _Parent field.  */
      if (!new_field)
        for (new_field = TYPE_FIELDS (record_type);
             new_field;
             new_field = DECL_CHAIN (new_field))
          if (DECL_INTERNAL_P (new_field))
            {
              tree field_ref
                = build_simple_component_ref (record_variable,
                                              NULL_TREE, new_field, no_fold_p);
              ref = build_simple_component_ref (field_ref, NULL_TREE, field,
                                                no_fold_p);
              if (ref)
                return ref;
            }

      field = new_field;
    }

  if (!field)
    return NULL_TREE;

  /* If the field's offset has overflowed, do not try to access it, as doing
     so may trigger sanity checks deeper in the back-end.  Note that we don't
     need to warn since this will be done on trying to declare the object.  */
  if (TREE_CODE (DECL_FIELD_OFFSET (field)) == INTEGER_CST
      && TREE_OVERFLOW (DECL_FIELD_OFFSET (field)))
    return NULL_TREE;

  /* We have found a suitable field.  Before building the COMPONENT_REF, get
     the base object of the record variable if possible.  */
  base = record_variable;

  if (TREE_CODE (record_variable) == VIEW_CONVERT_EXPR)
    {
      tree inner_variable = TREE_OPERAND (record_variable, 0);
      tree inner_type = TYPE_MAIN_VARIANT (TREE_TYPE (inner_variable));

      /* Look through a conversion between type variants.  This is transparent
         as far as the field is concerned.  */
      if (inner_type == record_type)
        base = inner_variable;

      /* Look through a conversion between original and packable version, but
         the field needs to be adjusted in this case.  */
      else if (RECORD_OR_UNION_TYPE_P (inner_type)
               && TYPE_NAME (inner_type) == TYPE_NAME (record_type))
        {
          tree new_field;

          for (new_field = TYPE_FIELDS (inner_type);
               new_field;
               new_field = DECL_CHAIN (new_field))
            if (SAME_FIELD_P (field, new_field))
              break;
          if (new_field)
            {
              field = new_field;
              base = inner_variable;
            }
        }
    }

  ref = build3 (COMPONENT_REF, TREE_TYPE (field), base, field, NULL_TREE);

  if (TREE_READONLY (record_variable)
      || TREE_READONLY (field)
      || TYPE_READONLY (record_type))
    TREE_READONLY (ref) = 1;

  if (TREE_THIS_VOLATILE (record_variable)
      || TREE_THIS_VOLATILE (field)
      || TYPE_VOLATILE (record_type))
    TREE_THIS_VOLATILE (ref) = 1;

  if (no_fold_p)
    return ref;

  /* The generic folder may punt in this case because the inner array type
     can be self-referential, but folding is in fact not problematic.  */
  if (TREE_CODE (base) == CONSTRUCTOR
      && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (base)))
    {
      unsigned int len = CONSTRUCTOR_NELTS (base);
      gcc_assert (len > 0);

      if (field == CONSTRUCTOR_ELT (base, 0)->index)
        return CONSTRUCTOR_ELT (base, 0)->value;

      if (len > 1)
        {
          if (field == CONSTRUCTOR_ELT (base, 1)->index)
            return CONSTRUCTOR_ELT (base, 1)->value;
        }
      else
        return NULL_TREE;

      return ref;
    }

  return fold (ref);
}
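
/* Illustrative usage, added for exposition and not part of the original
   source: given a hypothetical tree rec_expr whose type is a record with
   a component named "Len", the reference rec_expr.Len can be built by
   name with

     tree ref = build_simple_component_ref (rec_expr,
                                            get_identifier ("Len"),
                                            NULL_TREE, false);

   or directly from a hypothetical FIELD_DECL len_field with

     tree ref = build_simple_component_ref (rec_expr, NULL_TREE,
                                            len_field, false);

   A NULL_TREE result means the component could not be located.  */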

/* Likewise, but generate a Constraint_Error if the reference could not be
   found.  */

tree
build_component_ref (tree record_variable, tree component, tree field,
                     bool no_fold_p)
{
  tree ref = build_simple_component_ref (record_variable, component, field,
                                         no_fold_p);
  if (ref)
    return ref;

  /* If FIELD was specified, assume this is an invalid user field so raise
     Constraint_Error.  Otherwise, we have no type to return so abort.  */
  gcc_assert (field);
  return build1 (NULL_EXPR, TREE_TYPE (field),
                 build_call_raise (CE_Discriminant_Check_Failed, Empty,
                                   N_Raise_Constraint_Error));
}
\f
/* Helper for build_call_alloc_dealloc, with arguments to be interpreted
   identically.  Process the case where a GNAT_PROC to call is provided.  */

static inline tree
build_call_alloc_dealloc_proc (tree gnu_obj, tree gnu_size, tree gnu_type,
                               Entity_Id gnat_proc, Entity_Id gnat_pool)
{
  tree gnu_proc = gnat_to_gnu (gnat_proc);
  tree gnu_call;

  /* A storage pool's underlying type is a record type (for both predefined
     storage pools and GNAT simple storage pools).  The secondary stack uses
     the same mechanism, but its pool object (SS_Pool) is an integer.  */
  if (Is_Record_Type (Underlying_Type (Etype (gnat_pool))))
    {
      /* The size is the third parameter; the alignment parameter
         has the same type.  */
      Entity_Id gnat_size_type
        = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
      tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);

      tree gnu_pool = gnat_to_gnu (gnat_pool);
      tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
      tree gnu_align = size_int (TYPE_ALIGN (gnu_type) / BITS_PER_UNIT);

      gnu_size = convert (gnu_size_type, gnu_size);
      gnu_align = convert (gnu_size_type, gnu_align);

      /* The first arg is always the address of the storage pool; next
         comes the address of the object, for a deallocator, then the
         size and alignment.  */
      if (gnu_obj)
        gnu_call = build_call_n_expr (gnu_proc, 4, gnu_pool_addr, gnu_obj,
                                      gnu_size, gnu_align);
      else
        gnu_call = build_call_n_expr (gnu_proc, 3, gnu_pool_addr,
                                      gnu_size, gnu_align);
    }

  /* Secondary stack case.  */
  else
    {
      /* The size is the second parameter.  */
      Entity_Id gnat_size_type
        = Etype (Next_Formal (First_Formal (gnat_proc)));
      tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);

      gnu_size = convert (gnu_size_type, gnu_size);

      /* The first arg is the address of the object, for a deallocator,
         then the size.  */
      if (gnu_obj)
        gnu_call = build_call_n_expr (gnu_proc, 2, gnu_obj, gnu_size);
      else
        gnu_call = build_call_n_expr (gnu_proc, 1, gnu_size);
    }

  return gnu_call;
}
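
/* Note added for exposition, not part of the original source: the formal
   counting above mirrors the canonical Ada storage pool profile from
   RM 13.11:

     procedure Allocate
       (Pool                     : in out Root_Storage_Pool;
        Storage_Address          : out System.Address;
        Size_In_Storage_Elements : Storage_Elements.Storage_Count;
        Alignment                : Storage_Elements.Storage_Count);

   Pool is the first formal and Storage_Address the second, hence the
   size is the third formal and the alignment, of the same subtype,
   the fourth.  */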

/* Helper for build_call_alloc_dealloc, to build and return an allocator for
   DATA_SIZE bytes aimed at containing a DATA_TYPE object, using the default
   __gnat_malloc allocator.  Honor DATA_TYPE alignments greater than what the
   latter offers.  */

static inline tree
maybe_wrap_malloc (tree data_size, tree data_type, Node_Id gnat_node)
{
  /* When the DATA_TYPE alignment is stricter than what malloc offers
     (super-aligned case), we allocate an "aligning" wrapper type and return
     the address of its single data field with the malloc's return value
     stored just in front.  */

  unsigned int data_align = TYPE_ALIGN (data_type);
  unsigned int system_allocator_alignment
    = get_target_system_allocator_alignment () * BITS_PER_UNIT;

  tree aligning_type
    = ((data_align > system_allocator_alignment)
       ? make_aligning_type (data_type, data_align, data_size,
                             system_allocator_alignment,
                             POINTER_SIZE / BITS_PER_UNIT,
                             gnat_node)
       : NULL_TREE);

  tree size_to_malloc
    = aligning_type ? TYPE_SIZE_UNIT (aligning_type) : data_size;

  tree malloc_ptr = build_call_n_expr (malloc_decl, 1, size_to_malloc);

  if (aligning_type)
    {
      /* Latch malloc's return value and get a pointer to the aligning field
         first.  */
      tree storage_ptr = gnat_protect_expr (malloc_ptr);

      tree aligning_record_addr
        = convert (build_pointer_type (aligning_type), storage_ptr);

      tree aligning_record
        = build_unary_op (INDIRECT_REF, NULL_TREE, aligning_record_addr);

      tree aligning_field
        = build_component_ref (aligning_record, NULL_TREE,
                               TYPE_FIELDS (aligning_type), false);

      tree aligning_field_addr
        = build_unary_op (ADDR_EXPR, NULL_TREE, aligning_field);

      /* Then arrange to store the allocator's return value ahead
         and return.  */
      tree storage_ptr_slot_addr
        = build_binary_op (POINTER_PLUS_EXPR, ptr_type_node,
                           convert (ptr_type_node, aligning_field_addr),
                           size_int (-(HOST_WIDE_INT) POINTER_SIZE
                                     / BITS_PER_UNIT));

      tree storage_ptr_slot
        = build_unary_op (INDIRECT_REF, NULL_TREE,
                          convert (build_pointer_type (ptr_type_node),
                                   storage_ptr_slot_addr));

      return
        build2 (COMPOUND_EXPR, TREE_TYPE (aligning_field_addr),
                build_binary_op (INIT_EXPR, NULL_TREE,
                                 storage_ptr_slot, storage_ptr),
                aligning_field_addr);
    }
  else
    return malloc_ptr;
}
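
/* Illustrative memory layout, added for exposition and not part of the
   original source, for the super-aligned case handled above:

     storage_ptr                  aligning_field_addr
     |                            |
     v                            v
     +---------------------------+------------------------------+
     | padding ... [storage_ptr] | data field (DATA_SIZE bytes) |
     +---------------------------+------------------------------+
                   ^
                   pointer-sized slot just in front of the field

   The raw malloc return value is saved in the slot immediately before
   the aligned data field, so maybe_wrap_free can retrieve it later.  */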

/* Helper for build_call_alloc_dealloc, to release a DATA_TYPE object
   designated by DATA_PTR using the __gnat_free entry point.  */

static inline tree
maybe_wrap_free (tree data_ptr, tree data_type)
{
  /* In the regular alignment case, we pass the data pointer straight to free.
     In the superaligned case, we need to retrieve the initial allocator
     return value, stored in front of the data block at allocation time.  */

  unsigned int data_align = TYPE_ALIGN (data_type);
  unsigned int system_allocator_alignment
    = get_target_system_allocator_alignment () * BITS_PER_UNIT;

  tree free_ptr;

  if (data_align > system_allocator_alignment)
    {
      /* DATA_FRONT_PTR (void *)
         = (void *)DATA_PTR - sizeof (void *)  */
      tree data_front_ptr
        = build_binary_op
          (POINTER_PLUS_EXPR, ptr_type_node,
           convert (ptr_type_node, data_ptr),
           size_int (-(HOST_WIDE_INT) POINTER_SIZE / BITS_PER_UNIT));

      /* FREE_PTR (void *) = *(void **)DATA_FRONT_PTR  */
      free_ptr
        = build_unary_op
          (INDIRECT_REF, NULL_TREE,
           convert (build_pointer_type (ptr_type_node), data_front_ptr));
    }
  else
    free_ptr = data_ptr;

  return build_call_n_expr (free_decl, 1, free_ptr);
}

/* Build a GCC tree to call an allocation or deallocation function.
   If GNU_OBJ is nonzero, it is an object to deallocate.  Otherwise,
   generate an allocator.

   GNU_SIZE is the number of bytes to allocate and GNU_TYPE is the contained
   object type, used to determine the to-be-honored address alignment.
   GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the storage
   pool to use.  If not present, malloc and free are used.  GNAT_NODE is used
   to provide an error location for restriction violation messages.  */

tree
build_call_alloc_dealloc (tree gnu_obj, tree gnu_size, tree gnu_type,
                          Entity_Id gnat_proc, Entity_Id gnat_pool,
                          Node_Id gnat_node)
{
  gnu_size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (gnu_size, gnu_obj);

  /* Is there an explicit procedure to call?  It is assumed to deal with the
     type alignment constraints.  */
  if (Present (gnat_proc))
    return build_call_alloc_dealloc_proc (gnu_obj, gnu_size, gnu_type,
                                          gnat_proc, gnat_pool);

  /* Otherwise, object to "free" or "malloc" with possible special processing
     for alignments stricter than what the default allocator honors.  */
  else if (gnu_obj)
    return maybe_wrap_free (gnu_obj, gnu_type);
  else
    {
      /* Assert that we can no longer be called with this special pool.  */
      gcc_assert (gnat_pool != -1);

      /* Check that we aren't violating the associated restriction.  */
      if (!(Nkind (gnat_node) == N_Allocator && Comes_From_Source (gnat_node)))
        Check_No_Implicit_Heap_Alloc (gnat_node);

      return maybe_wrap_malloc (gnu_size, gnu_type, gnat_node);
    }
}
\f
/* Build a GCC tree that corresponds to allocating an object of TYPE whose
   initial value is INIT, if INIT is nonzero.  Convert the expression to
   RESULT_TYPE, which must be some pointer type, and return the result.

   GNAT_PROC and GNAT_POOL optionally give the procedure to call and
   the storage pool to use.  GNAT_NODE is used to provide an error
   location for restriction violation messages.  If IGNORE_INIT_TYPE is
   true, ignore the type of INIT for the purpose of determining the size;
   this will cause the maximum size to be allocated if TYPE is of
   self-referential size.  */

tree
build_allocator (tree type, tree init, tree result_type, Entity_Id gnat_proc,
                 Entity_Id gnat_pool, Node_Id gnat_node, bool ignore_init_type)
{
  tree size, storage, storage_deref, storage_init;

  /* If the initializer, if present, is a NULL_EXPR, just return a new one.  */
  if (init && TREE_CODE (init) == NULL_EXPR)
    return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));

  /* If the initializer, if present, is a COND_EXPR, deal with each branch.  */
  else if (init && TREE_CODE (init) == COND_EXPR)
    return build3 (COND_EXPR, result_type, TREE_OPERAND (init, 0),
                   build_allocator (type, TREE_OPERAND (init, 1), result_type,
                                    gnat_proc, gnat_pool, gnat_node,
                                    ignore_init_type),
                   build_allocator (type, TREE_OPERAND (init, 2), result_type,
                                    gnat_proc, gnat_pool, gnat_node,
                                    ignore_init_type));

  /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
     sizes of the object and its template.  Allocate the whole thing and
     fill in the parts that are known.  */
  else if (TYPE_IS_FAT_OR_THIN_POINTER_P (result_type))
    {
      tree storage_type
        = build_unc_object_type_from_ptr (result_type, type,
                                          get_identifier ("ALLOC"), false);
      tree template_type = TREE_TYPE (TYPE_FIELDS (storage_type));
      tree storage_ptr_type = build_pointer_type (storage_type);

      size = SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type),
                                             init);

      /* If the size overflows, pass -1 so Storage_Error will be raised.  */
      if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
        size = size_int (-1);

      storage = build_call_alloc_dealloc (NULL_TREE, size, storage_type,
                                          gnat_proc, gnat_pool, gnat_node);
      storage = convert (storage_ptr_type, gnat_protect_expr (storage));
      storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
      TREE_THIS_NOTRAP (storage_deref) = 1;

      /* If there is an initializing expression, then make a constructor for
         the entire object including the bounds and copy it into the object.
         If there is no initializing expression, just set the bounds.  */
      if (init)
        {
          vec<constructor_elt, va_gc> *v;
          vec_alloc (v, 2);

          CONSTRUCTOR_APPEND_ELT (v, TYPE_FIELDS (storage_type),
                                  build_template (template_type, type, init));
          CONSTRUCTOR_APPEND_ELT (v, DECL_CHAIN (TYPE_FIELDS (storage_type)),
                                  init);
          storage_init
            = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref,
                               gnat_build_constructor (storage_type, v));
        }
      else
        storage_init
          = build_binary_op (INIT_EXPR, NULL_TREE,
                             build_component_ref (storage_deref, NULL_TREE,
                                                  TYPE_FIELDS (storage_type),
                                                  false),
                             build_template (template_type, type, NULL_TREE));

      return build2 (COMPOUND_EXPR, result_type,
                     storage_init, convert (result_type, storage));
    }

  size = TYPE_SIZE_UNIT (type);

  /* If we have an initializing expression, see if its size is simpler
     than the size from the type.  */
  if (!ignore_init_type && init && TYPE_SIZE_UNIT (TREE_TYPE (init))
      && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
          || CONTAINS_PLACEHOLDER_P (size)))
    size = TYPE_SIZE_UNIT (TREE_TYPE (init));

  /* If the size is still self-referential, reference the initializing
     expression, if it is present.  If not, this must have been a
     call to allocate a library-level object, in which case we use
     the maximum size.  */
  if (CONTAINS_PLACEHOLDER_P (size))
    {
      if (!ignore_init_type && init)
        size = substitute_placeholder_in_expr (size, init);
      else
        size = max_size (size, true);
    }

  /* If the size overflows, pass -1 so Storage_Error will be raised.  */
  if (TREE_CODE (size) == INTEGER_CST && !valid_constant_size_p (size))
    size = size_int (-1);

  storage = convert (result_type,
                     build_call_alloc_dealloc (NULL_TREE, size, type,
                                               gnat_proc, gnat_pool,
                                               gnat_node));

  /* If we have an initial value, protect the new address, assign the value
     and return the address with a COMPOUND_EXPR.  */
  if (init)
    {
      storage = gnat_protect_expr (storage);
      storage_deref = build_unary_op (INDIRECT_REF, NULL_TREE, storage);
      TREE_THIS_NOTRAP (storage_deref) = 1;
      storage_init
        = build_binary_op (INIT_EXPR, NULL_TREE, storage_deref, init);
      return build2 (COMPOUND_EXPR, result_type, storage_init, storage);
    }

  return storage;
}
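
/* Illustrative sketch, added for exposition and not part of the original
   source: for an unconstrained array allocation such as the Ada
   expression

     new String'("Hello")

   the fat-pointer branch above lays out a single storage block

     +------------------------+---------------------+
     | template (bounds 1..5) | array data "Hello"  |
     +------------------------+---------------------+

   allocates it in one call, initializes both parts with a constructor,
   and returns the block's address converted to RESULT_TYPE.  */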
\f
/* Indicate that we need to take the address of T and that it therefore
   should not be allocated in a register.  Returns true if successful.  */

bool
gnat_mark_addressable (tree t)
{
  while (true)
    switch (TREE_CODE (t))
      {
      case ADDR_EXPR:
      case COMPONENT_REF:
      case ARRAY_REF:
      case ARRAY_RANGE_REF:
      case REALPART_EXPR:
      case IMAGPART_EXPR:
      case VIEW_CONVERT_EXPR:
      case NON_LVALUE_EXPR:
      CASE_CONVERT:
        t = TREE_OPERAND (t, 0);
        break;

      case COMPOUND_EXPR:
        t = TREE_OPERAND (t, 1);
        break;

      case CONSTRUCTOR:
        TREE_ADDRESSABLE (t) = 1;
        return true;

      case VAR_DECL:
      case PARM_DECL:
      case RESULT_DECL:
        TREE_ADDRESSABLE (t) = 1;
        return true;

      case FUNCTION_DECL:
        TREE_ADDRESSABLE (t) = 1;
        return true;

      case CONST_DECL:
        return DECL_CONST_CORRESPONDING_VAR (t)
               && gnat_mark_addressable (DECL_CONST_CORRESPONDING_VAR (t));

      default:
        return true;
      }
}
\f
/* Return true if EXP is a stable expression for the purpose of the functions
   below and, therefore, can be returned unmodified by them.  We accept things
   that are actual constants or that have already been handled.  */

static bool
gnat_stable_expr_p (tree exp)
{
  enum tree_code code = TREE_CODE (exp);
  return TREE_CONSTANT (exp) || code == NULL_EXPR || code == SAVE_EXPR;
}

/* Save EXP for later use or reuse.  This is equivalent to save_expr in tree.c
   but we know how to handle our own nodes.  */

tree
gnat_save_expr (tree exp)
{
  tree type = TREE_TYPE (exp);
  enum tree_code code = TREE_CODE (exp);

  if (gnat_stable_expr_p (exp))
    return exp;

  if (code == UNCONSTRAINED_ARRAY_REF)
    {
      tree t = build1 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)));
      TREE_READONLY (t) = TYPE_READONLY (type);
      return t;
    }

  /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
     This may be more efficient, but will also allow us to more easily find
     the match for the PLACEHOLDER_EXPR.  */
  if (code == COMPONENT_REF
      && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    return build3 (code, type, gnat_save_expr (TREE_OPERAND (exp, 0)),
                   TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));

  return save_expr (exp);
}

/* Protect EXP for immediate reuse.  This is a variant of gnat_save_expr that
   is optimized under the assumption that EXP's value doesn't change before
   its subsequent reuse(s) except through its potential reevaluation.  */

tree
gnat_protect_expr (tree exp)
{
  tree type = TREE_TYPE (exp);
  enum tree_code code = TREE_CODE (exp);

  if (gnat_stable_expr_p (exp))
    return exp;

  /* If EXP has no side effects, we theoretically don't need to do anything.
     However, we may be recursively passed more and more complex expressions
     involving checks which will be reused multiple times and eventually be
     unshared for gimplification; in order to avoid a complexity explosion
     at that point, we protect any expressions more complex than a simple
     arithmetic expression.  */
  if (!TREE_SIDE_EFFECTS (exp))
    {
      tree inner = skip_simple_arithmetic (exp);
      if (!EXPR_P (inner) || REFERENCE_CLASS_P (inner))
        return exp;
    }

  /* If this is a conversion, protect what's inside the conversion.  */
  if (code == NON_LVALUE_EXPR
      || CONVERT_EXPR_CODE_P (code)
      || code == VIEW_CONVERT_EXPR)
    return build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));

  /* If we're indirectly referencing something, we only need to protect the
     address since the data itself can't change in these situations.  */
  if (code == INDIRECT_REF || code == UNCONSTRAINED_ARRAY_REF)
    {
      tree t = build1 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)));
      TREE_READONLY (t) = TYPE_READONLY (type);
      return t;
    }

  /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
     This may be more efficient, but will also allow us to more easily find
     the match for the PLACEHOLDER_EXPR.  */
  if (code == COMPONENT_REF
      && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
    return build3 (code, type, gnat_protect_expr (TREE_OPERAND (exp, 0)),
                   TREE_OPERAND (exp, 1), TREE_OPERAND (exp, 2));

  /* If this is a fat pointer or something that can be placed in a register,
     just make a SAVE_EXPR.  Likewise for a CALL_EXPR as large objects are
     returned via invisible reference in most ABIs so the temporary will
     directly be filled by the callee.  */
  if (TYPE_IS_FAT_POINTER_P (type)
      || TYPE_MODE (type) != BLKmode
      || code == CALL_EXPR)
    return save_expr (exp);

  /* Otherwise, this is a reference to an object: protect the address and
     dereference it.  */
  return
    build_unary_op (INDIRECT_REF, type,
                    save_expr (build_unary_op (ADDR_EXPR,
                                               build_reference_type (type),
                                               exp)));
}
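
/* Illustrative sketch, added for exposition and not part of the original
   source: protecting a hypothetical BLKmode record reference rec_expr
   that is about to be read twice, e.g. for a check,

     tree safe = gnat_protect_expr (rec_expr);

   yields the equivalent of *SAVE_EXPR (&rec_expr): the address is
   evaluated only once and both reads go through the saved pointer,
   without copying the whole object into a temporary.  */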

/* This is equivalent to stabilize_reference_1 in tree.c but we take an extra
   argument to force evaluation of everything.  */

static tree
gnat_stabilize_reference_1 (tree e, void *data)
{
  const bool force = *(bool *)data;
  enum tree_code code = TREE_CODE (e);
  tree type = TREE_TYPE (e);
  tree result;

  if (gnat_stable_expr_p (e))
    return e;

  switch (TREE_CODE_CLASS (code))
    {
    case tcc_exceptional:
    case tcc_declaration:
    case tcc_comparison:
    case tcc_expression:
    case tcc_reference:
    case tcc_vl_exp:
      /* If this is a COMPONENT_REF of a fat pointer, save the entire
         fat pointer.  This may be more efficient, but will also allow
         us to more easily find the match for the PLACEHOLDER_EXPR.  */
      if (code == COMPONENT_REF
          && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (e, 0))))
        result
          = build3 (code, type,
                    gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data),
                    TREE_OPERAND (e, 1), TREE_OPERAND (e, 2));
      /* If the expression has side-effects, then encase it in a SAVE_EXPR
         so that it will only be evaluated once.  */
      /* The tcc_reference and tcc_comparison classes could be handled as
         below, but it is generally faster to only evaluate them once.  */
      else if (TREE_SIDE_EFFECTS (e) || force)
        return save_expr (e);
      else
        return e;
      break;

    case tcc_binary:
      /* Recursively stabilize each operand.  */
      result
        = build2 (code, type,
                  gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data),
                  gnat_stabilize_reference_1 (TREE_OPERAND (e, 1), data));
      break;

    case tcc_unary:
      /* Recursively stabilize the single operand.  */
      result
        = build1 (code, type,
                  gnat_stabilize_reference_1 (TREE_OPERAND (e, 0), data));
      break;

    default:
      gcc_unreachable ();
    }

  TREE_READONLY (result) = TREE_READONLY (e);
  TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (e);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (e);

  return result;
}

/* This is equivalent to stabilize_reference in tree.c but we know how to
   handle our own nodes and we take extra arguments.  FORCE says whether to
   force evaluation of everything in REF.  INIT is set to the first arm of
   a COMPOUND_EXPR present in REF, if any.  */

tree
gnat_stabilize_reference (tree ref, bool force, tree *init)
{
  return
    gnat_rewrite_reference (ref, gnat_stabilize_reference_1, &force, init);
}

/* Rewrite reference REF and call FUNC on each expression within REF in the
   process.  DATA is passed unmodified to FUNC.  INIT is set to the first
   arm of a COMPOUND_EXPR present in REF, if any.  */

tree
gnat_rewrite_reference (tree ref, rewrite_fn func, void *data, tree *init)
{
  tree type = TREE_TYPE (ref);
  enum tree_code code = TREE_CODE (ref);
  tree result;

  switch (code)
    {
    case CONST_DECL:
    case VAR_DECL:
    case PARM_DECL:
    case RESULT_DECL:
      /* No action is needed in this case.  */
      return ref;

    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case VIEW_CONVERT_EXPR:
      result
        = build1 (code, type,
                  gnat_rewrite_reference (TREE_OPERAND (ref, 0), func, data,
                                          init));
      break;

    case INDIRECT_REF:
    case UNCONSTRAINED_ARRAY_REF:
      result = build1 (code, type, func (TREE_OPERAND (ref, 0), data));
      break;

    case COMPONENT_REF:
      result = build3 (COMPONENT_REF, type,
                       gnat_rewrite_reference (TREE_OPERAND (ref, 0), func,
                                               data, init),
                       TREE_OPERAND (ref, 1), NULL_TREE);
      break;

    case BIT_FIELD_REF:
      result = build3 (BIT_FIELD_REF, type,
                       gnat_rewrite_reference (TREE_OPERAND (ref, 0), func,
                                               data, init),
                       TREE_OPERAND (ref, 1), TREE_OPERAND (ref, 2));
      break;

    case ARRAY_REF:
    case ARRAY_RANGE_REF:
      result
        = build4 (code, type,
                  gnat_rewrite_reference (TREE_OPERAND (ref, 0), func, data,
                                          init),
                  func (TREE_OPERAND (ref, 1), data),
                  TREE_OPERAND (ref, 2), TREE_OPERAND (ref, 3));
      break;

    case COMPOUND_EXPR:
      gcc_assert (*init == NULL_TREE);
      *init = TREE_OPERAND (ref, 0);
      /* We expect only the pattern built in Call_to_gnu.  */
      gcc_assert (DECL_P (TREE_OPERAND (ref, 1)));
      return TREE_OPERAND (ref, 1);

    case CALL_EXPR:
      {
        /* This can only be an atomic load.  */
        gcc_assert (call_is_atomic_load (ref));

        /* An atomic load is an INDIRECT_REF of its first argument.  */
        tree t = CALL_EXPR_ARG (ref, 0);
        if (TREE_CODE (t) == NOP_EXPR)
          t = TREE_OPERAND (t, 0);
        if (TREE_CODE (t) == ADDR_EXPR)
          t = build1 (ADDR_EXPR, TREE_TYPE (t),
                      gnat_rewrite_reference (TREE_OPERAND (t, 0), func, data,
                                              init));
        else
          t = func (t, data);
        t = fold_convert (TREE_TYPE (CALL_EXPR_ARG (ref, 0)), t);

        result = build_call_expr (TREE_OPERAND (CALL_EXPR_FN (ref), 0), 2,
                                  t, CALL_EXPR_ARG (ref, 1));
      }
      break;

    case ERROR_MARK:
      return error_mark_node;

    default:
      gcc_unreachable ();
    }

  /* TREE_THIS_VOLATILE and TREE_SIDE_EFFECTS set on the initial expression
     may not be sustained across some paths, such as the one through build1
     for INDIRECT_REF.  We reset those flags here in the general case, which
     is consistent with the GCC version of this routine.

     Special care should be taken regarding TREE_SIDE_EFFECTS, because some
     paths introduce side-effects where there was none initially (e.g. if a
     SAVE_EXPR is built) and we also want to keep track of that.  */
  TREE_READONLY (result) = TREE_READONLY (ref);
  TREE_SIDE_EFFECTS (result) |= TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (result) = TREE_THIS_VOLATILE (ref);

  if (code == INDIRECT_REF
      || code == UNCONSTRAINED_ARRAY_REF
      || code == ARRAY_REF
      || code == ARRAY_RANGE_REF)
    TREE_THIS_NOTRAP (result) = TREE_THIS_NOTRAP (ref);

  return result;
}

/* This is equivalent to get_inner_reference in expr.c but it returns the
   ultimate containing object only if the reference (lvalue) is constant,
   i.e. if it doesn't depend on the context in which it is evaluated.  */

tree
get_inner_constant_reference (tree exp)
{
  while (true)
    {
      switch (TREE_CODE (exp))
        {
        case BIT_FIELD_REF:
          break;

        case COMPONENT_REF:
          if (TREE_OPERAND (exp, 2) != NULL_TREE)
            return NULL_TREE;

          if (!TREE_CONSTANT (DECL_FIELD_OFFSET (TREE_OPERAND (exp, 1))))
            return NULL_TREE;
          break;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          {
            if (TREE_OPERAND (exp, 2) != NULL_TREE
                || TREE_OPERAND (exp, 3) != NULL_TREE)
              return NULL_TREE;

            tree array_type = TREE_TYPE (TREE_OPERAND (exp, 0));
            if (!TREE_CONSTANT (TREE_OPERAND (exp, 1))
                || !TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (array_type)))
                || !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (array_type))))
              return NULL_TREE;
          }
          break;

        case REALPART_EXPR:
        case IMAGPART_EXPR:
        case VIEW_CONVERT_EXPR:
          break;

        default:
          goto done;
        }

      exp = TREE_OPERAND (exp, 0);
    }

 done:
  return exp;
}
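
/* Illustrative sketch, added for exposition and not part of the original
   source: for a reference like Rec.Arr (3) where the component offset,
   the array lower bound and the element size are all compile-time
   constants, the loop above walks down to Rec and returns it.  For
   Rec.Arr (I) with a variable I, the ARRAY_REF check fails and NULL_TREE
   is returned instead.  */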

/* If EXPR is an expression that is invariant in the current function, in the
   sense that it can be evaluated anywhere in the function and any number of
   times, return EXPR or an equivalent expression.  Otherwise return NULL.  */

tree
gnat_invariant_expr (tree expr)
{
  tree type = TREE_TYPE (expr), t;

  expr = remove_conversions (expr, false);

  while ((TREE_CODE (expr) == CONST_DECL
          || (TREE_CODE (expr) == VAR_DECL && TREE_READONLY (expr)))
         && decl_function_context (expr) == current_function_decl
         && DECL_INITIAL (expr))
    {
      expr = DECL_INITIAL (expr);
      /* Look into CONSTRUCTORs built to initialize padded types.  */
      if (TYPE_IS_PADDING_P (TREE_TYPE (expr)))
        expr = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (expr))), expr);
      expr = remove_conversions (expr, false);
    }

  /* We are only interested in scalar types at the moment and, although we
     may have gone through padding types in the above loop, we must be back
     to a scalar value at this point.  */
  if (AGGREGATE_TYPE_P (TREE_TYPE (expr)))
    return NULL_TREE;

  if (TREE_CONSTANT (expr))
    return fold_convert (type, expr);

  t = expr;

  while (true)
    {
      switch (TREE_CODE (t))
        {
        case COMPONENT_REF:
          if (TREE_OPERAND (t, 2) != NULL_TREE)
            return NULL_TREE;
          break;

        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          if (!TREE_CONSTANT (TREE_OPERAND (t, 1))
              || TREE_OPERAND (t, 2) != NULL_TREE
              || TREE_OPERAND (t, 3) != NULL_TREE)
            return NULL_TREE;
          break;

        case BIT_FIELD_REF:
        case VIEW_CONVERT_EXPR:
        case REALPART_EXPR:
        case IMAGPART_EXPR:
          break;

        case INDIRECT_REF:
          if (!TREE_READONLY (t)
              || TREE_SIDE_EFFECTS (t)
              || !TREE_THIS_NOTRAP (t))
            return NULL_TREE;
          break;

        default:
          goto object;
        }

      t = TREE_OPERAND (t, 0);
    }

 object:
  if (TREE_SIDE_EFFECTS (t))
    return NULL_TREE;

  if (TREE_CODE (t) == CONST_DECL
      && (DECL_EXTERNAL (t)
          || decl_function_context (t) != current_function_decl))
    return fold_convert (type, expr);

  if (!TREE_READONLY (t))
    return NULL_TREE;

  if (TREE_CODE (t) == PARM_DECL)
    return fold_convert (type, expr);

  if (TREE_CODE (t) == VAR_DECL
      && (DECL_EXTERNAL (t)
          || decl_function_context (t) != current_function_decl))
    return fold_convert (type, expr);

  return NULL_TREE;
}