/* gcc/ada/utils2.c — GNAT (Ada front end) gigi utility routines, part 2.  */
1 /****************************************************************************
2 * *
3 * GNAT COMPILER COMPONENTS *
4 * *
5 * U T I L S 2 *
6 * *
7 * C Implementation File *
8 * *
9 * *
10 * Copyright (C) 1992-2002, Free Software Foundation, Inc. *
11 * *
12 * GNAT is free software; you can redistribute it and/or modify it under *
13 * terms of the GNU General Public License as published by the Free Soft- *
14 * ware Foundation; either version 2, or (at your option) any later ver- *
15 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
16 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
17 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
18 * for more details. You should have received a copy of the GNU General *
19 * Public License distributed with GNAT; see file COPYING. If not, write *
20 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
21 * MA 02111-1307, USA. *
22 * *
23 * GNAT was originally developed by the GNAT team at New York University. *
24 * It is now maintained by Ada Core Technologies Inc (http://www.gnat.com). *
25 * *
26 ****************************************************************************/
27
28 #include "config.h"
29 #include "system.h"
30 #include "tree.h"
31 #include "flags.h"
32 #include "ada.h"
33 #include "types.h"
34 #include "atree.h"
35 #include "stringt.h"
36 #include "uintp.h"
37 #include "fe.h"
38 #include "elists.h"
39 #include "nlists.h"
40 #include "sinfo.h"
41 #include "einfo.h"
42 #include "ada-tree.h"
43 #include "gigi.h"
44
/* Forward declarations of subprograms local to this file.  */
static tree find_common_type		PARAMS ((tree, tree));
static int contains_save_expr_p		PARAMS ((tree));
static tree contains_null_expr		PARAMS ((tree));
static tree compare_arrays		PARAMS ((tree, tree, tree));
static tree nonbinary_modular_operation	PARAMS ((enum tree_code, tree,
						 tree, tree));
static tree build_simple_component_ref	PARAMS ((tree, tree, tree));
52 \f
53 /* Prepare expr to be an argument of a TRUTH_NOT_EXPR or other logical
54 operation.
55
56 This preparation consists of taking the ordinary representation of
57 an expression expr and producing a valid tree boolean expression
58 describing whether expr is nonzero. We could simply always do
59
60 build_binary_op (NE_EXPR, expr, integer_zero_node, 1),
61
62 but we optimize comparisons, &&, ||, and !.
63
64 The resulting type should always be the same as the input type.
65 This function is simpler than the corresponding C version since
66 the only possible operands will be things of Boolean type. */
67
68 tree
69 gnat_truthvalue_conversion (expr)
70 tree expr;
71 {
72 tree type = TREE_TYPE (expr);
73
74 switch (TREE_CODE (expr))
75 {
76 case EQ_EXPR: case NE_EXPR: case LE_EXPR: case GE_EXPR:
77 case LT_EXPR: case GT_EXPR:
78 case TRUTH_ANDIF_EXPR:
79 case TRUTH_ORIF_EXPR:
80 case TRUTH_AND_EXPR:
81 case TRUTH_OR_EXPR:
82 case TRUTH_XOR_EXPR:
83 case ERROR_MARK:
84 return expr;
85
86 case COND_EXPR:
87 /* Distribute the conversion into the arms of a COND_EXPR. */
88 return fold
89 (build (COND_EXPR, type, TREE_OPERAND (expr, 0),
90 gnat_truthvalue_conversion (TREE_OPERAND (expr, 1)),
91 gnat_truthvalue_conversion (TREE_OPERAND (expr, 2))));
92
93 case WITH_RECORD_EXPR:
94 return build (WITH_RECORD_EXPR, type,
95 gnat_truthvalue_conversion (TREE_OPERAND (expr, 0)),
96 TREE_OPERAND (expr, 1));
97
98 default:
99 return build_binary_op (NE_EXPR, type, expr,
100 convert (type, integer_zero_node));
101 }
102 }
103 \f
104 /* Return the base type of TYPE. */
105
106 tree
107 get_base_type (type)
108 tree type;
109 {
110 if (TREE_CODE (type) == RECORD_TYPE
111 && TYPE_LEFT_JUSTIFIED_MODULAR_P (type))
112 type = TREE_TYPE (TYPE_FIELDS (type));
113
114 while (TREE_TYPE (type) != 0
115 && (TREE_CODE (type) == INTEGER_TYPE
116 || TREE_CODE (type) == REAL_TYPE))
117 type = TREE_TYPE (type);
118
119 return type;
120 }
121
122 /* Likewise, but only return types known to the Ada source. */
123 tree
124 get_ada_base_type (type)
125 tree type;
126 {
127 while (TREE_TYPE (type) != 0
128 && (TREE_CODE (type) == INTEGER_TYPE
129 || TREE_CODE (type) == REAL_TYPE)
130 && ! TYPE_EXTRA_SUBTYPE_P (type))
131 type = TREE_TYPE (type);
132
133 return type;
134 }
135 \f
/* EXP is a GCC tree representing an address.  See if we can find how
   strictly the object at that address is aligned.  Return that alignment
   in bits.  If we don't know anything about the alignment, return 0.
   We do not go merely by type information here since the check on
   N_Validate_Unchecked_Alignment does that.  */

unsigned int
known_alignment (exp)
     tree exp;
{
  unsigned int lhs, rhs;

  switch (TREE_CODE (exp))
    {
    case CONVERT_EXPR:
    case NOP_EXPR:
    case NON_LVALUE_EXPR:
      /* Conversions between pointers and integers don't change the alignment
	 of the underlying object.  */
      return known_alignment (TREE_OPERAND (exp, 0));

    case PLUS_EXPR:
    case MINUS_EXPR:
      /* If two addresses are added, the alignment of the result is the
	 minimum of the two alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));
      return MIN (lhs, rhs);

    case INTEGER_CST:
      /* X & -X isolates the lowest set bit of the constant, which gives
	 the constant's alignment — but it is in bytes, not bits, so scale
	 by BITS_PER_UNIT and cap at BIGGEST_ALIGNMENT.  */
      return MIN (BITS_PER_UNIT
		  * (TREE_INT_CST_LOW (exp) & - TREE_INT_CST_LOW (exp)),
		  BIGGEST_ALIGNMENT);

    case MULT_EXPR:
      /* If we know the alignment of just one side, use it.  Otherwise,
	 use the product of the alignments.  */
      lhs = known_alignment (TREE_OPERAND (exp, 0));
      rhs = known_alignment (TREE_OPERAND (exp, 1));
      if (lhs == 0 || rhs == 0)
	return MIN (BIGGEST_ALIGNMENT, MAX (lhs, rhs));

      return MIN (BIGGEST_ALIGNMENT, lhs * rhs);

    case ADDR_EXPR:
      /* Taking the address of an object: ask GCC for that object's
	 alignment directly.  */
      return expr_align (TREE_OPERAND (exp, 0));

    default:
      /* Unknown expression form: we can't prove any alignment.  */
      return 0;
    }
}
189 \f
/* We have a comparison or assignment operation on two types, T1 and T2,
   which are both either array types or both record types.
   Return the type that both operands should be converted to, if any.
   Otherwise return zero.  */

static tree
find_common_type (t1, t2)
     tree t1, t2;
{
  /* If either type is non-BLKmode, use it.  Note that we know that we will
     not have any alignment problems since if we did the non-BLKmode
     type could not have been used.  */
  if (TYPE_MODE (t1) != BLKmode)
    return t1;
  else if (TYPE_MODE (t2) != BLKmode)
    return t2;

  /* Otherwise, return the type that has a constant size.  */
  if (TREE_CONSTANT (TYPE_SIZE (t1)))
    return t1;
  else if (TREE_CONSTANT (TYPE_SIZE (t2)))
    return t2;

  /* In this case, both types have variable size.  It's probably
     best to leave the "type mismatch" because changing it could
     cause a bad self-referential reference.  */
  return 0;
}
218 \f
219 /* See if EXP contains a SAVE_EXPR in a position where we would
220 normally put it.
221
222 ??? This is a real kludge, but is probably the best approach short
223 of some very general solution. */
224
225 static int
226 contains_save_expr_p (exp)
227 tree exp;
228 {
229 switch (TREE_CODE (exp))
230 {
231 case SAVE_EXPR:
232 return 1;
233
234 case ADDR_EXPR: case INDIRECT_REF:
235 case COMPONENT_REF:
236 case NOP_EXPR: case CONVERT_EXPR: case VIEW_CONVERT_EXPR:
237 return contains_save_expr_p (TREE_OPERAND (exp, 0));
238
239 case CONSTRUCTOR:
240 return (CONSTRUCTOR_ELTS (exp) != 0
241 && contains_save_expr_p (CONSTRUCTOR_ELTS (exp)));
242
243 case TREE_LIST:
244 return (contains_save_expr_p (TREE_VALUE (exp))
245 || (TREE_CHAIN (exp) != 0
246 && contains_save_expr_p (TREE_CHAIN (exp))));
247
248 default:
249 return 0;
250 }
251 }
252 \f
/* See if EXP contains a NULL_EXPR in an expression we use for sizes.  Return
   it if so.  This is used to detect types whose sizes involve computations
   that are known to raise Constraint_Error.  */

static tree
contains_null_expr (exp)
     tree exp;
{
  tree tem;

  if (TREE_CODE (exp) == NULL_EXPR)
    return exp;

  /* Dispatch on the class of tree code: '1' = unary, '<' = comparison,
     '2' = binary, 'e' = other expression.  */
  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case '1':
      return contains_null_expr (TREE_OPERAND (exp, 0));

    case '<':  case '2':
      /* Check both operands, reporting the first NULL_EXPR found.  */
      tem = contains_null_expr (TREE_OPERAND (exp, 0));
      if (tem != 0)
	return tem;

      return contains_null_expr (TREE_OPERAND (exp, 1));

    case 'e':
      switch (TREE_CODE (exp))
	{
	case SAVE_EXPR:
	  return contains_null_expr (TREE_OPERAND (exp, 0));

	case COND_EXPR:
	  /* Check the condition and both arms in order.  */
	  tem = contains_null_expr (TREE_OPERAND (exp, 0));
	  if (tem != 0)
	    return tem;

	  tem = contains_null_expr (TREE_OPERAND (exp, 1));
	  if (tem != 0)
	    return tem;

	  return contains_null_expr (TREE_OPERAND (exp, 2));

	default:
	  return 0;
	}

    default:
      /* Constants, declarations, etc. cannot contain a NULL_EXPR.  */
      return 0;
    }
}
303 \f
/* Return an expression tree representing an equality comparison of
   A1 and A2, two objects of ARRAY_TYPE.  The returned expression should
   be of type RESULT_TYPE.

   Two arrays are equal in one of two ways: (1) if both have zero length
   in some dimension (not necessarily the same dimension) or (2) if the
   lengths in each dimension are equal and the data is equal.  We perform the
   length tests in as efficient a manner as possible.  */

static tree
compare_arrays (result_type, a1, a2)
     tree a1, a2;
     tree result_type;
{
  tree t1 = TREE_TYPE (a1);
  tree t2 = TREE_TYPE (a2);
  /* Accumulated comparison; starts as "true" and is ANDed with each
     dimension's length comparison, then with the data comparison.  */
  tree result = convert (result_type, integer_one_node);
  /* Expressions that are true if the corresponding array has a zero
     length in some dimension.  */
  tree a1_is_null = convert (result_type, integer_zero_node);
  tree a2_is_null = convert (result_type, integer_zero_node);
  int length_zero_p = 0;

  /* Process each dimension separately and compare the lengths.  If any
     dimension has a size known to be zero, set LENGTH_ZERO_P to 1 to
     suppress the comparison of the data.  */
  while (TREE_CODE (t1) == ARRAY_TYPE && TREE_CODE (t2) == ARRAY_TYPE)
    {
      tree lb1 = TYPE_MIN_VALUE (TYPE_DOMAIN (t1));
      tree ub1 = TYPE_MAX_VALUE (TYPE_DOMAIN (t1));
      tree lb2 = TYPE_MIN_VALUE (TYPE_DOMAIN (t2));
      tree ub2 = TYPE_MAX_VALUE (TYPE_DOMAIN (t2));
      tree bt = get_base_type (TREE_TYPE (lb1));
      /* Note: these are the lengths minus one (ub - lb).  */
      tree length1 = fold (build (MINUS_EXPR, bt, ub1, lb1));
      tree length2 = fold (build (MINUS_EXPR, bt, ub2, lb2));
      tree nbt;
      tree tem;
      tree comparison, this_a1_is_null, this_a2_is_null;

      /* If the length of the first array is a constant, swap our operands
	 unless the length of the second array is the constant zero.
	 Note that we have set the `length' values to the length - 1.  */
      if (TREE_CODE (length1) == INTEGER_CST
	  && ! integer_zerop (fold (build (PLUS_EXPR, bt, length2,
					   convert (bt, integer_one_node)))))
	{
	  tem = a1, a1 = a2, a2 = tem;
	  tem = t1, t1 = t2, t2 = tem;
	  tem = lb1, lb1 = lb2, lb2 = tem;
	  tem = ub1, ub1 = ub2, ub2 = tem;
	  tem = length1, length1 = length2, length2 = tem;
	  tem = a1_is_null, a1_is_null = a2_is_null, a2_is_null = tem;
	}

      /* If the length of this dimension in the second array is the constant
	 zero, we can just go inside the original bounds for the first
	 array and see if last < first.  */
      if (integer_zerop (fold (build (PLUS_EXPR, bt, length2,
				      convert (bt, integer_one_node)))))
	{
	  tree ub = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
	  tree lb = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));

	  comparison = build_binary_op (LT_EXPR, result_type, ub, lb);

	  /* Bounds may reference a PLACEHOLDER_EXPR; tie them to A1.  */
	  if (contains_placeholder_p (comparison))
	    comparison = build (WITH_RECORD_EXPR, result_type,
				comparison, a1);
	  if (contains_placeholder_p (length1))
	    length1 = build (WITH_RECORD_EXPR, bt, length1, a1);

	  length_zero_p = 1;

	  this_a1_is_null = comparison;
	  this_a2_is_null = convert (result_type, integer_one_node);
	}

      /* If the length is some other constant value, we know that
	 this dimension in the first array cannot be superflat, so we
	 can just use its length from the actual stored bounds.  */
      else if (TREE_CODE (length2) == INTEGER_CST)
	{
	  ub1 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
	  lb1 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1)));
	  ub2 = TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
	  lb2 = TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2)));
	  nbt = get_base_type (TREE_TYPE (ub1));

	  comparison
	    = build_binary_op (EQ_EXPR, result_type,
			       build_binary_op (MINUS_EXPR, nbt, ub1, lb1),
			       build_binary_op (MINUS_EXPR, nbt, ub2, lb2));

	  /* Note that we know that UB2 and LB2 are constant and hence
	     cannot contain a PLACEHOLDER_EXPR.  */

	  if (contains_placeholder_p (comparison))
	    comparison = build (WITH_RECORD_EXPR, result_type, comparison, a1);
	  if (contains_placeholder_p (length1))
	    length1 = build (WITH_RECORD_EXPR, bt, length1, a1);

	  this_a1_is_null = build_binary_op (LT_EXPR, result_type, ub1, lb1);
	  this_a2_is_null = convert (result_type, integer_zero_node);
	}

      /* Otherwise compare the computed lengths.  */
      else
	{
	  if (contains_placeholder_p (length1))
	    length1 = build (WITH_RECORD_EXPR, bt, length1, a1);
	  if (contains_placeholder_p (length2))
	    length2 = build (WITH_RECORD_EXPR, bt, length2, a2);

	  comparison
	    = build_binary_op (EQ_EXPR, result_type, length1, length2);

	  /* Remember, LENGTH1 and LENGTH2 are the lengths minus one, so
	     "< 0" means a zero-length dimension.  */
	  this_a1_is_null
	    = build_binary_op (LT_EXPR, result_type, length1,
			       convert (bt, integer_zero_node));
	  this_a2_is_null
	    = build_binary_op (LT_EXPR, result_type, length2,
			       convert (bt, integer_zero_node));
	}

      result = build_binary_op (TRUTH_ANDIF_EXPR, result_type,
				result, comparison);

      a1_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
				    this_a1_is_null, a1_is_null);
      a2_is_null = build_binary_op (TRUTH_ORIF_EXPR, result_type,
				    this_a2_is_null, a2_is_null);

      /* Move to the component type for the next dimension.  */
      t1 = TREE_TYPE (t1);
      t2 = TREE_TYPE (t2);
    }

  /* Unless the size of some bound is known to be zero, compare the
     data in the array.  */
  if (! length_zero_p)
    {
      tree type = find_common_type (TREE_TYPE (a1), TREE_TYPE (a2));

      if (type != 0)
	a1 = convert (type, a1), a2 = convert (type, a2);


      result = build_binary_op (TRUTH_ANDIF_EXPR, result_type, result,
				fold (build (EQ_EXPR, result_type, a1, a2)));

    }

  /* The result is also true if both sizes are zero.  */
  result = build_binary_op (TRUTH_ORIF_EXPR, result_type,
			    build_binary_op (TRUTH_ANDIF_EXPR, result_type,
					     a1_is_null, a2_is_null),
			    result);

  /* If either operand contains SAVE_EXPRs, they have to be evaluated before
     starting the comparison above since the place it would be otherwise
     evaluated would be wrong.  */

  if (contains_save_expr_p (a1))
    result = build (COMPOUND_EXPR, result_type, a1, result);

  if (contains_save_expr_p (a2))
    result = build (COMPOUND_EXPR, result_type, a2, result);

  return result;
}
471 \f
/* Compute the result of applying OP_CODE to LHS and RHS, where both are of
   type TYPE.  We know that TYPE is a modular type with a nonbinary
   modulus.  */

static tree
nonbinary_modular_operation (op_code, type, lhs, rhs)
     enum tree_code op_code;
     tree type;
     tree lhs, rhs;
{
  tree modulus = TYPE_MODULUS (type);
  /* Bits needed to represent any value below the modulus.  */
  unsigned int needed_precision = tree_floor_log2 (modulus) + 1;
  unsigned int precision;
  int unsignedp = 1;
  tree op_type = type;
  tree result;

  /* If this is an addition of a constant, convert it to a subtraction
     of a constant since we can do that faster.  */
  if (op_code == PLUS_EXPR && TREE_CODE (rhs) == INTEGER_CST)
    rhs = fold (build (MINUS_EXPR, type, modulus, rhs)), op_code = MINUS_EXPR;

  /* For the logical operations, we only need PRECISION bits.  For
     addition and subtraction, we need one more and for multiplication we
     need twice as many.  But we never want to make a size smaller than
     our size.  */
  if (op_code == PLUS_EXPR || op_code == MINUS_EXPR)
    needed_precision += 1;
  else if (op_code == MULT_EXPR)
    needed_precision *= 2;

  precision = MAX (needed_precision, TYPE_PRECISION (op_type));

  /* Unsigned will do for everything but subtraction, whose intermediate
     result may be negative.  */
  if (op_code == MINUS_EXPR)
    unsignedp = 0;

  /* If our type is the wrong signedness or isn't wide enough, make a new
     type and convert both our operands to it.  */
  if (TYPE_PRECISION (op_type) < precision
      || TREE_UNSIGNED (op_type) != unsignedp)
    {
      /* Copy the node so we ensure it can be modified to make it modular.  */
      op_type = copy_node (gnat_type_for_size (precision, unsignedp));
      modulus = convert (op_type, modulus);
      TYPE_MODULUS (op_type) = modulus;
      TYPE_MODULAR_P (op_type) = 1;
      lhs = convert (op_type, lhs);
      rhs = convert (op_type, rhs);
    }

  /* Do the operation, then we'll fix it up.  */
  result = fold (build (op_code, op_type, lhs, rhs));

  /* For multiplication, we have no choice but to do a full modulus
     operation.  However, we want to do this in the narrowest
     possible size.  */
  if (op_code == MULT_EXPR)
    {
      tree div_type = copy_node (gnat_type_for_size (needed_precision, 1));
      modulus = convert (div_type, modulus);
      TYPE_MODULUS (div_type) = modulus;
      TYPE_MODULAR_P (div_type) = 1;
      result = convert (op_type,
			fold (build (TRUNC_MOD_EXPR, div_type,
				     convert (div_type, result), modulus)));
    }

  /* For subtraction, add the modulus back if we are negative.
     Note RESULT must be wrapped in a SAVE_EXPR since the COND_EXPR
     references it twice.  */
  else if (op_code == MINUS_EXPR)
    {
      result = save_expr (result);
      result = fold (build (COND_EXPR, op_type,
			    build (LT_EXPR, integer_type_node, result,
				   convert (op_type, integer_zero_node)),
			    fold (build (PLUS_EXPR, op_type,
					 result, modulus)),
			    result));
    }

  /* For the other operations, subtract the modulus if we are >= it.  */
  else
    {
      result = save_expr (result);
      result = fold (build (COND_EXPR, op_type,
			    build (GE_EXPR, integer_type_node,
				   result, modulus),
			    fold (build (MINUS_EXPR, op_type,
					 result, modulus)),
			    result));
    }

  return convert (type, result);
}
566 \f
567 /* Make a binary operation of kind OP_CODE. RESULT_TYPE is the type
568 desired for the result. Usually the operation is to be performed
569 in that type. For MODIFY_EXPR and ARRAY_REF, RESULT_TYPE may be 0
570 in which case the type to be used will be derived from the operands.
571
572 This function is very much unlike the ones for C and C++ since we
573 have already done any type conversion and matching required. All we
574 have to do here is validate the work done by SEM and handle subtypes. */
575
576 tree
577 build_binary_op (op_code, result_type, left_operand, right_operand)
578 enum tree_code op_code;
579 tree result_type;
580 tree left_operand;
581 tree right_operand;
582 {
583 tree left_type = TREE_TYPE (left_operand);
584 tree right_type = TREE_TYPE (right_operand);
585 tree left_base_type = get_base_type (left_type);
586 tree right_base_type = get_base_type (right_type);
587 tree operation_type = result_type;
588 tree best_type = 0;
589 tree modulus;
590 tree result;
591 int has_side_effects = 0;
592
593 /* If one (but not both, unless they have the same object) operands are a
594 WITH_RECORD_EXPR, do the operation and then surround it with the
595 WITH_RECORD_EXPR. Don't do this for assignment, for an ARRAY_REF, or
596 for an ARRAY_RANGE_REF because we need to keep track of the
597 WITH_RECORD_EXPRs on both operands very carefully. */
598 if (op_code != MODIFY_EXPR && op_code != ARRAY_REF
599 && op_code != ARRAY_RANGE_REF
600 && TREE_CODE (left_operand) == WITH_RECORD_EXPR
601 && (TREE_CODE (right_operand) != WITH_RECORD_EXPR
602 || operand_equal_p (TREE_OPERAND (left_operand, 1),
603 TREE_OPERAND (right_operand, 1), 0)))
604 {
605 tree right = right_operand;
606
607 if (TREE_CODE (right) == WITH_RECORD_EXPR)
608 right = TREE_OPERAND (right, 0);
609
610 result = build_binary_op (op_code, result_type,
611 TREE_OPERAND (left_operand, 0), right);
612 return build (WITH_RECORD_EXPR, TREE_TYPE (result), result,
613 TREE_OPERAND (left_operand, 1));
614 }
615 else if (op_code != MODIFY_EXPR && op_code != ARRAY_REF
616 && op_code != ARRAY_RANGE_REF
617 && TREE_CODE (left_operand) != WITH_RECORD_EXPR
618 && TREE_CODE (right_operand) == WITH_RECORD_EXPR)
619 {
620 result = build_binary_op (op_code, result_type, left_operand,
621 TREE_OPERAND (right_operand, 0));
622 return build (WITH_RECORD_EXPR, TREE_TYPE (result), result,
623 TREE_OPERAND (right_operand, 1));
624 }
625
626 if (operation_type != 0
627 && TREE_CODE (operation_type) == RECORD_TYPE
628 && TYPE_LEFT_JUSTIFIED_MODULAR_P (operation_type))
629 operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));
630
631 if (operation_type != 0
632 && ! AGGREGATE_TYPE_P (operation_type)
633 && TYPE_EXTRA_SUBTYPE_P (operation_type))
634 operation_type = get_base_type (operation_type);
635
636 modulus = (operation_type != 0 && TREE_CODE (operation_type) == INTEGER_TYPE
637 && TYPE_MODULAR_P (operation_type)
638 ? TYPE_MODULUS (operation_type) : 0);
639
640 switch (op_code)
641 {
642 case MODIFY_EXPR:
643 /* If there were any integral or pointer conversions on LHS, remove
644 them; we'll be putting them back below if needed. Likewise for
645 conversions between array and record types. But don't do this if
646 the right operand is not BLKmode (for packed arrays)
647 unless we are not changing the mode. */
648 while ((TREE_CODE (left_operand) == CONVERT_EXPR
649 || TREE_CODE (left_operand) == NOP_EXPR
650 || TREE_CODE (left_operand) == VIEW_CONVERT_EXPR)
651 && (((INTEGRAL_TYPE_P (left_type)
652 || POINTER_TYPE_P (left_type))
653 && (INTEGRAL_TYPE_P (TREE_TYPE
654 (TREE_OPERAND (left_operand, 0)))
655 || POINTER_TYPE_P (TREE_TYPE
656 (TREE_OPERAND (left_operand, 0)))))
657 || (((TREE_CODE (left_type) == RECORD_TYPE
658 /* Don't remove conversions to left-justified modular
659 types. */
660 && ! TYPE_LEFT_JUSTIFIED_MODULAR_P (left_type))
661 || TREE_CODE (left_type) == ARRAY_TYPE)
662 && ((TREE_CODE (TREE_TYPE
663 (TREE_OPERAND (left_operand, 0)))
664 == RECORD_TYPE)
665 || (TREE_CODE (TREE_TYPE
666 (TREE_OPERAND (left_operand, 0)))
667 == ARRAY_TYPE))
668 && (TYPE_MODE (right_type) == BLKmode
669 || (TYPE_MODE (left_type)
670 == TYPE_MODE (TREE_TYPE
671 (TREE_OPERAND
672 (left_operand, 0))))))))
673 {
674 left_operand = TREE_OPERAND (left_operand, 0);
675 left_type = TREE_TYPE (left_operand);
676 }
677
678 if (operation_type == 0)
679 operation_type = left_type;
680
681 /* If the RHS has a conversion between record and array types and
682 an inner type is no worse, use it. Note we cannot do this for
683 modular types or types with TYPE_ALIGN_OK, since the latter
684 might indicate a conversion between a root type and a class-wide
685 type, which we must not remove. */
686 while (TREE_CODE (right_operand) == VIEW_CONVERT_EXPR
687 && ((TREE_CODE (right_type) == RECORD_TYPE
688 && ! TYPE_LEFT_JUSTIFIED_MODULAR_P (right_type)
689 && ! TYPE_ALIGN_OK (right_type)
690 && ! TYPE_IS_FAT_POINTER_P (right_type))
691 || TREE_CODE (right_type) == ARRAY_TYPE)
692 && (((TREE_CODE (TREE_TYPE (TREE_OPERAND (right_operand, 0)))
693 == RECORD_TYPE)
694 && ! (TYPE_LEFT_JUSTIFIED_MODULAR_P
695 (TREE_TYPE (TREE_OPERAND (right_operand, 0))))
696 && ! (TYPE_ALIGN_OK
697 (TREE_TYPE (TREE_OPERAND (right_operand, 0))))
698 && ! (TYPE_IS_FAT_POINTER_P
699 (TREE_TYPE (TREE_OPERAND (right_operand, 0)))))
700 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (right_operand, 0)))
701 == ARRAY_TYPE))
702 && (0 == (best_type
703 == find_common_type (right_type,
704 TREE_TYPE (TREE_OPERAND
705 (right_operand, 0))))
706 || right_type != best_type))
707 {
708 right_operand = TREE_OPERAND (right_operand, 0);
709 right_type = TREE_TYPE (right_operand);
710 }
711
712 /* If we are copying one array or record to another, find the best type
713 to use. */
714 if (((TREE_CODE (left_type) == ARRAY_TYPE
715 && TREE_CODE (right_type) == ARRAY_TYPE)
716 || (TREE_CODE (left_type) == RECORD_TYPE
717 && TREE_CODE (right_type) == RECORD_TYPE))
718 && (best_type = find_common_type (left_type, right_type)) != 0)
719 operation_type = best_type;
720
721 /* If a class-wide type may be involved, force use of the RHS type. */
722 if (TREE_CODE (right_type) == RECORD_TYPE && TYPE_ALIGN_OK (right_type))
723 operation_type = right_type;
724
725 /* Ensure everything on the LHS is valid. If we have a field reference,
726 strip anything that get_inner_reference can handle. Then remove any
727 conversions with type types having the same code and mode. Mark
728 VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE. When done, we must have
729 either an INDIRECT_REF or a decl. */
730 result = left_operand;
731 while (1)
732 {
733 tree restype = TREE_TYPE (result);
734
735 if (TREE_CODE (result) == COMPONENT_REF
736 || TREE_CODE (result) == ARRAY_REF
737 || TREE_CODE (result) == ARRAY_RANGE_REF)
738 while (handled_component_p (result))
739 result = TREE_OPERAND (result, 0);
740 else if (TREE_CODE (result) == REALPART_EXPR
741 || TREE_CODE (result) == IMAGPART_EXPR
742 || TREE_CODE (result) == WITH_RECORD_EXPR
743 || ((TREE_CODE (result) == NOP_EXPR
744 || TREE_CODE (result) == CONVERT_EXPR)
745 && (((TREE_CODE (restype)
746 == TREE_CODE (TREE_TYPE
747 (TREE_OPERAND (result, 0))))
748 && (TYPE_MODE (TREE_TYPE
749 (TREE_OPERAND (result, 0)))
750 == TYPE_MODE (restype)))
751 || TYPE_ALIGN_OK (restype))))
752 result = TREE_OPERAND (result, 0);
753 else if (TREE_CODE (result) == VIEW_CONVERT_EXPR)
754 {
755 TREE_ADDRESSABLE (result) = 1;
756 result = TREE_OPERAND (result, 0);
757 }
758 else
759 break;
760 }
761
762 if (TREE_CODE (result) != INDIRECT_REF && TREE_CODE (result) != NULL_EXPR
763 && ! DECL_P (result))
764 gigi_abort (516);
765
766 /* Convert the right operand to the operation type unless
767 it is either already of the correct type or if the type
768 involves a placeholder, since the RHS may not have the same
769 record type. */
770 if (operation_type != right_type
771 && (! (TREE_CODE (TYPE_SIZE (operation_type)) != INTEGER_CST
772 && contains_placeholder_p (TYPE_SIZE (operation_type)))))
773 {
774 /* For a variable-size type, with both BLKmode, convert using
775 CONVERT_EXPR instead of an unchecked conversion since we don't
776 need to make a temporary (and can't anyway). */
777 if (TREE_CODE (TYPE_SIZE (operation_type)) != INTEGER_CST
778 && TYPE_MODE (TREE_TYPE (right_operand)) == BLKmode
779 && TREE_CODE (right_operand) != UNCONSTRAINED_ARRAY_REF)
780 right_operand = build1 (CONVERT_EXPR, operation_type,
781 right_operand);
782 else
783 right_operand = convert (operation_type, right_operand);
784
785 right_type = operation_type;
786 }
787
788 /* If the modes differ, make up a bogus type and convert the RHS to
789 it. This can happen with packed types. */
790 if (TYPE_MODE (left_type) != TYPE_MODE (right_type))
791 {
792 tree new_type = copy_node (left_type);
793
794 TYPE_SIZE (new_type) = TYPE_SIZE (right_type);
795 TYPE_SIZE_UNIT (new_type) = TYPE_SIZE_UNIT (right_type);
796 TYPE_MAIN_VARIANT (new_type) = new_type;
797 right_operand = convert (new_type, right_operand);
798 }
799
800 has_side_effects = 1;
801 modulus = 0;
802 break;
803
804 case ARRAY_REF:
805 if (operation_type == 0)
806 operation_type = TREE_TYPE (left_type);
807
808 /* ... fall through ... */
809
810 case ARRAY_RANGE_REF:
811
812 /* First convert the right operand to its base type. This will
813 prevent unneed signedness conversions when sizetype is wider than
814 integer. */
815 right_operand = convert (right_base_type, right_operand);
816 right_operand = convert (TYPE_DOMAIN (left_type), right_operand);
817
818 if (! TREE_CONSTANT (right_operand)
819 || ! TREE_CONSTANT (TYPE_MIN_VALUE (right_type))
820 || op_code == ARRAY_RANGE_REF)
821 gnat_mark_addressable (left_operand);
822
823 modulus = 0;
824 break;
825
826 case GE_EXPR:
827 case LE_EXPR:
828 case GT_EXPR:
829 case LT_EXPR:
830 if (POINTER_TYPE_P (left_type))
831 gigi_abort (501);
832
833 /* ... fall through ... */
834
835 case EQ_EXPR:
836 case NE_EXPR:
837 /* If either operand is a NULL_EXPR, just return a new one. */
838 if (TREE_CODE (left_operand) == NULL_EXPR)
839 return build (op_code, result_type,
840 build1 (NULL_EXPR, integer_type_node,
841 TREE_OPERAND (left_operand, 0)),
842 integer_zero_node);
843
844 else if (TREE_CODE (right_operand) == NULL_EXPR)
845 return build (op_code, result_type,
846 build1 (NULL_EXPR, integer_type_node,
847 TREE_OPERAND (right_operand, 0)),
848 integer_zero_node);
849
850 /* If either object is a left-justified modular types, get the
851 fields from within. */
852 if (TREE_CODE (left_type) == RECORD_TYPE
853 && TYPE_LEFT_JUSTIFIED_MODULAR_P (left_type))
854 {
855 left_operand = convert (TREE_TYPE (TYPE_FIELDS (left_type)),
856 left_operand);
857 left_type = TREE_TYPE (left_operand);
858 left_base_type = get_base_type (left_type);
859 }
860
861 if (TREE_CODE (right_type) == RECORD_TYPE
862 && TYPE_LEFT_JUSTIFIED_MODULAR_P (right_type))
863 {
864 right_operand = convert (TREE_TYPE (TYPE_FIELDS (right_type)),
865 right_operand);
866 right_type = TREE_TYPE (right_operand);
867 right_base_type = get_base_type (right_type);
868 }
869
870 /* If both objects are arrays, compare them specially. */
871 if ((TREE_CODE (left_type) == ARRAY_TYPE
872 || (TREE_CODE (left_type) == INTEGER_TYPE
873 && TYPE_HAS_ACTUAL_BOUNDS_P (left_type)))
874 && (TREE_CODE (right_type) == ARRAY_TYPE
875 || (TREE_CODE (right_type) == INTEGER_TYPE
876 && TYPE_HAS_ACTUAL_BOUNDS_P (right_type))))
877 {
878 result = compare_arrays (result_type, left_operand, right_operand);
879
880 if (op_code == EQ_EXPR)
881 ;
882 else if (op_code == NE_EXPR)
883 result = invert_truthvalue (result);
884 else
885 gigi_abort (502);
886
887 return result;
888 }
889
890 /* Otherwise, the base types must be the same unless the objects are
891 records. If we have records, use the best type and convert both
892 operands to that type. */
893 if (left_base_type != right_base_type)
894 {
895 if (TREE_CODE (left_base_type) == RECORD_TYPE
896 && TREE_CODE (right_base_type) == RECORD_TYPE)
897 {
898 /* The only way these are permitted to be the same is if both
899 types have the same name. In that case, one of them must
900 not be self-referential. Use that one as the best type.
901 Even better is if one is of fixed size. */
902 best_type = 0;
903
904 if (TYPE_NAME (left_base_type) == 0
905 || TYPE_NAME (left_base_type) != TYPE_NAME (right_base_type))
906 gigi_abort (503);
907
908 if (TREE_CONSTANT (TYPE_SIZE (left_base_type)))
909 best_type = left_base_type;
910 else if (TREE_CONSTANT (TYPE_SIZE (right_base_type)))
911 best_type = right_base_type;
912 else if (! contains_placeholder_p (TYPE_SIZE (left_base_type)))
913 best_type = left_base_type;
914 else if (! contains_placeholder_p (TYPE_SIZE (right_base_type)))
915 best_type = right_base_type;
916 else
917 gigi_abort (504);
918
919 left_operand = convert (best_type, left_operand);
920 right_operand = convert (best_type, right_operand);
921 }
922 else
923 gigi_abort (505);
924 }
925
926 /* If we are comparing a fat pointer against zero, we need to
927 just compare the data pointer. */
928 else if (TYPE_FAT_POINTER_P (left_base_type)
929 && TREE_CODE (right_operand) == CONSTRUCTOR
930 && integer_zerop (TREE_VALUE (TREE_OPERAND (right_operand, 1))))
931 {
932 right_operand = build_component_ref (left_operand, NULL_TREE,
933 TYPE_FIELDS (left_base_type));
934 left_operand = convert (TREE_TYPE (right_operand),
935 integer_zero_node);
936 }
937 else
938 {
939 left_operand = convert (left_base_type, left_operand);
940 right_operand = convert (right_base_type, right_operand);
941 }
942
943 modulus = 0;
944 break;
945
946 case PREINCREMENT_EXPR:
947 case PREDECREMENT_EXPR:
948 case POSTINCREMENT_EXPR:
949 case POSTDECREMENT_EXPR:
950 /* In these, the result type and the left operand type should be the
951 same. Do the operation in the base type of those and convert the
952 right operand (which is an integer) to that type.
953
954 Note that these operations are only used in loop control where
955 we guarantee that no overflow can occur. So nothing special need
956 be done for modular types. */
957
958 if (left_type != result_type)
959 gigi_abort (506);
960
961 operation_type = get_base_type (result_type);
962 left_operand = convert (operation_type, left_operand);
963 right_operand = convert (operation_type, right_operand);
964 has_side_effects = 1;
965 modulus = 0;
966 break;
967
968 case LSHIFT_EXPR:
969 case RSHIFT_EXPR:
970 case LROTATE_EXPR:
971 case RROTATE_EXPR:
972 /* The RHS of a shift can be any type. Also, ignore any modulus
973 (we used to abort, but this is needed for unchecked conversion
974 to modular types). Otherwise, processing is the same as normal. */
975 if (operation_type != left_base_type)
976 gigi_abort (514);
977
978 modulus = 0;
979 left_operand = convert (operation_type, left_operand);
980 break;
981
982 case TRUTH_ANDIF_EXPR:
983 case TRUTH_ORIF_EXPR:
984 case TRUTH_AND_EXPR:
985 case TRUTH_OR_EXPR:
986 case TRUTH_XOR_EXPR:
987 left_operand = gnat_truthvalue_conversion (left_operand);
988 right_operand = gnat_truthvalue_conversion (right_operand);
989 goto common;
990
991 case BIT_AND_EXPR:
992 case BIT_IOR_EXPR:
993 case BIT_XOR_EXPR:
994 /* For binary modulus, if the inputs are in range, so are the
995 outputs. */
996 if (modulus != 0 && integer_pow2p (modulus))
997 modulus = 0;
998
999 goto common;
1000
1001 case COMPLEX_EXPR:
1002 if (TREE_TYPE (result_type) != left_base_type
1003 || TREE_TYPE (result_type) != right_base_type)
1004 gigi_abort (515);
1005
1006 left_operand = convert (left_base_type, left_operand);
1007 right_operand = convert (right_base_type, right_operand);
1008 break;
1009
1010 case TRUNC_DIV_EXPR: case TRUNC_MOD_EXPR:
1011 case CEIL_DIV_EXPR: case CEIL_MOD_EXPR:
1012 case FLOOR_DIV_EXPR: case FLOOR_MOD_EXPR:
1013 case ROUND_DIV_EXPR: case ROUND_MOD_EXPR:
1014 /* These always produce results lower than either operand. */
1015 modulus = 0;
1016 goto common;
1017
1018 default:
1019 common:
1020 /* The result type should be the same as the base types of the
1021 both operands (and they should be the same). Convert
1022 everything to the result type. */
1023
1024 if (operation_type != left_base_type
1025 || left_base_type != right_base_type)
1026 gigi_abort (507);
1027
1028 left_operand = convert (operation_type, left_operand);
1029 right_operand = convert (operation_type, right_operand);
1030 }
1031
1032 if (modulus != 0 && ! integer_pow2p (modulus))
1033 {
1034 result = nonbinary_modular_operation (op_code, operation_type,
1035 left_operand, right_operand);
1036 modulus = 0;
1037 }
1038 /* If either operand is a NULL_EXPR, just return a new one. */
1039 else if (TREE_CODE (left_operand) == NULL_EXPR)
1040 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (left_operand, 0));
1041 else if (TREE_CODE (right_operand) == NULL_EXPR)
1042 return build1 (NULL_EXPR, operation_type, TREE_OPERAND (right_operand, 0));
1043 else
1044 result = fold (build (op_code, operation_type,
1045 left_operand, right_operand));
1046
1047 TREE_SIDE_EFFECTS (result) |= has_side_effects;
1048 TREE_CONSTANT (result)
1049 |= (TREE_CONSTANT (left_operand) & TREE_CONSTANT (right_operand)
1050 && op_code != ARRAY_REF && op_code != ARRAY_RANGE_REF);
1051
1052 if ((op_code == ARRAY_REF || op_code == ARRAY_RANGE_REF)
1053 && TYPE_VOLATILE (operation_type))
1054 TREE_THIS_VOLATILE (result) = 1;
1055
1056 /* If we are working with modular types, perform the MOD operation
1057 if something above hasn't eliminated the need for it. */
1058 if (modulus != 0)
1059 result = fold (build (FLOOR_MOD_EXPR, operation_type, result,
1060 convert (operation_type, modulus)));
1061
1062 if (result_type != 0 && result_type != operation_type)
1063 result = convert (result_type, result);
1064
1065 return result;
1066 }
1067 \f
/* Similar, but for unary operations.  OP_CODE is the operation to perform,
   RESULT_TYPE is the type of the result (zero means "derive a default"),
   and OPERAND is the single operand.  Return the resulting tree.  */

tree
build_unary_op (op_code, result_type, operand)
     enum tree_code op_code;
     tree result_type;
     tree operand;
{
  tree type = TREE_TYPE (operand);
  tree base_type = get_base_type (type);
  /* Type in which the computation is actually carried out; may be
     narrowed from RESULT_TYPE below.  */
  tree operation_type = result_type;
  tree result;
  int side_effects = 0;

  /* If we have a WITH_RECORD_EXPR as our operand, do the operation first,
     then surround it with the WITH_RECORD_EXPR.  This allows GCC to do better
     expression folding.  */
  if (TREE_CODE (operand) == WITH_RECORD_EXPR)
    {
      result = build_unary_op (op_code, result_type,
			       TREE_OPERAND (operand, 0));
      return build (WITH_RECORD_EXPR, TREE_TYPE (result), result,
		    TREE_OPERAND (operand, 1));
    }

  /* For a left-justified modular type, operate on the scalar wrapped in
     its single field.  */
  if (operation_type != 0
      && TREE_CODE (operation_type) == RECORD_TYPE
      && TYPE_LEFT_JUSTIFIED_MODULAR_P (operation_type))
    operation_type = TREE_TYPE (TYPE_FIELDS (operation_type));

  /* For a scalar extra subtype, compute in its base type.  */
  if (operation_type != 0
      && ! AGGREGATE_TYPE_P (operation_type)
      && TYPE_EXTRA_SUBTYPE_P (operation_type))
    operation_type = get_base_type (operation_type);

  switch (op_code)
    {
    case REALPART_EXPR:
    case IMAGPART_EXPR:
      /* Default the result type from the complex component type.  */
      if (operation_type == 0)
	result_type = operation_type = TREE_TYPE (type);
      else if (result_type != TREE_TYPE (type))
	gigi_abort (513);

      result = fold (build1 (op_code, operation_type, operand));
      break;

    case TRUTH_NOT_EXPR:
      if (result_type != base_type)
	gigi_abort (508);

      result = invert_truthvalue (gnat_truthvalue_conversion (operand));
      break;

    case ATTR_ADDR_EXPR:
    case ADDR_EXPR:
      /* Dispatch on the shape of the operand; several forms have their
	 address computed specially.  */
      switch (TREE_CODE (operand))
	{
	case INDIRECT_REF:
	case UNCONSTRAINED_ARRAY_REF:
	  /* Taking the address of a dereference just yields the pointer.  */
	  result = TREE_OPERAND (operand, 0);

	  /* Make sure the type here is a pointer, not a reference.
	     GCC wants pointer types for function addresses.  */
	  if (result_type == 0)
	    result_type = build_pointer_type (type);
	  break;

	case NULL_EXPR:
	  /* Propagate the NULL_EXPR, retyped as a pointer.  */
	  result = operand;
	  TREE_TYPE (result) = type = build_pointer_type (type);
	  break;

	case ARRAY_REF:
	case ARRAY_RANGE_REF:
	case COMPONENT_REF:
	case BIT_FIELD_REF:
	  /* If this is for 'Address, find the address of the prefix and
	     add the offset to the field.  Otherwise, do this the normal
	     way.  */
	  if (op_code == ATTR_ADDR_EXPR)
	    {
	      HOST_WIDE_INT bitsize;
	      HOST_WIDE_INT bitpos;
	      tree offset, inner;
	      enum machine_mode mode;
	      int unsignedp, volatilep;

	      inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
					   &mode, &unsignedp, &volatilep);

	      /* If INNER is a padding type whose field has a self-referential
		 size, convert to that inner type.  We know the offset is zero
		 and we need to have that type visible.  */
	      if (TREE_CODE (TREE_TYPE (inner)) == RECORD_TYPE
		  && TYPE_IS_PADDING_P (TREE_TYPE (inner))
		  && (contains_placeholder_p
		      (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS
					     (TREE_TYPE (inner)))))))
		inner = convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner))),
				 inner);

	      /* Compute the offset as a byte offset from INNER.  */
	      if (offset == 0)
		offset = size_zero_node;

	      /* 'Address of a bit-misaligned object is only a warning
		 here, not an error.  */
	      if (bitpos % BITS_PER_UNIT != 0)
		post_error
		  ("taking address of object not aligned on storage unit?",
		   error_gnat_node);

	      offset = size_binop (PLUS_EXPR, offset,
				   size_int (bitpos / BITS_PER_UNIT));

	      /* Take the address of INNER, convert the offset to void *, and
		 add them.  It will later be converted to the desired result
		 type, if any.  */
	      inner = build_unary_op (ADDR_EXPR, NULL_TREE, inner);
	      inner = convert (ptr_void_type_node, inner);
	      offset = convert (ptr_void_type_node, offset);
	      result = build_binary_op (PLUS_EXPR, ptr_void_type_node,
					inner, offset);
	      result = convert (build_pointer_type (TREE_TYPE (operand)),
				result);
	      break;
	    }
	  goto common;

	case CONSTRUCTOR:
	  /* If this is just a constructor for a padded record, we can
	     just take the address of the single field and convert it to
	     a pointer to our type.  */
	  if (TREE_CODE (type) == RECORD_TYPE && TYPE_IS_PADDING_P (type))
	    {
	      result
		= build_unary_op (ADDR_EXPR, NULL_TREE,
				  TREE_VALUE (CONSTRUCTOR_ELTS (operand)));
	      result = convert (build_pointer_type (TREE_TYPE (operand)),
				result);
	      break;
	    }

	  goto common;

	case NOP_EXPR:
	  /* For a no-op conversion between aggregates, take the address
	     of the underlying object instead.  */
	  if (AGGREGATE_TYPE_P (type)
	      && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand, 0))))
	    return build_unary_op (ADDR_EXPR, result_type,
				   TREE_OPERAND (operand, 0));

	  /* If this NOP_EXPR doesn't change the mode, get the result type
	     from this type and go down.  We need to do this in case
	     this is a conversion of a CONST_DECL.  */
	  if (TYPE_MODE (type) != BLKmode
	      && (TYPE_MODE (type)
		  == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand, 0)))))
	    return build_unary_op (ADDR_EXPR,
				   (result_type == 0
				    ? build_pointer_type (type)
				    : result_type),
				   TREE_OPERAND (operand, 0));
	  goto common;

	case CONST_DECL:
	  /* Take the address of the variable made to correspond to this
	     constant, since a CONST_DECL itself has no storage.  */
	  operand = DECL_CONST_CORRESPONDING_VAR (operand);

	  /* ... fall through ... */

	default:
	common:

	  /* The general case: mark the operand addressable and build a
	     plain ADDR_EXPR of pointer-to-TYPE.  */
	  if (type != error_mark_node)
	    operation_type = build_pointer_type (type);

	  gnat_mark_addressable (operand);
	  result = fold (build1 (ADDR_EXPR, operation_type, operand));
	}

      TREE_CONSTANT (result) = staticp (operand) || TREE_CONSTANT (operand);
      break;

    case INDIRECT_REF:
      /* If we want to refer to an entire unconstrained array,
	 make up an expression to do so.  This will never survive to
	 the backend.  If TYPE is a thin pointer, first convert the
	 operand to a fat pointer.  */
      if (TYPE_THIN_POINTER_P (type)
	  && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type)) != 0)
	{
	  operand
	    = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type))),
		       operand);
	  type = TREE_TYPE (operand);
	}

      if (TYPE_FAT_POINTER_P (type))
	result = build1 (UNCONSTRAINED_ARRAY_REF,
			 TYPE_UNCONSTRAINED_ARRAY (type), operand);

      /* Dereferencing an ADDR_EXPR just yields its operand.  */
      else if (TREE_CODE (operand) == ADDR_EXPR)
	result = TREE_OPERAND (operand, 0);

      else
	{
	  result = fold (build1 (op_code, TREE_TYPE (type), operand));
	  TREE_READONLY (result) = TREE_READONLY (TREE_TYPE (type));
	}

      side_effects = flag_volatile
	|| (! TYPE_FAT_POINTER_P (type) && TYPE_VOLATILE (TREE_TYPE (type)));
      break;

    case NEGATE_EXPR:
    case BIT_NOT_EXPR:
      {
	tree modulus = ((operation_type != 0
			 && TREE_CODE (operation_type) == INTEGER_TYPE
			 && TYPE_MODULAR_P (operation_type))
			? TYPE_MODULUS (operation_type) : 0);
	int mod_pow2 = modulus != 0 && integer_pow2p (modulus);

	/* If this is a modular type, there are various possibilities
	   depending on the operation and whether the modulus is a
	   power of two or not.  */

	if (modulus != 0)
	  {
	    if (operation_type != base_type)
	      gigi_abort (509);

	    operand = convert (operation_type, operand);

	    /* The fastest in the negate case for binary modulus is
	       the straightforward code; the TRUNC_MOD_EXPR below
	       is an AND operation.  */
	    if (op_code == NEGATE_EXPR && mod_pow2)
	      result = fold (build (TRUNC_MOD_EXPR, operation_type,
				    fold (build1 (NEGATE_EXPR, operation_type,
						  operand)),
				    modulus));

	    /* For nonbinary negate case, return zero for zero operand,
	       else return the modulus minus the operand.  If the modulus
	       is a power of two minus one, we can do the subtraction
	       as an XOR since it is equivalent and faster on most machines.  */
	    else if (op_code == NEGATE_EXPR && ! mod_pow2)
	      {
		if (integer_pow2p (fold (build (PLUS_EXPR, operation_type,
						modulus,
						convert (operation_type,
							 integer_one_node)))))
		  result = fold (build (BIT_XOR_EXPR, operation_type,
					operand, modulus));
		else
		  result = fold (build (MINUS_EXPR, operation_type,
					modulus, operand));

		result = fold (build (COND_EXPR, operation_type,
				      fold (build (NE_EXPR, integer_type_node,
						   operand,
						   convert (operation_type,
							    integer_zero_node))),
				      result, operand));
	      }
	    else
	      {
		/* For the NOT cases, we need a constant equal to
		   the modulus minus one.  For a binary modulus, we
		   XOR against the constant and subtract the operand from
		   that constant for nonbinary modulus.  */

		tree cnst = fold (build (MINUS_EXPR, operation_type, modulus,
					 convert (operation_type,
						  integer_one_node)));

		if (mod_pow2)
		  result = fold (build (BIT_XOR_EXPR, operation_type,
					operand, cnst));
		else
		  result = fold (build (MINUS_EXPR, operation_type,
					cnst, operand));
	      }

	    break;
	  }
      }

      /* ... fall through to the generic case for non-modular types ... */

    default:
      if (operation_type != base_type)
	gigi_abort (509);

      result = fold (build1 (op_code, operation_type, convert (operation_type,
							       operand)));
    }

  if (side_effects)
    {
      TREE_SIDE_EFFECTS (result) = 1;
      if (TREE_CODE (result) == INDIRECT_REF)
	TREE_THIS_VOLATILE (result) = TYPE_VOLATILE (TREE_TYPE (result));
    }

  if (result_type != 0 && TREE_TYPE (result) != result_type)
    result = convert (result_type, result);

  return result;
}
1377 \f
1378 /* Similar, but for COND_EXPR. */
1379
1380 tree
1381 build_cond_expr (result_type, condition_operand, true_operand, false_operand)
1382 tree result_type;
1383 tree condition_operand;
1384 tree true_operand;
1385 tree false_operand;
1386 {
1387 tree result;
1388 int addr_p = 0;
1389
1390 /* Front-end verifies that result, true and false operands have same base
1391 type. Convert everything to the result type. */
1392
1393 true_operand = convert (result_type, true_operand);
1394 false_operand = convert (result_type, false_operand);
1395
1396 /* If the result type is unconstrained, take the address of
1397 the operands and then dereference our result. */
1398
1399 if (TREE_CODE (result_type) == UNCONSTRAINED_ARRAY_TYPE
1400 || (TREE_CODE (TYPE_SIZE (result_type)) != INTEGER_CST
1401 && contains_placeholder_p (TYPE_SIZE (result_type))))
1402 {
1403 addr_p = 1;
1404 result_type = build_pointer_type (result_type);
1405 true_operand = build_unary_op (ADDR_EXPR, result_type, true_operand);
1406 false_operand = build_unary_op (ADDR_EXPR, result_type, false_operand);
1407 }
1408
1409 result = fold (build (COND_EXPR, result_type, condition_operand,
1410 true_operand, false_operand));
1411
1412 /* If either operand is a SAVE_EXPR (possibly surrounded by
1413 arithmetic, make sure it gets done. */
1414 while (TREE_CODE_CLASS (TREE_CODE (true_operand)) == '1'
1415 || (TREE_CODE_CLASS (TREE_CODE (true_operand)) == '2'
1416 && TREE_CONSTANT (TREE_OPERAND (true_operand, 1))))
1417 true_operand = TREE_OPERAND (true_operand, 0);
1418
1419 while (TREE_CODE_CLASS (TREE_CODE (false_operand)) == '1'
1420 || (TREE_CODE_CLASS (TREE_CODE (false_operand)) == '2'
1421 && TREE_CONSTANT (TREE_OPERAND (false_operand, 1))))
1422 false_operand = TREE_OPERAND (false_operand, 0);
1423
1424 if (TREE_CODE (true_operand) == SAVE_EXPR)
1425 result = build (COMPOUND_EXPR, result_type, true_operand, result);
1426 if (TREE_CODE (false_operand) == SAVE_EXPR)
1427 result = build (COMPOUND_EXPR, result_type, false_operand, result);
1428
1429 if (addr_p)
1430 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1431
1432 return result;
1433 }
1434 \f
1435
1436 /* Build a CALL_EXPR to call FUNDECL with one argument, ARG. Return
1437 the CALL_EXPR. */
1438
1439 tree
1440 build_call_1_expr (fundecl, arg)
1441 tree fundecl;
1442 tree arg;
1443 {
1444 tree call = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fundecl)),
1445 build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
1446 chainon (NULL_TREE, build_tree_list (NULL_TREE, arg)),
1447 NULL_TREE);
1448
1449 TREE_SIDE_EFFECTS (call) = 1;
1450
1451 return call;
1452 }
1453
1454 /* Build a CALL_EXPR to call FUNDECL with two arguments, ARG1 & ARG2. Return
1455 the CALL_EXPR. */
1456
1457 tree
1458 build_call_2_expr (fundecl, arg1, arg2)
1459 tree fundecl;
1460 tree arg1, arg2;
1461 {
1462 tree call = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fundecl)),
1463 build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
1464 chainon (chainon (NULL_TREE,
1465 build_tree_list (NULL_TREE, arg1)),
1466 build_tree_list (NULL_TREE, arg2)),
1467 NULL_TREE);
1468
1469 TREE_SIDE_EFFECTS (call) = 1;
1470
1471 return call;
1472 }
1473
1474 /* Likewise to call FUNDECL with no arguments. */
1475
1476 tree
1477 build_call_0_expr (fundecl)
1478 tree fundecl;
1479 {
1480 tree call = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (fundecl)),
1481 build_unary_op (ADDR_EXPR, NULL_TREE, fundecl),
1482 NULL_TREE, NULL_TREE);
1483
1484 TREE_SIDE_EFFECTS (call) = 1;
1485
1486 return call;
1487 }
1488 \f
1489 /* Call a function that raises an exception and pass the line number and file
1490 name, if requested. MSG says which exception function to call. */
1491
1492 tree
1493 build_call_raise (msg)
1494 int msg;
1495 {
1496 tree fndecl = gnat_raise_decls[msg];
1497 const char *str = discard_file_names ? "" : ref_filename;
1498 int len = strlen (str) + 1;
1499 tree filename = build_string (len, str);
1500
1501 TREE_TYPE (filename)
1502 = build_array_type (char_type_node,
1503 build_index_type (build_int_2 (len, 0)));
1504
1505 return
1506 build_call_2_expr (fndecl,
1507 build1 (ADDR_EXPR, build_pointer_type (char_type_node),
1508 filename),
1509 build_int_2 (lineno, 0));
1510 }
1511 \f
/* Return a CONSTRUCTOR of TYPE whose list is LIST.  The result is marked
   constant and static only when the type has a fixed size, every element
   is constant, and (for records) the fields appear in nondecreasing bit
   position, since varasm.c cannot output out-of-order constructors.  */

tree
build_constructor (type, list)
     tree type;
     tree list;
{
  tree elmt;
  /* Only a fixed-size type can possibly yield a constant constructor.  */
  int allconstant = (TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST);
  int side_effects = 0;
  tree result;

  for (elmt = list; elmt; elmt = TREE_CHAIN (elmt))
    {
      /* Any non-constant element — or a constant non-integer stored into
	 a bit field — makes the whole aggregate non-constant.  */
      if (! TREE_CONSTANT (TREE_VALUE (elmt))
	  || (TREE_CODE (type) == RECORD_TYPE
	      && DECL_BIT_FIELD (TREE_PURPOSE (elmt))
	      && TREE_CODE (TREE_VALUE (elmt)) != INTEGER_CST))
	allconstant = 0;

      if (TREE_SIDE_EFFECTS (TREE_VALUE (elmt)))
	side_effects = 1;

      /* Propagate a NULL_EXPR from the size of the type.  We won't ever
	 be executing the code we generate here in that case, but handle it
	 specially to avoid the compiler blowing up.  */
      if (TREE_CODE (type) == RECORD_TYPE
	  && (0 != (result
		    = contains_null_expr (DECL_SIZE (TREE_PURPOSE (elmt))))))
	return build1 (NULL_EXPR, type, TREE_OPERAND (result, 0));
    }

  /* If TYPE is a RECORD_TYPE and the fields are not in the
     same order as their bit position, don't treat this as constant
     since varasm.c can't handle it.  */
  if (allconstant && TREE_CODE (type) == RECORD_TYPE)
    {
      tree last_pos = bitsize_zero_node;
      tree field;

      for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
	{
	  tree this_pos = bit_position (field);

	  /* A variable position, or one before the previous field's,
	     defeats static output.  */
	  if (TREE_CODE (this_pos) != INTEGER_CST
	      || tree_int_cst_lt (this_pos, last_pos))
	    {
	      allconstant = 0;
	      break;
	    }

	  last_pos = this_pos;
	}
    }

  result = build (CONSTRUCTOR, type, NULL_TREE, list);
  TREE_CONSTANT (result) = allconstant;
  TREE_STATIC (result) = allconstant;
  TREE_SIDE_EFFECTS (result) = side_effects;
  TREE_READONLY (result) = TREE_READONLY (type);

  return result;
}
1575 \f
/* Return a COMPONENT_REF to access a field that is given by COMPONENT,
   an IDENTIFIER_NODE giving the name of the field, or FIELD, a FIELD_DECL,
   for the field.  Exactly one of COMPONENT and FIELD must be nonzero.
   Return zero if no matching field can be found.

   We also handle the fact that we might have been passed a pointer to the
   actual record and know how to look for fields in variant parts.  */

static tree
build_simple_component_ref (record_variable, component, field)
     tree record_variable;
     tree component;
     tree field;
{
  tree record_type = TYPE_MAIN_VARIANT (TREE_TYPE (record_variable));
  tree ref;

  /* The variable must be of some record-like type with a known size.  */
  if ((TREE_CODE (record_type) != RECORD_TYPE
       && TREE_CODE (record_type) != UNION_TYPE
       && TREE_CODE (record_type) != QUAL_UNION_TYPE)
      || TYPE_SIZE (record_type) == 0)
    gigi_abort (510);

  /* Either COMPONENT or FIELD must be specified, but not both.  */
  if ((component != 0) == (field != 0))
    gigi_abort (511);

  /* If no field was specified, look for a field with the specified name
     in the current record only.  */
  if (field == 0)
    for (field = TYPE_FIELDS (record_type); field;
	 field = TREE_CHAIN (field))
      if (DECL_NAME (field) == component)
	break;

  if (field == 0)
    return 0;

  /* If this field is not in the specified record, see if we can find
     something in the record whose original field is the same as this one. */
  if (DECL_CONTEXT (field) != record_type)
    /* Check if there is a field with name COMPONENT in the record.  */
    {
      tree new_field;

      /* First loop thru normal components, matching either direction of
	 the DECL_ORIGINAL_FIELD relation.  */

      for (new_field = TYPE_FIELDS (record_type); new_field != 0;
	   new_field = TREE_CHAIN (new_field))
	if (DECL_ORIGINAL_FIELD (new_field) == field
	    || new_field == DECL_ORIGINAL_FIELD (field)
	    || (DECL_ORIGINAL_FIELD (field) != 0
		&& (DECL_ORIGINAL_FIELD (field)
		    == DECL_ORIGINAL_FIELD (new_field))))
	  break;

      /* Next, loop thru DECL_INTERNAL_P components if we haven't found
	 the component in the first search.  Doing this search in 2 steps
	 is required to avoid hidden homonymous fields in the
	 _Parent field.  For each internal field, recurse into it and
	 look for FIELD there.  */

      if (new_field == 0)
	for (new_field = TYPE_FIELDS (record_type); new_field != 0;
	     new_field = TREE_CHAIN (new_field))
	  if (DECL_INTERNAL_P (new_field))
	    {
	      tree field_ref
		= build_simple_component_ref (record_variable,
					      NULL_TREE, new_field);
	      ref = build_simple_component_ref (field_ref, NULL_TREE, field);

	      if (ref != 0)
		return ref;
	    }

      field = new_field;
    }

  if (field == 0)
    return 0;

  /* It would be nice to call "fold" here, but that can lose a type
     we need to tag a PLACEHOLDER_EXPR with, so we can't do it.  */
  ref = build (COMPONENT_REF, TREE_TYPE (field), record_variable, field);

  /* Propagate read-only and volatile markings from the variable, the
     field, and the record type onto the reference.  */
  if (TREE_READONLY (record_variable) || TREE_READONLY (field))
    TREE_READONLY (ref) = 1;
  if (TREE_THIS_VOLATILE (record_variable) || TREE_THIS_VOLATILE (field)
      || TYPE_VOLATILE (record_type))
    TREE_THIS_VOLATILE (ref) = 1;

  return ref;
}
1668 \f
1669 /* Like build_simple_component_ref, except that we give an error if the
1670 reference could not be found. */
1671
1672 tree
1673 build_component_ref (record_variable, component, field)
1674 tree record_variable;
1675 tree component;
1676 tree field;
1677 {
1678 tree ref = build_simple_component_ref (record_variable, component, field);
1679
1680 if (ref != 0)
1681 return ref;
1682
1683 /* If FIELD was specified, assume this is an invalid user field so
1684 raise constraint error. Otherwise, we can't find the type to return, so
1685 abort. */
1686
1687 else if (field != 0)
1688 return build1 (NULL_EXPR, TREE_TYPE (field),
1689 build_call_raise (CE_Discriminant_Check_Failed));
1690 else
1691 gigi_abort (512);
1692 }
1693 \f
/* Build a GCC tree to call an allocation or deallocation function.
   If GNU_OBJ is nonzero, it is an object to deallocate.  Otherwise,
   generate an allocator.

   GNU_SIZE is the size of the object in bytes and ALIGN is the alignment in
   bits.  GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the
   storage pool to use.  If not present, malloc and free will be used except
   if GNAT_PROC is the "fake" value of -1, in which case we allocate the
   object dynamically on the stack frame.  */

tree
build_call_alloc_dealloc (gnu_obj, gnu_size, align, gnat_proc, gnat_pool)
     tree gnu_obj;
     tree gnu_size;
     int align;
     Entity_Id gnat_proc;
     Entity_Id gnat_pool;
{
  tree gnu_align = size_int (align / BITS_PER_UNIT);

  /* A self-referential size must be evaluated in the context of the
     object being deallocated.  */
  if (TREE_CODE (gnu_size) != INTEGER_CST && contains_placeholder_p (gnu_size))
    gnu_size = build (WITH_RECORD_EXPR, sizetype, gnu_size,
		      build_unary_op (INDIRECT_REF, NULL_TREE, gnu_obj));

  if (Present (gnat_proc))
    {
      /* The storage pools are obviously always tagged types, but the
	 secondary stack uses the same mechanism and is not tagged.  */
      if (Is_Tagged_Type (Etype (gnat_pool)))
	{
	  /* The size is the third parameter; the alignment is the
	     same type.  */
	  Entity_Id gnat_size_type
	    = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc))));
	  tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
	  tree gnu_proc = gnat_to_gnu (gnat_proc);
	  tree gnu_proc_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_proc);
	  tree gnu_pool = gnat_to_gnu (gnat_pool);
	  tree gnu_pool_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_pool);
	  tree gnu_args = NULL_TREE;
	  tree gnu_call;

	  /* The first arg is always the address of the storage pool; next
	     comes the address of the object, for a deallocator, then the
	     size and alignment.  */
	  gnu_args
	    = chainon (gnu_args, build_tree_list (NULL_TREE, gnu_pool_addr));

	  if (gnu_obj)
	    gnu_args
	      = chainon (gnu_args, build_tree_list (NULL_TREE, gnu_obj));

	  gnu_args
	    = chainon (gnu_args,
		       build_tree_list (NULL_TREE,
					convert (gnu_size_type, gnu_size)));
	  gnu_args
	    = chainon (gnu_args,
		       build_tree_list (NULL_TREE,
					convert (gnu_size_type, gnu_align)));

	  gnu_call = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (gnu_proc)),
			    gnu_proc_addr, gnu_args, NULL_TREE);
	  TREE_SIDE_EFFECTS (gnu_call) = 1;
	  return gnu_call;
	}

      /* Secondary stack case.  */
      else
	{
	  /* The size is the second parameter.  */
	  Entity_Id gnat_size_type
	    = Etype (Next_Formal (First_Formal (gnat_proc)));
	  tree gnu_size_type = gnat_to_gnu_type (gnat_size_type);
	  tree gnu_proc = gnat_to_gnu (gnat_proc);
	  tree gnu_proc_addr = build_unary_op (ADDR_EXPR, NULL_TREE, gnu_proc);
	  tree gnu_args = NULL_TREE;
	  tree gnu_call;

	  /* The first arg is the address of the object, for a
	     deallocator, then the size.  */
	  if (gnu_obj)
	    gnu_args
	      = chainon (gnu_args, build_tree_list (NULL_TREE, gnu_obj));

	  gnu_args
	    = chainon (gnu_args,
		       build_tree_list (NULL_TREE,
					convert (gnu_size_type, gnu_size)));

	  gnu_call = build (CALL_EXPR, TREE_TYPE (TREE_TYPE (gnu_proc)),
			    gnu_proc_addr, gnu_args, NULL_TREE);
	  TREE_SIDE_EFFECTS (gnu_call) = 1;
	  return gnu_call;
	}
    }

  /* No user-defined pool: deallocation goes to free.  */
  else if (gnu_obj)
    return build_call_1_expr (free_decl, gnu_obj);
  else if (gnat_pool == -1)
    {
      /* Allocation on the stack frame was requested.  If the size is a
	 constant, we can put it in the fixed portion of the stack frame
	 to avoid the need to adjust the stack pointer.  */
      if (TREE_CODE (gnu_size) == INTEGER_CST && ! flag_stack_check)
	{
	  tree gnu_range
	    = build_range_type (NULL_TREE, size_one_node, gnu_size);
	  tree gnu_array_type = build_array_type (char_type_node, gnu_range);
	  tree gnu_decl =
	    create_var_decl (get_identifier ("RETVAL"), NULL_TREE,
			     gnu_array_type, NULL_TREE, 0, 0, 0, 0, 0);

	  return convert (ptr_void_type_node,
			  build_unary_op (ADDR_EXPR, NULL_TREE, gnu_decl));
	}
      else
	/* Variable size: emit a dynamic stack allocation.  */
	return build (ALLOCATE_EXPR, ptr_void_type_node, gnu_size, gnu_align);
    }
  else
    /* Default allocation goes to malloc.  */
    return build_call_1_expr (malloc_decl, gnu_size);
}
1815 \f
1816 /* Build a GCC tree to correspond to allocating an object of TYPE whose
1817 initial value is INIT, if INIT is nonzero. Convert the expression to
1818 RESULT_TYPE, which must be some type of pointer. Return the tree.
1819 GNAT_PROC and GNAT_POOL optionally give the procedure to call and
1820 the storage pool to use. */
1821
1822 tree
1823 build_allocator (type, init, result_type, gnat_proc, gnat_pool)
1824 tree type;
1825 tree init;
1826 tree result_type;
1827 Entity_Id gnat_proc;
1828 Entity_Id gnat_pool;
1829 {
1830 tree size = TYPE_SIZE_UNIT (type);
1831 tree result;
1832
1833 /* If the initializer, if present, is a NULL_EXPR, just return a new one. */
1834 if (init != 0 && TREE_CODE (init) == NULL_EXPR)
1835 return build1 (NULL_EXPR, result_type, TREE_OPERAND (init, 0));
1836
1837 /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
1838 sizes of the object and its template. Allocate the whole thing and
1839 fill in the parts that are known. */
1840 else if (TYPE_FAT_OR_THIN_POINTER_P (result_type))
1841 {
1842 tree template_type
1843 = (TYPE_FAT_POINTER_P (result_type)
1844 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (result_type))))
1845 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (result_type))));
1846 tree storage_type
1847 = build_unc_object_type (template_type, type,
1848 get_identifier ("ALLOC"));
1849 tree storage_ptr_type = build_pointer_type (storage_type);
1850 tree storage;
1851 tree template_cons = NULL_TREE;
1852
1853 size = TYPE_SIZE_UNIT (storage_type);
1854
1855 if (TREE_CODE (size) != INTEGER_CST
1856 && contains_placeholder_p (size))
1857 size = build (WITH_RECORD_EXPR, sizetype, size, init);
1858
1859 /* If the size overflows, pass -1 so the allocator will raise
1860 storage error. */
1861 if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
1862 size = ssize_int (-1);
1863
1864 storage = build_call_alloc_dealloc (NULL_TREE, size,
1865 TYPE_ALIGN (storage_type),
1866 gnat_proc, gnat_pool);
1867 storage = convert (storage_ptr_type, protect_multiple_eval (storage));
1868
1869 if (TREE_CODE (type) == RECORD_TYPE && TYPE_IS_PADDING_P (type))
1870 {
1871 type = TREE_TYPE (TYPE_FIELDS (type));
1872
1873 if (init != 0)
1874 init = convert (type, init);
1875 }
1876
1877 /* If there is an initializing expression, make a constructor for
1878 the entire object including the bounds and copy it into the
1879 object. If there is no initializing expression, just set the
1880 bounds. */
1881 if (init != 0)
1882 {
1883 template_cons = tree_cons (TREE_CHAIN (TYPE_FIELDS (storage_type)),
1884 init, NULL_TREE);
1885 template_cons = tree_cons (TYPE_FIELDS (storage_type),
1886 build_template (template_type, type,
1887 init),
1888 template_cons);
1889
1890 return convert
1891 (result_type,
1892 build (COMPOUND_EXPR, storage_ptr_type,
1893 build_binary_op
1894 (MODIFY_EXPR, storage_type,
1895 build_unary_op (INDIRECT_REF, NULL_TREE,
1896 convert (storage_ptr_type, storage)),
1897 build_constructor (storage_type, template_cons)),
1898 convert (storage_ptr_type, storage)));
1899 }
1900 else
1901 return build
1902 (COMPOUND_EXPR, result_type,
1903 build_binary_op
1904 (MODIFY_EXPR, template_type,
1905 build_component_ref
1906 (build_unary_op (INDIRECT_REF, NULL_TREE,
1907 convert (storage_ptr_type, storage)),
1908 NULL_TREE, TYPE_FIELDS (storage_type)),
1909 build_template (template_type, type, NULL_TREE)),
1910 convert (result_type, convert (storage_ptr_type, storage)));
1911 }
1912
1913 /* If we have an initializing expression, see if its size is simpler
1914 than the size from the type. */
1915 if (init != 0 && TYPE_SIZE_UNIT (TREE_TYPE (init)) != 0
1916 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init))) == INTEGER_CST
1917 || (TREE_CODE (size) != INTEGER_CST
1918 && contains_placeholder_p (size))))
1919 size = TYPE_SIZE_UNIT (TREE_TYPE (init));
1920
1921 /* If the size is still self-referential, reference the initializing
1922 expression, if it is present. If not, this must have been a
1923 call to allocate a library-level object, in which case we use
1924 the maximum size. */
1925 if (TREE_CODE (size) != INTEGER_CST && contains_placeholder_p (size))
1926 {
1927 if (init == 0)
1928 size = max_size (size, 1);
1929 else
1930 size = build (WITH_RECORD_EXPR, sizetype, size, init);
1931 }
1932
1933 /* If the size overflows, pass -1 so the allocator will raise
1934 storage error. */
1935 if (TREE_CODE (size) == INTEGER_CST && TREE_OVERFLOW (size))
1936 size = ssize_int (-1);
1937
1938 /* If this is a type whose alignment is larger than the
1939 biggest we support in normal alignment and this is in
1940 the default storage pool, make an "aligning type", allocate
1941 it, point to the field we need, and return that. */
1942 if (TYPE_ALIGN (type) > BIGGEST_ALIGNMENT
1943 && No (gnat_proc))
1944 {
1945 tree new_type = make_aligning_type (type, TYPE_ALIGN (type), size);
1946
1947 result = build_call_alloc_dealloc (NULL_TREE, TYPE_SIZE (new_type),
1948 BIGGEST_ALIGNMENT, Empty, Empty);
1949 result = save_expr (result);
1950 result = convert (build_pointer_type (new_type), result);
1951 result = build_unary_op (INDIRECT_REF, NULL_TREE, result);
1952 result = build_component_ref (result, NULL_TREE,
1953 TYPE_FIELDS (new_type));
1954 result = convert (result_type,
1955 build_unary_op (ADDR_EXPR, NULL_TREE, result));
1956 }
1957 else
1958 result = convert (result_type,
1959 build_call_alloc_dealloc (NULL_TREE, size,
1960 TYPE_ALIGN (type),
1961 gnat_proc, gnat_pool));
1962
1963 /* If we have an initial value, put the new address into a SAVE_EXPR, assign
1964 the value, and return the address. Do this with a COMPOUND_EXPR. */
1965
1966 if (init)
1967 {
1968 result = save_expr (result);
1969 result
1970 = build (COMPOUND_EXPR, TREE_TYPE (result),
1971 build_binary_op
1972 (MODIFY_EXPR, TREE_TYPE (TREE_TYPE (result)),
1973 build_unary_op (INDIRECT_REF, TREE_TYPE (TREE_TYPE (result)),
1974 result),
1975 init),
1976 result);
1977 }
1978
1979 return convert (result_type, result);
1980 }
1981 \f
1982 /* Fill in a VMS descriptor for EXPR and return a constructor for it.
1983 GNAT_FORMAL is how we find the descriptor record. */
1984
1985 tree
1986 fill_vms_descriptor (expr, gnat_formal)
1987 tree expr;
1988 Entity_Id gnat_formal;
1989 {
1990 tree record_type = TREE_TYPE (TREE_TYPE (get_gnu_tree (gnat_formal)));
1991 tree field;
1992 tree const_list = 0;
1993
1994 expr = maybe_unconstrained_array (expr);
1995 gnat_mark_addressable (expr);
1996
1997 for (field = TYPE_FIELDS (record_type); field; field = TREE_CHAIN (field))
1998 {
1999 tree init = DECL_INITIAL (field);
2000
2001 if (TREE_CODE (init) != INTEGER_CST
2002 && contains_placeholder_p (init))
2003 init = build (WITH_RECORD_EXPR, TREE_TYPE (init), init, expr);
2004
2005 const_list = tree_cons (field, convert (TREE_TYPE (field), init),
2006 const_list);
2007 }
2008
2009 return build_constructor (record_type, nreverse (const_list));
2010 }
2011
2012 /* Indicate that we need to make the address of EXPR_NODE and it therefore
2013 should not be allocated in a register. Returns true if successful. */
2014
2015 bool
2016 gnat_mark_addressable (expr_node)
2017 tree expr_node;
2018 {
2019 while (1)
2020 switch (TREE_CODE (expr_node))
2021 {
2022 case ADDR_EXPR:
2023 case COMPONENT_REF:
2024 case ARRAY_REF:
2025 case ARRAY_RANGE_REF:
2026 case REALPART_EXPR:
2027 case IMAGPART_EXPR:
2028 case NOP_EXPR:
2029 expr_node = TREE_OPERAND (expr_node, 0);
2030 break;
2031
2032 case CONSTRUCTOR:
2033 TREE_ADDRESSABLE (expr_node) = 1;
2034 return true;
2035
2036 case VAR_DECL:
2037 case PARM_DECL:
2038 case RESULT_DECL:
2039 put_var_into_stack (expr_node);
2040 TREE_ADDRESSABLE (expr_node) = 1;
2041 return true;
2042
2043 case FUNCTION_DECL:
2044 TREE_ADDRESSABLE (expr_node) = 1;
2045 return true;
2046
2047 case CONST_DECL:
2048 return (DECL_CONST_CORRESPONDING_VAR (expr_node) != 0
2049 && (gnat_mark_addressable
2050 (DECL_CONST_CORRESPONDING_VAR (expr_node))));
2051 default:
2052 return true;
2053 }
2054 }