1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
10 * Copyright (C) 1992-2002, Free Software Foundation, Inc. *
12 * GNAT is free software; you can redistribute it and/or modify it under *
13 * terms of the GNU General Public License as published by the Free Soft- *
14 * ware Foundation; either version 2, or (at your option) any later ver- *
15 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
16 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
17 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
18 * for more details. You should have received a copy of the GNU General *
19 * Public License distributed with GNAT; see file COPYING. If not, write *
20 * to the Free Software Foundation, 59 Temple Place - Suite 330, Boston, *
21 * MA 02111-1307, USA. *
23 * GNAT was originally developed by the GNAT team at New York University. *
24 * It is now maintained by Ada Core Technologies Inc (http://www.gnat.com). *
26 ****************************************************************************/
45 static tree find_common_type
PARAMS ((tree
, tree
));
46 static int contains_save_expr_p
PARAMS ((tree
));
47 static tree contains_null_expr
PARAMS ((tree
));
48 static tree compare_arrays
PARAMS ((tree
, tree
, tree
));
49 static tree nonbinary_modular_operation
PARAMS ((enum tree_code
, tree
,
51 static tree build_simple_component_ref
PARAMS ((tree
, tree
, tree
));
53 /* Prepare expr to be an argument of a TRUTH_NOT_EXPR or other logical
56 This preparation consists of taking the ordinary representation of
57 an expression expr and producing a valid tree boolean expression
58 describing whether expr is nonzero. We could simply always do
60 build_binary_op (NE_EXPR, expr, integer_zero_node, 1),
62 but we optimize comparisons, &&, ||, and !.
64 The resulting type should always be the same as the input type.
65 This function is simpler than the corresponding C version since
66 the only possible operands will be things of Boolean type. */
69 gnat_truthvalue_conversion (expr
)
72 tree type
= TREE_TYPE (expr
);
74 switch (TREE_CODE (expr
))
76 case EQ_EXPR
: case NE_EXPR
: case LE_EXPR
: case GE_EXPR
:
77 case LT_EXPR
: case GT_EXPR
:
78 case TRUTH_ANDIF_EXPR
:
87 /* Distribute the conversion into the arms of a COND_EXPR. */
89 (build (COND_EXPR
, type
, TREE_OPERAND (expr
, 0),
90 gnat_truthvalue_conversion (TREE_OPERAND (expr
, 1)),
91 gnat_truthvalue_conversion (TREE_OPERAND (expr
, 2))));
93 case WITH_RECORD_EXPR
:
94 return build (WITH_RECORD_EXPR
, type
,
95 gnat_truthvalue_conversion (TREE_OPERAND (expr
, 0)),
96 TREE_OPERAND (expr
, 1));
99 return build_binary_op (NE_EXPR
, type
, expr
,
100 convert (type
, integer_zero_node
));
104 /* Return the base type of TYPE. */
110 if (TREE_CODE (type
) == RECORD_TYPE
111 && TYPE_LEFT_JUSTIFIED_MODULAR_P (type
))
112 type
= TREE_TYPE (TYPE_FIELDS (type
));
114 while (TREE_TYPE (type
) != 0
115 && (TREE_CODE (type
) == INTEGER_TYPE
116 || TREE_CODE (type
) == REAL_TYPE
))
117 type
= TREE_TYPE (type
);
122 /* Likewise, but only return types known to the Ada source. */
124 get_ada_base_type (type
)
127 while (TREE_TYPE (type
) != 0
128 && (TREE_CODE (type
) == INTEGER_TYPE
129 || TREE_CODE (type
) == REAL_TYPE
)
130 && ! TYPE_EXTRA_SUBTYPE_P (type
))
131 type
= TREE_TYPE (type
);
136 /* EXP is a GCC tree representing an address. See if we can find how
137 strictly the object at that address is aligned. Return that alignment
138 in bits. If we don't know anything about the alignment, return 0.
139 We do not go merely by type information here since the check on
140 N_Validate_Unchecked_Alignment does that. */
143 known_alignment (exp
)
146 unsigned int lhs
, rhs
;
148 switch (TREE_CODE (exp
))
152 case NON_LVALUE_EXPR
:
153 /* Conversions between pointers and integers don't change the alignment
154 of the underlying object. */
155 return known_alignment (TREE_OPERAND (exp
, 0));
159 /* If two address are added, the alignment of the result is the
160 minimum of the two aligments. */
161 lhs
= known_alignment (TREE_OPERAND (exp
, 0));
162 rhs
= known_alignment (TREE_OPERAND (exp
, 1));
163 return MIN (lhs
, rhs
);
166 /* The first part of this represents the lowest bit in the constant,
167 but is it in bytes, not bits. */
168 return MIN (BITS_PER_UNIT
169 * (TREE_INT_CST_LOW (exp
) & - TREE_INT_CST_LOW (exp
)),
173 /* If we know the alignment of just one side, use it. Otherwise,
174 use the product of the alignments. */
175 lhs
= known_alignment (TREE_OPERAND (exp
, 0));
176 rhs
= known_alignment (TREE_OPERAND (exp
, 1));
177 if (lhs
== 0 || rhs
== 0)
178 return MIN (BIGGEST_ALIGNMENT
, MAX (lhs
, rhs
));
180 return MIN (BIGGEST_ALIGNMENT
, lhs
* rhs
);
183 return expr_align (TREE_OPERAND (exp
, 0));
190 /* We have a comparison or assignment operation on two types, T1 and T2,
191 which are both either array types or both record types.
192 Return the type that both operands should be converted to, if any.
193 Otherwise return zero. */
196 find_common_type (t1
, t2
)
199 /* If either type is non-BLKmode, use it. Note that we know that we will
200 not have any alignment problems since if we did the non-BLKmode
201 type could not have been used. */
202 if (TYPE_MODE (t1
) != BLKmode
)
204 else if (TYPE_MODE (t2
) != BLKmode
)
207 /* Otherwise, return the type that has a constant size. */
208 if (TREE_CONSTANT (TYPE_SIZE (t1
)))
210 else if (TREE_CONSTANT (TYPE_SIZE (t2
)))
213 /* In this case, both types have variable size. It's probably
214 best to leave the "type mismatch" because changing it could
215 case a bad self-referential reference. */
219 /* See if EXP contains a SAVE_EXPR in a position where we would
222 ??? This is a real kludge, but is probably the best approach short
223 of some very general solution. */
226 contains_save_expr_p (exp
)
229 switch (TREE_CODE (exp
))
234 case ADDR_EXPR
: case INDIRECT_REF
:
236 case NOP_EXPR
: case CONVERT_EXPR
: case VIEW_CONVERT_EXPR
:
237 return contains_save_expr_p (TREE_OPERAND (exp
, 0));
240 return (CONSTRUCTOR_ELTS (exp
) != 0
241 && contains_save_expr_p (CONSTRUCTOR_ELTS (exp
)));
244 return (contains_save_expr_p (TREE_VALUE (exp
))
245 || (TREE_CHAIN (exp
) != 0
246 && contains_save_expr_p (TREE_CHAIN (exp
))));
253 /* See if EXP contains a NULL_EXPR in an expression we use for sizes. Return
254 it if so. This is used to detect types whose sizes involve computations
255 that are known to raise Constraint_Error. */
258 contains_null_expr (exp
)
263 if (TREE_CODE (exp
) == NULL_EXPR
)
266 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
269 return contains_null_expr (TREE_OPERAND (exp
, 0));
272 tem
= contains_null_expr (TREE_OPERAND (exp
, 0));
276 return contains_null_expr (TREE_OPERAND (exp
, 1));
279 switch (TREE_CODE (exp
))
282 return contains_null_expr (TREE_OPERAND (exp
, 0));
285 tem
= contains_null_expr (TREE_OPERAND (exp
, 0));
289 tem
= contains_null_expr (TREE_OPERAND (exp
, 1));
293 return contains_null_expr (TREE_OPERAND (exp
, 2));
304 /* Return an expression tree representing an equality comparison of
305 A1 and A2, two objects of ARRAY_TYPE. The returned expression should
306 be of type RESULT_TYPE
308 Two arrays are equal in one of two ways: (1) if both have zero length
309 in some dimension (not necessarily the same dimension) or (2) if the
310 lengths in each dimension are equal and the data is equal. We perform the
311 length tests in as efficient a manner as possible. */
314 compare_arrays (result_type
, a1
, a2
)
318 tree t1
= TREE_TYPE (a1
);
319 tree t2
= TREE_TYPE (a2
);
320 tree result
= convert (result_type
, integer_one_node
);
321 tree a1_is_null
= convert (result_type
, integer_zero_node
);
322 tree a2_is_null
= convert (result_type
, integer_zero_node
);
323 int length_zero_p
= 0;
325 /* Process each dimension separately and compare the lengths. If any
326 dimension has a size known to be zero, set SIZE_ZERO_P to 1 to
327 suppress the comparison of the data. */
328 while (TREE_CODE (t1
) == ARRAY_TYPE
&& TREE_CODE (t2
) == ARRAY_TYPE
)
330 tree lb1
= TYPE_MIN_VALUE (TYPE_DOMAIN (t1
));
331 tree ub1
= TYPE_MAX_VALUE (TYPE_DOMAIN (t1
));
332 tree lb2
= TYPE_MIN_VALUE (TYPE_DOMAIN (t2
));
333 tree ub2
= TYPE_MAX_VALUE (TYPE_DOMAIN (t2
));
334 tree bt
= get_base_type (TREE_TYPE (lb1
));
335 tree length1
= fold (build (MINUS_EXPR
, bt
, ub1
, lb1
));
336 tree length2
= fold (build (MINUS_EXPR
, bt
, ub2
, lb2
));
339 tree comparison
, this_a1_is_null
, this_a2_is_null
;
341 /* If the length of the first array is a constant, swap our operands
342 unless the length of the second array is the constant zero.
343 Note that we have set the `length' values to the length - 1. */
344 if (TREE_CODE (length1
) == INTEGER_CST
345 && ! integer_zerop (fold (build (PLUS_EXPR
, bt
, length2
,
346 convert (bt
, integer_one_node
)))))
348 tem
= a1
, a1
= a2
, a2
= tem
;
349 tem
= t1
, t1
= t2
, t2
= tem
;
350 tem
= lb1
, lb1
= lb2
, lb2
= tem
;
351 tem
= ub1
, ub1
= ub2
, ub2
= tem
;
352 tem
= length1
, length1
= length2
, length2
= tem
;
353 tem
= a1_is_null
, a1_is_null
= a2_is_null
, a2_is_null
= tem
;
356 /* If the length of this dimension in the second array is the constant
357 zero, we can just go inside the original bounds for the first
358 array and see if last < first. */
359 if (integer_zerop (fold (build (PLUS_EXPR
, bt
, length2
,
360 convert (bt
, integer_one_node
)))))
362 tree ub
= TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1
)));
363 tree lb
= TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1
)));
365 comparison
= build_binary_op (LT_EXPR
, result_type
, ub
, lb
);
367 if (contains_placeholder_p (comparison
))
368 comparison
= build (WITH_RECORD_EXPR
, result_type
,
370 if (contains_placeholder_p (length1
))
371 length1
= build (WITH_RECORD_EXPR
, bt
, length1
, a1
);
375 this_a1_is_null
= comparison
;
376 this_a2_is_null
= convert (result_type
, integer_one_node
);
379 /* If the length is some other constant value, we know that the
380 this dimension in the first array cannot be superflat, so we
381 can just use its length from the actual stored bounds. */
382 else if (TREE_CODE (length2
) == INTEGER_CST
)
384 ub1
= TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1
)));
385 lb1
= TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t1
)));
386 ub2
= TYPE_MAX_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2
)));
387 lb2
= TYPE_MIN_VALUE (TYPE_INDEX_TYPE (TYPE_DOMAIN (t2
)));
388 nbt
= get_base_type (TREE_TYPE (ub1
));
391 = build_binary_op (EQ_EXPR
, result_type
,
392 build_binary_op (MINUS_EXPR
, nbt
, ub1
, lb1
),
393 build_binary_op (MINUS_EXPR
, nbt
, ub2
, lb2
));
395 /* Note that we know that UB2 and LB2 are constant and hence
396 cannot contain a PLACEHOLDER_EXPR. */
398 if (contains_placeholder_p (comparison
))
399 comparison
= build (WITH_RECORD_EXPR
, result_type
, comparison
, a1
);
400 if (contains_placeholder_p (length1
))
401 length1
= build (WITH_RECORD_EXPR
, bt
, length1
, a1
);
403 this_a1_is_null
= build_binary_op (LT_EXPR
, result_type
, ub1
, lb1
);
404 this_a2_is_null
= convert (result_type
, integer_zero_node
);
407 /* Otherwise compare the computed lengths. */
410 if (contains_placeholder_p (length1
))
411 length1
= build (WITH_RECORD_EXPR
, bt
, length1
, a1
);
412 if (contains_placeholder_p (length2
))
413 length2
= build (WITH_RECORD_EXPR
, bt
, length2
, a2
);
416 = build_binary_op (EQ_EXPR
, result_type
, length1
, length2
);
419 = build_binary_op (LT_EXPR
, result_type
, length1
,
420 convert (bt
, integer_zero_node
));
422 = build_binary_op (LT_EXPR
, result_type
, length2
,
423 convert (bt
, integer_zero_node
));
426 result
= build_binary_op (TRUTH_ANDIF_EXPR
, result_type
,
429 a1_is_null
= build_binary_op (TRUTH_ORIF_EXPR
, result_type
,
430 this_a1_is_null
, a1_is_null
);
431 a2_is_null
= build_binary_op (TRUTH_ORIF_EXPR
, result_type
,
432 this_a2_is_null
, a2_is_null
);
438 /* Unless the size of some bound is known to be zero, compare the
439 data in the array. */
442 tree type
= find_common_type (TREE_TYPE (a1
), TREE_TYPE (a2
));
445 a1
= convert (type
, a1
), a2
= convert (type
, a2
);
448 result
= build_binary_op (TRUTH_ANDIF_EXPR
, result_type
, result
,
449 fold (build (EQ_EXPR
, result_type
, a1
, a2
)));
453 /* The result is also true if both sizes are zero. */
454 result
= build_binary_op (TRUTH_ORIF_EXPR
, result_type
,
455 build_binary_op (TRUTH_ANDIF_EXPR
, result_type
,
456 a1_is_null
, a2_is_null
),
459 /* If either operand contains SAVE_EXPRs, they have to be evaluated before
460 starting the comparison above since the place it would be otherwise
461 evaluated would be wrong. */
463 if (contains_save_expr_p (a1
))
464 result
= build (COMPOUND_EXPR
, result_type
, a1
, result
);
466 if (contains_save_expr_p (a2
))
467 result
= build (COMPOUND_EXPR
, result_type
, a2
, result
);
472 /* Compute the result of applying OP_CODE to LHS and RHS, where both are of
473 type TYPE. We know that TYPE is a modular type with a nonbinary
477 nonbinary_modular_operation (op_code
, type
, lhs
, rhs
)
478 enum tree_code op_code
;
482 tree modulus
= TYPE_MODULUS (type
);
483 unsigned int needed_precision
= tree_floor_log2 (modulus
) + 1;
484 unsigned int precision
;
489 /* If this is an addition of a constant, convert it to a subtraction
490 of a constant since we can do that faster. */
491 if (op_code
== PLUS_EXPR
&& TREE_CODE (rhs
) == INTEGER_CST
)
492 rhs
= fold (build (MINUS_EXPR
, type
, modulus
, rhs
)), op_code
= MINUS_EXPR
;
494 /* For the logical operations, we only need PRECISION bits. For
495 addition and subraction, we need one more and for multiplication we
496 need twice as many. But we never want to make a size smaller than
498 if (op_code
== PLUS_EXPR
|| op_code
== MINUS_EXPR
)
499 needed_precision
+= 1;
500 else if (op_code
== MULT_EXPR
)
501 needed_precision
*= 2;
503 precision
= MAX (needed_precision
, TYPE_PRECISION (op_type
));
505 /* Unsigned will do for everything but subtraction. */
506 if (op_code
== MINUS_EXPR
)
509 /* If our type is the wrong signedness or isn't wide enough, make a new
510 type and convert both our operands to it. */
511 if (TYPE_PRECISION (op_type
) < precision
512 || TREE_UNSIGNED (op_type
) != unsignedp
)
514 /* Copy the node so we ensure it can be modified to make it modular. */
515 op_type
= copy_node (gnat_type_for_size (precision
, unsignedp
));
516 modulus
= convert (op_type
, modulus
);
517 TYPE_MODULUS (op_type
) = modulus
;
518 TYPE_MODULAR_P (op_type
) = 1;
519 lhs
= convert (op_type
, lhs
);
520 rhs
= convert (op_type
, rhs
);
523 /* Do the operation, then we'll fix it up. */
524 result
= fold (build (op_code
, op_type
, lhs
, rhs
));
526 /* For multiplication, we have no choice but to do a full modulus
527 operation. However, we want to do this in the narrowest
529 if (op_code
== MULT_EXPR
)
531 tree div_type
= copy_node (gnat_type_for_size (needed_precision
, 1));
532 modulus
= convert (div_type
, modulus
);
533 TYPE_MODULUS (div_type
) = modulus
;
534 TYPE_MODULAR_P (div_type
) = 1;
535 result
= convert (op_type
,
536 fold (build (TRUNC_MOD_EXPR
, div_type
,
537 convert (div_type
, result
), modulus
)));
540 /* For subtraction, add the modulus back if we are negative. */
541 else if (op_code
== MINUS_EXPR
)
543 result
= save_expr (result
);
544 result
= fold (build (COND_EXPR
, op_type
,
545 build (LT_EXPR
, integer_type_node
, result
,
546 convert (op_type
, integer_zero_node
)),
547 fold (build (PLUS_EXPR
, op_type
,
552 /* For the other operations, subtract the modulus if we are >= it. */
555 result
= save_expr (result
);
556 result
= fold (build (COND_EXPR
, op_type
,
557 build (GE_EXPR
, integer_type_node
,
559 fold (build (MINUS_EXPR
, op_type
,
564 return convert (type
, result
);
567 /* Make a binary operation of kind OP_CODE. RESULT_TYPE is the type
568 desired for the result. Usually the operation is to be performed
569 in that type. For MODIFY_EXPR and ARRAY_REF, RESULT_TYPE may be 0
570 in which case the type to be used will be derived from the operands.
572 This function is very much unlike the ones for C and C++ since we
573 have already done any type conversion and matching required. All we
574 have to do here is validate the work done by SEM and handle subtypes. */
577 build_binary_op (op_code
, result_type
, left_operand
, right_operand
)
578 enum tree_code op_code
;
583 tree left_type
= TREE_TYPE (left_operand
);
584 tree right_type
= TREE_TYPE (right_operand
);
585 tree left_base_type
= get_base_type (left_type
);
586 tree right_base_type
= get_base_type (right_type
);
587 tree operation_type
= result_type
;
591 int has_side_effects
= 0;
593 /* If one (but not both, unless they have the same object) operands are a
594 WITH_RECORD_EXPR, do the operation and then surround it with the
595 WITH_RECORD_EXPR. Don't do this for assignment, for an ARRAY_REF, or
596 for an ARRAY_RANGE_REF because we need to keep track of the
597 WITH_RECORD_EXPRs on both operands very carefully. */
598 if (op_code
!= MODIFY_EXPR
&& op_code
!= ARRAY_REF
599 && op_code
!= ARRAY_RANGE_REF
600 && TREE_CODE (left_operand
) == WITH_RECORD_EXPR
601 && (TREE_CODE (right_operand
) != WITH_RECORD_EXPR
602 || operand_equal_p (TREE_OPERAND (left_operand
, 1),
603 TREE_OPERAND (right_operand
, 1), 0)))
605 tree right
= right_operand
;
607 if (TREE_CODE (right
) == WITH_RECORD_EXPR
)
608 right
= TREE_OPERAND (right
, 0);
610 result
= build_binary_op (op_code
, result_type
,
611 TREE_OPERAND (left_operand
, 0), right
);
612 return build (WITH_RECORD_EXPR
, TREE_TYPE (result
), result
,
613 TREE_OPERAND (left_operand
, 1));
615 else if (op_code
!= MODIFY_EXPR
&& op_code
!= ARRAY_REF
616 && op_code
!= ARRAY_RANGE_REF
617 && TREE_CODE (left_operand
) != WITH_RECORD_EXPR
618 && TREE_CODE (right_operand
) == WITH_RECORD_EXPR
)
620 result
= build_binary_op (op_code
, result_type
, left_operand
,
621 TREE_OPERAND (right_operand
, 0));
622 return build (WITH_RECORD_EXPR
, TREE_TYPE (result
), result
,
623 TREE_OPERAND (right_operand
, 1));
626 if (operation_type
!= 0
627 && TREE_CODE (operation_type
) == RECORD_TYPE
628 && TYPE_LEFT_JUSTIFIED_MODULAR_P (operation_type
))
629 operation_type
= TREE_TYPE (TYPE_FIELDS (operation_type
));
631 if (operation_type
!= 0
632 && ! AGGREGATE_TYPE_P (operation_type
)
633 && TYPE_EXTRA_SUBTYPE_P (operation_type
))
634 operation_type
= get_base_type (operation_type
);
636 modulus
= (operation_type
!= 0 && TREE_CODE (operation_type
) == INTEGER_TYPE
637 && TYPE_MODULAR_P (operation_type
)
638 ? TYPE_MODULUS (operation_type
) : 0);
643 /* If there were any integral or pointer conversions on LHS, remove
644 them; we'll be putting them back below if needed. Likewise for
645 conversions between array and record types. But don't do this if
646 the right operand is not BLKmode (for packed arrays)
647 unless we are not changing the mode. */
648 while ((TREE_CODE (left_operand
) == CONVERT_EXPR
649 || TREE_CODE (left_operand
) == NOP_EXPR
650 || TREE_CODE (left_operand
) == VIEW_CONVERT_EXPR
)
651 && (((INTEGRAL_TYPE_P (left_type
)
652 || POINTER_TYPE_P (left_type
))
653 && (INTEGRAL_TYPE_P (TREE_TYPE
654 (TREE_OPERAND (left_operand
, 0)))
655 || POINTER_TYPE_P (TREE_TYPE
656 (TREE_OPERAND (left_operand
, 0)))))
657 || (((TREE_CODE (left_type
) == RECORD_TYPE
658 /* Don't remove conversions to left-justified modular
660 && ! TYPE_LEFT_JUSTIFIED_MODULAR_P (left_type
))
661 || TREE_CODE (left_type
) == ARRAY_TYPE
)
662 && ((TREE_CODE (TREE_TYPE
663 (TREE_OPERAND (left_operand
, 0)))
665 || (TREE_CODE (TREE_TYPE
666 (TREE_OPERAND (left_operand
, 0)))
668 && (TYPE_MODE (right_type
) == BLKmode
669 || (TYPE_MODE (left_type
)
670 == TYPE_MODE (TREE_TYPE
672 (left_operand
, 0))))))))
674 left_operand
= TREE_OPERAND (left_operand
, 0);
675 left_type
= TREE_TYPE (left_operand
);
678 if (operation_type
== 0)
679 operation_type
= left_type
;
681 /* If the RHS has a conversion between record and array types and
682 an inner type is no worse, use it. Note we cannot do this for
683 modular types or types with TYPE_ALIGN_OK, since the latter
684 might indicate a conversion between a root type and a class-wide
685 type, which we must not remove. */
686 while (TREE_CODE (right_operand
) == VIEW_CONVERT_EXPR
687 && ((TREE_CODE (right_type
) == RECORD_TYPE
688 && ! TYPE_LEFT_JUSTIFIED_MODULAR_P (right_type
)
689 && ! TYPE_ALIGN_OK (right_type
)
690 && ! TYPE_IS_FAT_POINTER_P (right_type
))
691 || TREE_CODE (right_type
) == ARRAY_TYPE
)
692 && (((TREE_CODE (TREE_TYPE (TREE_OPERAND (right_operand
, 0)))
694 && ! (TYPE_LEFT_JUSTIFIED_MODULAR_P
695 (TREE_TYPE (TREE_OPERAND (right_operand
, 0))))
697 (TREE_TYPE (TREE_OPERAND (right_operand
, 0))))
698 && ! (TYPE_IS_FAT_POINTER_P
699 (TREE_TYPE (TREE_OPERAND (right_operand
, 0)))))
700 || (TREE_CODE (TREE_TYPE (TREE_OPERAND (right_operand
, 0)))
703 == find_common_type (right_type
,
704 TREE_TYPE (TREE_OPERAND
705 (right_operand
, 0))))
706 || right_type
!= best_type
))
708 right_operand
= TREE_OPERAND (right_operand
, 0);
709 right_type
= TREE_TYPE (right_operand
);
712 /* If we are copying one array or record to another, find the best type
714 if (((TREE_CODE (left_type
) == ARRAY_TYPE
715 && TREE_CODE (right_type
) == ARRAY_TYPE
)
716 || (TREE_CODE (left_type
) == RECORD_TYPE
717 && TREE_CODE (right_type
) == RECORD_TYPE
))
718 && (best_type
= find_common_type (left_type
, right_type
)) != 0)
719 operation_type
= best_type
;
721 /* If a class-wide type may be involved, force use of the RHS type. */
722 if (TREE_CODE (right_type
) == RECORD_TYPE
&& TYPE_ALIGN_OK (right_type
))
723 operation_type
= right_type
;
725 /* Ensure everything on the LHS is valid. If we have a field reference,
726 strip anything that get_inner_reference can handle. Then remove any
727 conversions with type types having the same code and mode. Mark
728 VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE. When done, we must have
729 either an INDIRECT_REF or a decl. */
730 result
= left_operand
;
733 tree restype
= TREE_TYPE (result
);
735 if (TREE_CODE (result
) == COMPONENT_REF
736 || TREE_CODE (result
) == ARRAY_REF
737 || TREE_CODE (result
) == ARRAY_RANGE_REF
)
738 while (handled_component_p (result
))
739 result
= TREE_OPERAND (result
, 0);
740 else if (TREE_CODE (result
) == REALPART_EXPR
741 || TREE_CODE (result
) == IMAGPART_EXPR
742 || TREE_CODE (result
) == WITH_RECORD_EXPR
743 || ((TREE_CODE (result
) == NOP_EXPR
744 || TREE_CODE (result
) == CONVERT_EXPR
)
745 && (((TREE_CODE (restype
)
746 == TREE_CODE (TREE_TYPE
747 (TREE_OPERAND (result
, 0))))
748 && (TYPE_MODE (TREE_TYPE
749 (TREE_OPERAND (result
, 0)))
750 == TYPE_MODE (restype
)))
751 || TYPE_ALIGN_OK (restype
))))
752 result
= TREE_OPERAND (result
, 0);
753 else if (TREE_CODE (result
) == VIEW_CONVERT_EXPR
)
755 TREE_ADDRESSABLE (result
) = 1;
756 result
= TREE_OPERAND (result
, 0);
762 if (TREE_CODE (result
) != INDIRECT_REF
&& TREE_CODE (result
) != NULL_EXPR
763 && ! DECL_P (result
))
766 /* Convert the right operand to the operation type unless
767 it is either already of the correct type or if the type
768 involves a placeholder, since the RHS may not have the same
770 if (operation_type
!= right_type
771 && (! (TREE_CODE (TYPE_SIZE (operation_type
)) != INTEGER_CST
772 && contains_placeholder_p (TYPE_SIZE (operation_type
)))))
774 /* For a variable-size type, with both BLKmode, convert using
775 CONVERT_EXPR instead of an unchecked conversion since we don't
776 need to make a temporary (and can't anyway). */
777 if (TREE_CODE (TYPE_SIZE (operation_type
)) != INTEGER_CST
778 && TYPE_MODE (TREE_TYPE (right_operand
)) == BLKmode
779 && TREE_CODE (right_operand
) != UNCONSTRAINED_ARRAY_REF
)
780 right_operand
= build1 (CONVERT_EXPR
, operation_type
,
783 right_operand
= convert (operation_type
, right_operand
);
785 right_type
= operation_type
;
788 /* If the modes differ, make up a bogus type and convert the RHS to
789 it. This can happen with packed types. */
790 if (TYPE_MODE (left_type
) != TYPE_MODE (right_type
))
792 tree new_type
= copy_node (left_type
);
794 TYPE_SIZE (new_type
) = TYPE_SIZE (right_type
);
795 TYPE_SIZE_UNIT (new_type
) = TYPE_SIZE_UNIT (right_type
);
796 TYPE_MAIN_VARIANT (new_type
) = new_type
;
797 right_operand
= convert (new_type
, right_operand
);
800 has_side_effects
= 1;
805 if (operation_type
== 0)
806 operation_type
= TREE_TYPE (left_type
);
808 /* ... fall through ... */
810 case ARRAY_RANGE_REF
:
812 /* First convert the right operand to its base type. This will
813 prevent unneed signedness conversions when sizetype is wider than
815 right_operand
= convert (right_base_type
, right_operand
);
816 right_operand
= convert (TYPE_DOMAIN (left_type
), right_operand
);
818 if (! TREE_CONSTANT (right_operand
)
819 || ! TREE_CONSTANT (TYPE_MIN_VALUE (right_type
))
820 || op_code
== ARRAY_RANGE_REF
)
821 gnat_mark_addressable (left_operand
);
830 if (POINTER_TYPE_P (left_type
))
833 /* ... fall through ... */
837 /* If either operand is a NULL_EXPR, just return a new one. */
838 if (TREE_CODE (left_operand
) == NULL_EXPR
)
839 return build (op_code
, result_type
,
840 build1 (NULL_EXPR
, integer_type_node
,
841 TREE_OPERAND (left_operand
, 0)),
844 else if (TREE_CODE (right_operand
) == NULL_EXPR
)
845 return build (op_code
, result_type
,
846 build1 (NULL_EXPR
, integer_type_node
,
847 TREE_OPERAND (right_operand
, 0)),
850 /* If either object is a left-justified modular types, get the
851 fields from within. */
852 if (TREE_CODE (left_type
) == RECORD_TYPE
853 && TYPE_LEFT_JUSTIFIED_MODULAR_P (left_type
))
855 left_operand
= convert (TREE_TYPE (TYPE_FIELDS (left_type
)),
857 left_type
= TREE_TYPE (left_operand
);
858 left_base_type
= get_base_type (left_type
);
861 if (TREE_CODE (right_type
) == RECORD_TYPE
862 && TYPE_LEFT_JUSTIFIED_MODULAR_P (right_type
))
864 right_operand
= convert (TREE_TYPE (TYPE_FIELDS (right_type
)),
866 right_type
= TREE_TYPE (right_operand
);
867 right_base_type
= get_base_type (right_type
);
870 /* If both objects are arrays, compare them specially. */
871 if ((TREE_CODE (left_type
) == ARRAY_TYPE
872 || (TREE_CODE (left_type
) == INTEGER_TYPE
873 && TYPE_HAS_ACTUAL_BOUNDS_P (left_type
)))
874 && (TREE_CODE (right_type
) == ARRAY_TYPE
875 || (TREE_CODE (right_type
) == INTEGER_TYPE
876 && TYPE_HAS_ACTUAL_BOUNDS_P (right_type
))))
878 result
= compare_arrays (result_type
, left_operand
, right_operand
);
880 if (op_code
== EQ_EXPR
)
882 else if (op_code
== NE_EXPR
)
883 result
= invert_truthvalue (result
);
890 /* Otherwise, the base types must be the same unless the objects are
891 records. If we have records, use the best type and convert both
892 operands to that type. */
893 if (left_base_type
!= right_base_type
)
895 if (TREE_CODE (left_base_type
) == RECORD_TYPE
896 && TREE_CODE (right_base_type
) == RECORD_TYPE
)
898 /* The only way these are permitted to be the same is if both
899 types have the same name. In that case, one of them must
900 not be self-referential. Use that one as the best type.
901 Even better is if one is of fixed size. */
904 if (TYPE_NAME (left_base_type
) == 0
905 || TYPE_NAME (left_base_type
) != TYPE_NAME (right_base_type
))
908 if (TREE_CONSTANT (TYPE_SIZE (left_base_type
)))
909 best_type
= left_base_type
;
910 else if (TREE_CONSTANT (TYPE_SIZE (right_base_type
)))
911 best_type
= right_base_type
;
912 else if (! contains_placeholder_p (TYPE_SIZE (left_base_type
)))
913 best_type
= left_base_type
;
914 else if (! contains_placeholder_p (TYPE_SIZE (right_base_type
)))
915 best_type
= right_base_type
;
919 left_operand
= convert (best_type
, left_operand
);
920 right_operand
= convert (best_type
, right_operand
);
926 /* If we are comparing a fat pointer against zero, we need to
927 just compare the data pointer. */
928 else if (TYPE_FAT_POINTER_P (left_base_type
)
929 && TREE_CODE (right_operand
) == CONSTRUCTOR
930 && integer_zerop (TREE_VALUE (TREE_OPERAND (right_operand
, 1))))
932 right_operand
= build_component_ref (left_operand
, NULL_TREE
,
933 TYPE_FIELDS (left_base_type
));
934 left_operand
= convert (TREE_TYPE (right_operand
),
939 left_operand
= convert (left_base_type
, left_operand
);
940 right_operand
= convert (right_base_type
, right_operand
);
946 case PREINCREMENT_EXPR
:
947 case PREDECREMENT_EXPR
:
948 case POSTINCREMENT_EXPR
:
949 case POSTDECREMENT_EXPR
:
950 /* In these, the result type and the left operand type should be the
951 same. Do the operation in the base type of those and convert the
952 right operand (which is an integer) to that type.
954 Note that these operations are only used in loop control where
955 we guarantee that no overflow can occur. So nothing special need
956 be done for modular types. */
958 if (left_type
!= result_type
)
961 operation_type
= get_base_type (result_type
);
962 left_operand
= convert (operation_type
, left_operand
);
963 right_operand
= convert (operation_type
, right_operand
);
964 has_side_effects
= 1;
972 /* The RHS of a shift can be any type. Also, ignore any modulus
973 (we used to abort, but this is needed for unchecked conversion
974 to modular types). Otherwise, processing is the same as normal. */
975 if (operation_type
!= left_base_type
)
979 left_operand
= convert (operation_type
, left_operand
);
982 case TRUTH_ANDIF_EXPR
:
983 case TRUTH_ORIF_EXPR
:
987 left_operand
= gnat_truthvalue_conversion (left_operand
);
988 right_operand
= gnat_truthvalue_conversion (right_operand
);
994 /* For binary modulus, if the inputs are in range, so are the
996 if (modulus
!= 0 && integer_pow2p (modulus
))
1002 if (TREE_TYPE (result_type
) != left_base_type
1003 || TREE_TYPE (result_type
) != right_base_type
)
1006 left_operand
= convert (left_base_type
, left_operand
);
1007 right_operand
= convert (right_base_type
, right_operand
);
1010 case TRUNC_DIV_EXPR
: case TRUNC_MOD_EXPR
:
1011 case CEIL_DIV_EXPR
: case CEIL_MOD_EXPR
:
1012 case FLOOR_DIV_EXPR
: case FLOOR_MOD_EXPR
:
1013 case ROUND_DIV_EXPR
: case ROUND_MOD_EXPR
:
1014 /* These always produce results lower than either operand. */
1020 /* The result type should be the same as the base types of the
1021 both operands (and they should be the same). Convert
1022 everything to the result type. */
1024 if (operation_type
!= left_base_type
1025 || left_base_type
!= right_base_type
)
1028 left_operand
= convert (operation_type
, left_operand
);
1029 right_operand
= convert (operation_type
, right_operand
);
1032 if (modulus
!= 0 && ! integer_pow2p (modulus
))
1034 result
= nonbinary_modular_operation (op_code
, operation_type
,
1035 left_operand
, right_operand
);
1038 /* If either operand is a NULL_EXPR, just return a new one. */
1039 else if (TREE_CODE (left_operand
) == NULL_EXPR
)
1040 return build1 (NULL_EXPR
, operation_type
, TREE_OPERAND (left_operand
, 0));
1041 else if (TREE_CODE (right_operand
) == NULL_EXPR
)
1042 return build1 (NULL_EXPR
, operation_type
, TREE_OPERAND (right_operand
, 0));
1044 result
= fold (build (op_code
, operation_type
,
1045 left_operand
, right_operand
));
1047 TREE_SIDE_EFFECTS (result
) |= has_side_effects
;
1048 TREE_CONSTANT (result
)
1049 |= (TREE_CONSTANT (left_operand
) & TREE_CONSTANT (right_operand
)
1050 && op_code
!= ARRAY_REF
&& op_code
!= ARRAY_RANGE_REF
);
1052 if ((op_code
== ARRAY_REF
|| op_code
== ARRAY_RANGE_REF
)
1053 && TYPE_VOLATILE (operation_type
))
1054 TREE_THIS_VOLATILE (result
) = 1;
1056 /* If we are working with modular types, perform the MOD operation
1057 if something above hasn't eliminated the need for it. */
1059 result
= fold (build (FLOOR_MOD_EXPR
, operation_type
, result
,
1060 convert (operation_type
, modulus
)));
1062 if (result_type
!= 0 && result_type
!= operation_type
)
1063 result
= convert (result_type
, result
);
1068 /* Similar, but for unary operations. */
/* NOTE(review): the text of this function is a garbled extraction --
   statements are split across physical lines, the original file's line
   numbers are embedded in the text, and gaps in that numbering show that
   many lines (declarations, braces, switch heads, returns) were dropped.
   The comments added below annotate only what the visible fragments
   establish; verify everything against the original sources.  */
1071 build_unary_op (op_code
, result_type
, operand
)
1072 enum tree_code op_code
;
/* Local state: the operand's type, its base type, the type the operation
   is performed in, and a side-effects flag (visible fragments below).  */
1076 tree type
= TREE_TYPE (operand
);
1077 tree base_type
= get_base_type (type
);
1078 tree operation_type
= result_type
;
1080 int side_effects
= 0;
1082 /* If we have a WITH_RECORD_EXPR as our operand, do the operation first,
1083 then surround it with the WITH_RECORD_EXPR. This allows GCC to do better
1084 expression folding. */
1085 if (TREE_CODE (operand
) == WITH_RECORD_EXPR
)
1087 result
= build_unary_op (op_code
, result_type
,
1088 TREE_OPERAND (operand
, 0));
1089 return build (WITH_RECORD_EXPR
, TREE_TYPE (result
), result
,
1090 TREE_OPERAND (operand
, 1));
/* For a left-justified modular record, operate on the type of its
   single field instead of the record type itself.  */
1093 if (operation_type
!= 0
1094 && TREE_CODE (operation_type
) == RECORD_TYPE
1095 && TYPE_LEFT_JUSTIFIED_MODULAR_P (operation_type
))
1096 operation_type
= TREE_TYPE (TYPE_FIELDS (operation_type
));
/* For a non-aggregate extra subtype, operate on its base type.  */
1098 if (operation_type
!= 0
1099 && ! AGGREGATE_TYPE_P (operation_type
)
1100 && TYPE_EXTRA_SUBTYPE_P (operation_type
))
1101 operation_type
= get_base_type (operation_type
);
/* Default the operation and result types from the operand when the
   caller supplied none.  */
1107 if (operation_type
== 0)
1108 result_type
= operation_type
= TREE_TYPE (type
)
1109 else if (result_type
!= TREE_TYPE (type
))
1112 result
= fold (build1 (op_code
, operation_type
, operand
));
/* NOTE(review): the fragments below are cases of a switch on OP_CODE
   whose head and several case bodies are not visible in this
   extraction.  */
1115 case TRUTH_NOT_EXPR
:
1116 if (result_type
!= base_type
)
1119 result
= invert_truthvalue (gnat_truthvalue_conversion (operand
));
1122 case ATTR_ADDR_EXPR
:
/* Address-taking: dispatch on the form of the operand.  */
1124 switch (TREE_CODE (operand
))
1127 case UNCONSTRAINED_ARRAY_REF
:
1128 result
= TREE_OPERAND (operand
, 0);
1130 /* Make sure the type here is a pointer, not a reference.
1131 GCC wants pointer types for function addresses. */
1132 if (result_type
== 0)
1133 result_type
= build_pointer_type (type
);
1138 TREE_TYPE (result
) = type
= build_pointer_type (type
);
1142 case ARRAY_RANGE_REF
:
1145 /* If this is for 'Address, find the address of the prefix and
1146 add the offset to the field. Otherwise, do this the normal
1148 if (op_code
== ATTR_ADDR_EXPR
)
1150 HOST_WIDE_INT bitsize
;
1151 HOST_WIDE_INT bitpos
;
1153 enum machine_mode mode
;
1154 int unsignedp
, volatilep
;
1156 inner
= get_inner_reference (operand
, &bitsize
, &bitpos
, &offset
,
1157 &mode
, &unsignedp
, &volatilep
);
1159 /* If INNER is a padding type whose field has a self-referential
1160 size, convert to that inner type. We know the offset is zero
1161 and we need to have that type visible. */
1162 if (TREE_CODE (TREE_TYPE (inner
)) == RECORD_TYPE
1163 && TYPE_IS_PADDING_P (TREE_TYPE (inner
))
1164 && (contains_placeholder_p
1165 (TYPE_SIZE (TREE_TYPE (TYPE_FIELDS
1166 (TREE_TYPE (inner
)))))))
1167 inner
= convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner
))),
1170 /* Compute the offset as a byte offset from INNER. */
1172 offset
= size_zero_node
;
/* Warn when the bit position is not a whole number of bytes.  */
1174 if (bitpos
% BITS_PER_UNIT
!= 0)
1176 ("taking address of object not aligned on storage unit?",
1179 offset
= size_binop (PLUS_EXPR
, offset
,
1180 size_int (bitpos
/ BITS_PER_UNIT
));
1182 /* Take the address of INNER, convert the offset to void *, and
1183 add them. It will later be converted to the desired result
1185 inner
= build_unary_op (ADDR_EXPR
, NULL_TREE
, inner
);
1186 inner
= convert (ptr_void_type_node
, inner
);
1187 offset
= convert (ptr_void_type_node
, offset
);
1188 result
= build_binary_op (PLUS_EXPR
, ptr_void_type_node
,
1190 result
= convert (build_pointer_type (TREE_TYPE (operand
)),
1197 /* If this is just a constructor for a padded record, we can
1198 just take the address of the single field and convert it to
1199 a pointer to our type. */
1200 if (TREE_CODE (type
) == RECORD_TYPE
&& TYPE_IS_PADDING_P (type
))
1203 = build_unary_op (ADDR_EXPR
, NULL_TREE
,
1204 TREE_VALUE (CONSTRUCTOR_ELTS (operand
)));
1205 result
= convert (build_pointer_type (TREE_TYPE (operand
)),
/* Conversion operand: when both sides are aggregates, take the
   address of the thing being converted instead.  */
1213 if (AGGREGATE_TYPE_P (type
)
1214 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand
, 0))))
1215 return build_unary_op (ADDR_EXPR
, result_type
,
1216 TREE_OPERAND (operand
, 0));
1218 /* If this NOP_EXPR doesn't change the mode, get the result type
1219 from this type and go down. We need to do this in case
1220 this is a conversion of a CONST_DECL. */
1221 if (TYPE_MODE (type
) != BLKmode
1222 && (TYPE_MODE (type
)
1223 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand
, 0)))))
1224 return build_unary_op (ADDR_EXPR
,
1226 ? build_pointer_type (type
)
1228 TREE_OPERAND (operand
, 0));
/* CONST_DECL: take the address of its corresponding variable.  */
1232 operand
= DECL_CONST_CORRESPONDING_VAR (operand
);
1234 /* ... fall through ... */
/* Common ADDR_EXPR path: mark the operand addressable and build its
   address; the result is constant for static or constant operands.  */
1239 if (type
!= error_mark_node
)
1240 operation_type
= build_pointer_type (type
);
1242 gnat_mark_addressable (operand
);
1243 result
= fold (build1 (ADDR_EXPR
, operation_type
, operand
));
1246 TREE_CONSTANT (result
) = staticp (operand
) || TREE_CONSTANT (operand
);
1250 /* If we want to refer to an entire unconstrained array,
1251 make up an expression to do so. This will never survive to
1252 the backend. If TYPE is a thin pointer, first convert the
1253 operand to a fat pointer. */
1254 if (TYPE_THIN_POINTER_P (type
)
1255 && TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type
)) != 0)
1258 = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (TREE_TYPE (type
))),
1260 type
= TREE_TYPE (operand
);
1263 if (TYPE_FAT_POINTER_P (type
))
1264 result
= build1 (UNCONSTRAINED_ARRAY_REF
,
1265 TYPE_UNCONSTRAINED_ARRAY (type
), operand
);
/* Dereferencing an ADDR_EXPR folds to its operand.  */
1267 else if (TREE_CODE (operand
) == ADDR_EXPR
)
1268 result
= TREE_OPERAND (operand
, 0);
1272 result
= fold (build1 (op_code
, TREE_TYPE (type
), operand
));
1273 TREE_READONLY (result
) = TREE_READONLY (TREE_TYPE (type
));
/* A volatile pointed-to type (or -fvolatile) makes the dereference a
   side effect.  */
1276 side_effects
= flag_volatile
1277 || (! TYPE_FAT_POINTER_P (type
) && TYPE_VOLATILE (TREE_TYPE (type
)));
/* NEGATE/NOT on modular types: MODULUS is the type's modulus if the
   operation type is modular, and MOD_POW2 says whether it is a power
   of two (visible fragments below).  */
1283 tree modulus
= ((operation_type
!= 0
1284 && TREE_CODE (operation_type
) == INTEGER_TYPE
1285 && TYPE_MODULAR_P (operation_type
))
1286 ? TYPE_MODULUS (operation_type
) : 0);
1287 int mod_pow2
= modulus
!= 0 && integer_pow2p (modulus
);
1289 /* If this is a modular type, there are various possibilities
1290 depending on the operation and whether the modulus is a
1291 power of two or not. */
1295 if (operation_type
!= base_type
)
1298 operand
= convert (operation_type
, operand
);
1300 /* The fastest in the negate case for binary modulus is
1301 the straightforward code; the TRUNC_MOD_EXPR below
1302 is an AND operation. */
1303 if (op_code
== NEGATE_EXPR
&& mod_pow2
)
1304 result
= fold (build (TRUNC_MOD_EXPR
, operation_type
,
1305 fold (build1 (NEGATE_EXPR
, operation_type
,
1309 /* For nonbinary negate case, return zero for zero operand,
1310 else return the modulus minus the operand. If the modulus
1311 is a power of two minus one, we can do the subtraction
1312 as an XOR since it is equivalent and faster on most machines. */
1313 else if (op_code
== NEGATE_EXPR
&& ! mod_pow2
)
1315 if (integer_pow2p (fold (build (PLUS_EXPR
, operation_type
,
1317 convert (operation_type
,
1318 integer_one_node
)))))
1319 result
= fold (build (BIT_XOR_EXPR
, operation_type
,
1322 result
= fold (build (MINUS_EXPR
, operation_type
,
1325 result
= fold (build (COND_EXPR
, operation_type
,
1326 fold (build (NE_EXPR
, integer_type_node
,
1328 convert (operation_type
,
1329 integer_zero_node
))),
1334 /* For the NOT cases, we need a constant equal to
1335 the modulus minus one. For a binary modulus, we
1336 XOR against the constant and subtract the operand from
1337 that constant for nonbinary modulus. */
1339 tree cnst
= fold (build (MINUS_EXPR
, operation_type
, modulus
,
1340 convert (operation_type
,
1341 integer_one_node
)));
1344 result
= fold (build (BIT_XOR_EXPR
, operation_type
,
1347 result
= fold (build (MINUS_EXPR
, operation_type
,
1355 /* ... fall through ... */
/* Default case: perform the operation in the operation type.  */
1358 if (operation_type
!= base_type
)
1361 result
= fold (build1 (op_code
, operation_type
, convert (operation_type
,
/* Propagate side effects and volatility onto the built result.  */
1367 TREE_SIDE_EFFECTS (result
) = 1;
1368 if (TREE_CODE (result
) == INDIRECT_REF
)
1369 TREE_THIS_VOLATILE (result
) = TYPE_VOLATILE (TREE_TYPE (result
));
/* Finally, convert to the requested result type if it differs.  */
1372 if (result_type
!= 0 && TREE_TYPE (result
) != result_type
)
1373 result
= convert (result_type
, result
);
1378 /* Similar, but for COND_EXPR. */
1381 build_cond_expr (result_type
, condition_operand
, true_operand
, false_operand
)
1383 tree condition_operand
;
1390 /* Front-end verifies that result, true and false operands have same base
1391 type. Convert everything to the result type. */
1393 true_operand
= convert (result_type
, true_operand
);
1394 false_operand
= convert (result_type
, false_operand
);
1396 /* If the result type is unconstrained, take the address of
1397 the operands and then dereference our result. */
1399 if (TREE_CODE (result_type
) == UNCONSTRAINED_ARRAY_TYPE
1400 || (TREE_CODE (TYPE_SIZE (result_type
)) != INTEGER_CST
1401 && contains_placeholder_p (TYPE_SIZE (result_type
))))
1404 result_type
= build_pointer_type (result_type
);
1405 true_operand
= build_unary_op (ADDR_EXPR
, result_type
, true_operand
);
1406 false_operand
= build_unary_op (ADDR_EXPR
, result_type
, false_operand
);
1409 result
= fold (build (COND_EXPR
, result_type
, condition_operand
,
1410 true_operand
, false_operand
));
1412 /* If either operand is a SAVE_EXPR (possibly surrounded by
1413 arithmetic, make sure it gets done. */
1414 while (TREE_CODE_CLASS (TREE_CODE (true_operand
)) == '1'
1415 || (TREE_CODE_CLASS (TREE_CODE (true_operand
)) == '2'
1416 && TREE_CONSTANT (TREE_OPERAND (true_operand
, 1))))
1417 true_operand
= TREE_OPERAND (true_operand
, 0);
1419 while (TREE_CODE_CLASS (TREE_CODE (false_operand
)) == '1'
1420 || (TREE_CODE_CLASS (TREE_CODE (false_operand
)) == '2'
1421 && TREE_CONSTANT (TREE_OPERAND (false_operand
, 1))))
1422 false_operand
= TREE_OPERAND (false_operand
, 0);
1424 if (TREE_CODE (true_operand
) == SAVE_EXPR
)
1425 result
= build (COMPOUND_EXPR
, result_type
, true_operand
, result
);
1426 if (TREE_CODE (false_operand
) == SAVE_EXPR
)
1427 result
= build (COMPOUND_EXPR
, result_type
, false_operand
, result
);
1430 result
= build_unary_op (INDIRECT_REF
, NULL_TREE
, result
);
1436 /* Build a CALL_EXPR to call FUNDECL with one argument, ARG. Return
1440 build_call_1_expr (fundecl
, arg
)
1444 tree call
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fundecl
)),
1445 build_unary_op (ADDR_EXPR
, NULL_TREE
, fundecl
),
1446 chainon (NULL_TREE
, build_tree_list (NULL_TREE
, arg
)),
1449 TREE_SIDE_EFFECTS (call
) = 1;
1454 /* Build a CALL_EXPR to call FUNDECL with two arguments, ARG1 & ARG2. Return
1458 build_call_2_expr (fundecl
, arg1
, arg2
)
1462 tree call
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fundecl
)),
1463 build_unary_op (ADDR_EXPR
, NULL_TREE
, fundecl
),
1464 chainon (chainon (NULL_TREE
,
1465 build_tree_list (NULL_TREE
, arg1
)),
1466 build_tree_list (NULL_TREE
, arg2
)),
1469 TREE_SIDE_EFFECTS (call
) = 1;
1474 /* Likewise to call FUNDECL with no arguments. */
1477 build_call_0_expr (fundecl
)
1480 tree call
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (fundecl
)),
1481 build_unary_op (ADDR_EXPR
, NULL_TREE
, fundecl
),
1482 NULL_TREE
, NULL_TREE
);
1484 TREE_SIDE_EFFECTS (call
) = 1;
1489 /* Call a function that raises an exception and pass the line number and file
1490 name, if requested. MSG says which exception function to call. */
1493 build_call_raise (msg
)
1496 tree fndecl
= gnat_raise_decls
[msg
];
1497 const char *str
= discard_file_names
? "" : ref_filename
;
1498 int len
= strlen (str
) + 1;
1499 tree filename
= build_string (len
, str
);
1501 TREE_TYPE (filename
)
1502 = build_array_type (char_type_node
,
1503 build_index_type (build_int_2 (len
, 0)));
1506 build_call_2_expr (fndecl
,
1507 build1 (ADDR_EXPR
, build_pointer_type (char_type_node
),
1509 build_int_2 (lineno
, 0));
1512 /* Return a CONSTRUCTOR of TYPE whose list is LIST. */
1515 build_constructor (type
, list
)
1520 int allconstant
= (TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
);
1521 int side_effects
= 0;
1524 for (elmt
= list
; elmt
; elmt
= TREE_CHAIN (elmt
))
1526 if (! TREE_CONSTANT (TREE_VALUE (elmt
))
1527 || (TREE_CODE (type
) == RECORD_TYPE
1528 && DECL_BIT_FIELD (TREE_PURPOSE (elmt
))
1529 && TREE_CODE (TREE_VALUE (elmt
)) != INTEGER_CST
))
1532 if (TREE_SIDE_EFFECTS (TREE_VALUE (elmt
)))
1535 /* Propagate an NULL_EXPR from the size of the type. We won't ever
1536 be executing the code we generate here in that case, but handle it
1537 specially to avoid the cmpiler blowing up. */
1538 if (TREE_CODE (type
) == RECORD_TYPE
1540 = contains_null_expr (DECL_SIZE (TREE_PURPOSE (elmt
))))))
1541 return build1 (NULL_EXPR
, type
, TREE_OPERAND (result
, 0));
1544 /* If TYPE is a RECORD_TYPE and the fields are not in the
1545 same order as their bit position, don't treat this as constant
1546 since varasm.c can't handle it. */
1547 if (allconstant
&& TREE_CODE (type
) == RECORD_TYPE
)
1549 tree last_pos
= bitsize_zero_node
;
1552 for (field
= TYPE_FIELDS (type
); field
; field
= TREE_CHAIN (field
))
1554 tree this_pos
= bit_position (field
);
1556 if (TREE_CODE (this_pos
) != INTEGER_CST
1557 || tree_int_cst_lt (this_pos
, last_pos
))
1563 last_pos
= this_pos
;
1567 result
= build (CONSTRUCTOR
, type
, NULL_TREE
, list
);
1568 TREE_CONSTANT (result
) = allconstant
;
1569 TREE_STATIC (result
) = allconstant
;
1570 TREE_SIDE_EFFECTS (result
) = side_effects
;
1571 TREE_READONLY (result
) = TREE_READONLY (type
);
1576 /* Return a COMPONENT_REF to access a field that is given by COMPONENT,
1577 an IDENTIFIER_NODE giving the name of the field, or FIELD, a FIELD_DECL,
1580 We also handle the fact that we might have been passed a pointer to the
1581 actual record and know how to look for fields in variant parts. */
/* NOTE(review): this extraction is garbled -- statements are split
   across lines, original line numbers are embedded in the text, and
   gaps in that numbering show dropped lines (return type, braces,
   abort calls, loop bodies).  The added comments annotate only the
   visible fragments; verify against the original sources.  */
1584 build_simple_component_ref (record_variable
, component
, field
)
1585 tree record_variable
;
1589 tree record_type
= TYPE_MAIN_VARIANT (TREE_TYPE (record_variable
));
/* The variable must be of some record-like type with a known size.  */
1592 if ((TREE_CODE (record_type
) != RECORD_TYPE
1593 && TREE_CODE (record_type
) != UNION_TYPE
1594 && TREE_CODE (record_type
) != QUAL_UNION_TYPE
)
1595 || TYPE_SIZE (record_type
) == 0)
1598 /* Either COMPONENT or FIELD must be specified, but not both. */
1599 if ((component
!= 0) == (field
!= 0))
1602 /* If no field was specified, look for a field with the specified name
1603 in the current record only. */
1605 for (field
= TYPE_FIELDS (record_type
); field
;
1606 field
= TREE_CHAIN (field
))
1607 if (DECL_NAME (field
) == component
)
1613 /* If this field is not in the specified record, see if we can find
1614 something in the record whose original field is the same as this one. */
1615 if (DECL_CONTEXT (field
) != record_type
)
1616 /* Check if there is a field with name COMPONENT in the record. */
1620 /* First loop thru normal components. */
1622 for (new_field
= TYPE_FIELDS (record_type
); new_field
!= 0;
1623 new_field
= TREE_CHAIN (new_field
))
1624 if (DECL_ORIGINAL_FIELD (new_field
) == field
1625 || new_field
== DECL_ORIGINAL_FIELD (field
)
1626 || (DECL_ORIGINAL_FIELD (field
) != 0
1627 && (DECL_ORIGINAL_FIELD (field
)
1628 == DECL_ORIGINAL_FIELD (new_field
))))
1631 /* Next, loop thru DECL_INTERNAL_P components if we haven't found
1632 the component in the first search. Doing this search in 2 steps
1633 is required to avoid hidden homonymous fields in the
1637 for (new_field
= TYPE_FIELDS (record_type
); new_field
!= 0;
1638 new_field
= TREE_CHAIN (new_field
))
1639 if (DECL_INTERNAL_P (new_field
))
/* Recurse through the internal (variant-part) field, then look for
   FIELD inside the resulting reference.  */
1642 = build_simple_component_ref (record_variable
,
1643 NULL_TREE
, new_field
);
1644 ref
= build_simple_component_ref (field_ref
, NULL_TREE
, field
);
1656 /* It would be nice to call "fold" here, but that can lose a type
1657 we need to tag a PLACEHOLDER_EXPR with, so we can't do it. */
1658 ref
= build (COMPONENT_REF
, TREE_TYPE (field
), record_variable
, field
);
/* Propagate read-only and volatile flags onto the reference.  */
1660 if (TREE_READONLY (record_variable
) || TREE_READONLY (field
))
1661 TREE_READONLY (ref
) = 1;
1662 if (TREE_THIS_VOLATILE (record_variable
) || TREE_THIS_VOLATILE (field
)
1663 || TYPE_VOLATILE (record_type
))
1664 TREE_THIS_VOLATILE (ref
) = 1;
1669 /* Like build_simple_component_ref, except that we give an error if the
1670 reference could not be found. */
1673 build_component_ref (record_variable
, component
, field
)
1674 tree record_variable
;
1678 tree ref
= build_simple_component_ref (record_variable
, component
, field
);
1683 /* If FIELD was specified, assume this is an invalid user field so
1684 raise constraint error. Otherwise, we can't find the type to return, so
1687 else if (field
!= 0)
1688 return build1 (NULL_EXPR
, TREE_TYPE (field
),
1689 build_call_raise (CE_Discriminant_Check_Failed
));
1694 /* Build a GCC tree to call an allocation or deallocation function.
1695 If GNU_OBJ is nonzero, it is an object to deallocate. Otherwise,
1696 generate an allocator.
1698 GNU_SIZE is the size of the object in bytes and ALIGN is the alignment in
1699 bits. GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the
1700 storage pool to use. If not present, malloc and free will be used except
1701 if GNAT_PROC is the "fake" value of -1, in which case we allocate the
1702 object dynamically on the stack frame. */
/* NOTE(review): this extraction is garbled -- statements are split
   across lines, original line numbers are embedded in the text, and
   gaps in that numbering show dropped lines (return type, braces,
   conditionals and returns).  The added comments annotate only the
   visible fragments; verify against the original sources.  */
1705 build_call_alloc_dealloc (gnu_obj
, gnu_size
, align
, gnat_proc
, gnat_pool
)
1709 Entity_Id gnat_proc
;
1710 Entity_Id gnat_pool
;
/* Alignment is converted from bits to bytes for the runtime call.  */
1712 tree gnu_align
= size_int (align
/ BITS_PER_UNIT
);
/* A self-referential size must be evaluated against the object being
   deallocated, hence the WITH_RECORD_EXPR wrapper.  */
1714 if (TREE_CODE (gnu_size
) != INTEGER_CST
&& contains_placeholder_p (gnu_size
))
1715 gnu_size
= build (WITH_RECORD_EXPR
, sizetype
, gnu_size
,
1716 build_unary_op (INDIRECT_REF
, NULL_TREE
, gnu_obj
));
1718 if (Present (gnat_proc
))
1720 /* The storage pools are obviously always tagged types, but the
1721 secondary stack uses the same mechanism and is not tagged */
1722 if (Is_Tagged_Type (Etype (gnat_pool
)))
1724 /* The size is the third parameter; the alignment is the
1726 Entity_Id gnat_size_type
1727 = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc
))));
1728 tree gnu_size_type
= gnat_to_gnu_type (gnat_size_type
);
1729 tree gnu_proc
= gnat_to_gnu (gnat_proc
);
1730 tree gnu_proc_addr
= build_unary_op (ADDR_EXPR
, NULL_TREE
, gnu_proc
);
1731 tree gnu_pool
= gnat_to_gnu (gnat_pool
);
1732 tree gnu_pool_addr
= build_unary_op (ADDR_EXPR
, NULL_TREE
, gnu_pool
);
1733 tree gnu_args
= NULL_TREE
;
1736 /* The first arg is always the address of the storage pool; next
1737 comes the address of the object, for a deallocator, then the
1738 size and alignment. */
1740 = chainon (gnu_args
, build_tree_list (NULL_TREE
, gnu_pool_addr
));
1744 = chainon (gnu_args
, build_tree_list (NULL_TREE
, gnu_obj
));
1747 = chainon (gnu_args
,
1748 build_tree_list (NULL_TREE
,
1749 convert (gnu_size_type
, gnu_size
)));
1751 = chainon (gnu_args
,
1752 build_tree_list (NULL_TREE
,
1753 convert (gnu_size_type
, gnu_align
)));
/* Build the pool-procedure call and mark it as having side effects.  */
1755 gnu_call
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (gnu_proc
)),
1756 gnu_proc_addr
, gnu_args
, NULL_TREE
);
1757 TREE_SIDE_EFFECTS (gnu_call
) = 1;
1761 /* Secondary stack case. */
1764 /* The size is the second parameter */
1765 Entity_Id gnat_size_type
1766 = Etype (Next_Formal (First_Formal (gnat_proc
)));
1767 tree gnu_size_type
= gnat_to_gnu_type (gnat_size_type
);
1768 tree gnu_proc
= gnat_to_gnu (gnat_proc
);
1769 tree gnu_proc_addr
= build_unary_op (ADDR_EXPR
, NULL_TREE
, gnu_proc
);
1770 tree gnu_args
= NULL_TREE
;
1773 /* The first arg is the address of the object, for a
1774 deallocator, then the size */
1777 = chainon (gnu_args
, build_tree_list (NULL_TREE
, gnu_obj
));
1780 = chainon (gnu_args
,
1781 build_tree_list (NULL_TREE
,
1782 convert (gnu_size_type
, gnu_size
)));
/* Build the secondary-stack call and mark it as having side effects.  */
1784 gnu_call
= build (CALL_EXPR
, TREE_TYPE (TREE_TYPE (gnu_proc
)),
1785 gnu_proc_addr
, gnu_args
, NULL_TREE
);
1786 TREE_SIDE_EFFECTS (gnu_call
) = 1;
/* No pool procedure: deallocation goes to free.  */
1792 return build_call_1_expr (free_decl
, gnu_obj
);
/* GNAT_PROC of -1: allocate the object on the stack frame instead.  */
1793 else if (gnat_pool
== -1)
1795 /* If the size is a constant, we can put it in the fixed portion of
1796 the stack frame to avoid the need to adjust the stack pointer. */
1797 if (TREE_CODE (gnu_size
) == INTEGER_CST
&& ! flag_stack_check
)
1800 = build_range_type (NULL_TREE
, size_one_node
, gnu_size
);
1801 tree gnu_array_type
= build_array_type (char_type_node
, gnu_range
);
1803 create_var_decl (get_identifier ("RETVAL"), NULL_TREE
,
1804 gnu_array_type
, NULL_TREE
, 0, 0, 0, 0, 0);
1806 return convert (ptr_void_type_node
,
1807 build_unary_op (ADDR_EXPR
, NULL_TREE
, gnu_decl
));
/* Otherwise allocate dynamically on the stack via ALLOCATE_EXPR.  */
1810 return build (ALLOCATE_EXPR
, ptr_void_type_node
, gnu_size
, gnu_align
);
/* Default allocation path: call malloc.  */
1813 return build_call_1_expr (malloc_decl
, gnu_size
);
1816 /* Build a GCC tree to correspond to allocating an object of TYPE whose
1817 initial value is INIT, if INIT is nonzero. Convert the expression to
1818 RESULT_TYPE, which must be some type of pointer. Return the tree.
1819 GNAT_PROC and GNAT_POOL optionally give the procedure to call and
1820 the storage pool to use. */
/* NOTE(review): this extraction is garbled -- statements are split
   across lines, original line numbers are embedded in the text, and
   gaps in that numbering show dropped lines (declarations, braces,
   conditionals and returns).  The added comments annotate only the
   visible fragments; verify against the original sources.  */
1823 build_allocator (type
, init
, result_type
, gnat_proc
, gnat_pool
)
1827 Entity_Id gnat_proc
;
1828 Entity_Id gnat_pool
;
1830 tree size
= TYPE_SIZE_UNIT (type
);
1833 /* If the initializer, if present, is a NULL_EXPR, just return a new one. */
1834 if (init
!= 0 && TREE_CODE (init
) == NULL_EXPR
)
1835 return build1 (NULL_EXPR
, result_type
, TREE_OPERAND (init
, 0));
1837 /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
1838 sizes of the object and its template. Allocate the whole thing and
1839 fill in the parts that are known. */
1840 else if (TYPE_FAT_OR_THIN_POINTER_P (result_type
))
/* Extract the template type from the fat/thin pointer's fields.  */
1843 = (TYPE_FAT_POINTER_P (result_type
)
1844 ? TREE_TYPE (TREE_TYPE (TREE_CHAIN (TYPE_FIELDS (result_type
))))
1845 : TREE_TYPE (TYPE_FIELDS (TREE_TYPE (result_type
))));
/* Build a storage type holding the template plus the object proper.  */
1847 = build_unc_object_type (template_type
, type
,
1848 get_identifier ("ALLOC"));
1849 tree storage_ptr_type
= build_pointer_type (storage_type
);
1851 tree template_cons
= NULL_TREE
;
1853 size
= TYPE_SIZE_UNIT (storage_type
);
/* A self-referential storage size is evaluated against INIT.  */
1855 if (TREE_CODE (size
) != INTEGER_CST
1856 && contains_placeholder_p (size
))
1857 size
= build (WITH_RECORD_EXPR
, sizetype
, size
, init
);
1859 /* If the size overflows, pass -1 so the allocator will raise
1861 if (TREE_CODE (size
) == INTEGER_CST
&& TREE_OVERFLOW (size
))
1862 size
= ssize_int (-1);
/* Allocate the storage and protect it from multiple evaluation.  */
1864 storage
= build_call_alloc_dealloc (NULL_TREE
, size
,
1865 TYPE_ALIGN (storage_type
),
1866 gnat_proc
, gnat_pool
);
1867 storage
= convert (storage_ptr_type
, protect_multiple_eval (storage
));
/* Strip a padding record down to its field type.  */
1869 if (TREE_CODE (type
) == RECORD_TYPE
&& TYPE_IS_PADDING_P (type
))
1871 type
= TREE_TYPE (TYPE_FIELDS (type
));
1874 init
= convert (type
, init
);
1877 /* If there is an initializing expression, make a constructor for
1878 the entire object including the bounds and copy it into the
1879 object. If there is no initializing expression, just set the
/* Constructor elements: the object field (second field) ...  */
1883 template_cons
= tree_cons (TREE_CHAIN (TYPE_FIELDS (storage_type
)),
/* ... and the template (first field), built for TYPE.  */
1885 template_cons
= tree_cons (TYPE_FIELDS (storage_type
),
1886 build_template (template_type
, type
,
/* Store the whole constructor through the storage pointer, then yield
   the storage pointer as the value of the COMPOUND_EXPR.  */
1892 build (COMPOUND_EXPR
, storage_ptr_type
,
1894 (MODIFY_EXPR
, storage_type
,
1895 build_unary_op (INDIRECT_REF
, NULL_TREE
,
1896 convert (storage_ptr_type
, storage
)),
1897 build_constructor (storage_type
, template_cons
)),
1898 convert (storage_ptr_type
, storage
)));
/* No initializer: just assign the template part of the storage.  */
1902 (COMPOUND_EXPR
, result_type
,
1904 (MODIFY_EXPR
, template_type
,
1906 (build_unary_op (INDIRECT_REF
, NULL_TREE
,
1907 convert (storage_ptr_type
, storage
)),
1908 NULL_TREE
, TYPE_FIELDS (storage_type
)),
1909 build_template (template_type
, type
, NULL_TREE
)),
1910 convert (result_type
, convert (storage_ptr_type
, storage
)));
1913 /* If we have an initializing expression, see if its size is simpler
1914 than the size from the type. */
1915 if (init
!= 0 && TYPE_SIZE_UNIT (TREE_TYPE (init
)) != 0
1916 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init
))) == INTEGER_CST
1917 || (TREE_CODE (size
) != INTEGER_CST
1918 && contains_placeholder_p (size
))))
1919 size
= TYPE_SIZE_UNIT (TREE_TYPE (init
));
1921 /* If the size is still self-referential, reference the initializing
1922 expression, if it is present. If not, this must have been a
1923 call to allocate a library-level object, in which case we use
1924 the maximum size. */
1925 if (TREE_CODE (size
) != INTEGER_CST
&& contains_placeholder_p (size
))
1928 size
= max_size (size
, 1);
1930 size
= build (WITH_RECORD_EXPR
, sizetype
, size
, init
);
1933 /* If the size overflows, pass -1 so the allocator will raise
1935 if (TREE_CODE (size
) == INTEGER_CST
&& TREE_OVERFLOW (size
))
1936 size
= ssize_int (-1);
1938 /* If this is a type whose alignment is larger than the
1939 biggest we support in normal alignment and this is in
1940 the default storage pool, make an "aligning type", allocate
1941 it, point to the field we need, and return that. */
1942 if (TYPE_ALIGN (type
) > BIGGEST_ALIGNMENT
1945 tree new_type
= make_aligning_type (type
, TYPE_ALIGN (type
), size
);
/* Allocate the aligning wrapper, then address its single field.  */
1947 result
= build_call_alloc_dealloc (NULL_TREE
, TYPE_SIZE (new_type
),
1948 BIGGEST_ALIGNMENT
, Empty
, Empty
);
1949 result
= save_expr (result
);
1950 result
= convert (build_pointer_type (new_type
), result
);
1951 result
= build_unary_op (INDIRECT_REF
, NULL_TREE
, result
);
1952 result
= build_component_ref (result
, NULL_TREE
,
1953 TYPE_FIELDS (new_type
));
1954 result
= convert (result_type
,
1955 build_unary_op (ADDR_EXPR
, NULL_TREE
, result
));
/* Normal case: allocate SIZE bytes in the requested pool.  */
1958 result
= convert (result_type
,
1959 build_call_alloc_dealloc (NULL_TREE
, size
,
1961 gnat_proc
, gnat_pool
));
1963 /* If we have an initial value, put the new address into a SAVE_EXPR, assign
1964 the value, and return the address. Do this with a COMPOUND_EXPR. */
1968 result
= save_expr (result
);
1970 = build (COMPOUND_EXPR
, TREE_TYPE (result
),
1972 (MODIFY_EXPR
, TREE_TYPE (TREE_TYPE (result
)),
1973 build_unary_op (INDIRECT_REF
, TREE_TYPE (TREE_TYPE (result
)),
1979 return convert (result_type
, result
);
1982 /* Fill in a VMS descriptor for EXPR and return a constructor for it.
1983 GNAT_FORMAL is how we find the descriptor record. */
1986 fill_vms_descriptor (expr
, gnat_formal
)
1988 Entity_Id gnat_formal
;
1990 tree record_type
= TREE_TYPE (TREE_TYPE (get_gnu_tree (gnat_formal
)));
1992 tree const_list
= 0;
1994 expr
= maybe_unconstrained_array (expr
);
1995 gnat_mark_addressable (expr
);
1997 for (field
= TYPE_FIELDS (record_type
); field
; field
= TREE_CHAIN (field
))
1999 tree init
= DECL_INITIAL (field
);
2001 if (TREE_CODE (init
) != INTEGER_CST
2002 && contains_placeholder_p (init
))
2003 init
= build (WITH_RECORD_EXPR
, TREE_TYPE (init
), init
, expr
);
2005 const_list
= tree_cons (field
, convert (TREE_TYPE (field
), init
),
2009 return build_constructor (record_type
, nreverse (const_list
));
2012 /* Indicate that we need to make the address of EXPR_NODE and it therefore
2013 should not be allocated in a register. Returns true if successful. */
2016 gnat_mark_addressable (expr_node
)
2020 switch (TREE_CODE (expr_node
))
2025 case ARRAY_RANGE_REF
:
2029 expr_node
= TREE_OPERAND (expr_node
, 0);
2033 TREE_ADDRESSABLE (expr_node
) = 1;
2039 put_var_into_stack (expr_node
);
2040 TREE_ADDRESSABLE (expr_node
) = 1;
2044 TREE_ADDRESSABLE (expr_node
) = 1;
2048 return (DECL_CONST_CORRESPONDING_VAR (expr_node
) != 0
2049 && (gnat_mark_addressable
2050 (DECL_CONST_CORRESPONDING_VAR (expr_node
))));