1 /****************************************************************************
3 * GNAT COMPILER COMPONENTS *
7 * C Implementation File *
9 * Copyright (C) 1992-2022, Free Software Foundation, Inc. *
11 * GNAT is free software; you can redistribute it and/or modify it under *
12 * terms of the GNU General Public License as published by the Free Soft- *
13 * ware Foundation; either version 3, or (at your option) any later ver- *
14 * sion. GNAT is distributed in the hope that it will be useful, but WITH- *
15 * OUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY *
16 * or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
17 * for more details. You should have received a copy of the GNU General *
18 * Public License along with GCC; see the file COPYING3. If not see *
19 * <http://www.gnu.org/licenses/>. *
21 * GNAT was originally developed by the GNAT team at New York University. *
22 * Extensive contributions were provided by Ada Core Technologies Inc. *
24 ****************************************************************************/
28 #include "coretypes.h"
36 #include "fold-const.h"
37 #include "stor-layout.h"
38 #include "stringpool.h"
43 #include "tree-inline.h"
60 /* Return the base type of TYPE. */
63 get_base_type (tree type
)
65 if (TREE_CODE (type
) == RECORD_TYPE
66 && TYPE_JUSTIFIED_MODULAR_P (type
))
67 type
= TREE_TYPE (TYPE_FIELDS (type
));
69 while (TREE_TYPE (type
)
70 && (TREE_CODE (type
) == INTEGER_TYPE
71 || TREE_CODE (type
) == REAL_TYPE
))
72 type
= TREE_TYPE (type
);
77 /* EXP is a GCC tree representing an address. See if we can find how strictly
78 the object at this address is aligned and, if so, return the alignment of
79 the object in bits. Otherwise return 0. */
82 known_alignment (tree exp
)
84 unsigned int this_alignment
;
85 unsigned int lhs
, rhs
;
87 switch (TREE_CODE (exp
))
90 case VIEW_CONVERT_EXPR
:
92 /* Conversions between pointers and integers don't change the alignment
93 of the underlying object. */
94 this_alignment
= known_alignment (TREE_OPERAND (exp
, 0));
98 /* The value of a COMPOUND_EXPR is that of its second operand. */
99 this_alignment
= known_alignment (TREE_OPERAND (exp
, 1));
104 /* If two addresses are added, the alignment of the result is the
105 minimum of the two alignments. */
106 lhs
= known_alignment (TREE_OPERAND (exp
, 0));
107 rhs
= known_alignment (TREE_OPERAND (exp
, 1));
108 this_alignment
= MIN (lhs
, rhs
);
111 case POINTER_PLUS_EXPR
:
112 /* If this is the pattern built for aligning types, decode it. */
113 if (TREE_CODE (TREE_OPERAND (exp
, 1)) == BIT_AND_EXPR
114 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 1), 0)) == NEGATE_EXPR
)
116 tree op
= TREE_OPERAND (TREE_OPERAND (exp
, 1), 1);
118 known_alignment (fold_build1 (BIT_NOT_EXPR
, TREE_TYPE (op
), op
));
121 /* If we don't know the alignment of the offset, we assume that
123 lhs
= known_alignment (TREE_OPERAND (exp
, 0));
124 rhs
= known_alignment (TREE_OPERAND (exp
, 1));
127 this_alignment
= lhs
;
129 this_alignment
= MIN (lhs
, rhs
);
133 /* If there is a choice between two values, use the smaller one. */
134 lhs
= known_alignment (TREE_OPERAND (exp
, 1));
135 rhs
= known_alignment (TREE_OPERAND (exp
, 2));
136 this_alignment
= MIN (lhs
, rhs
);
141 unsigned HOST_WIDE_INT c
= TREE_INT_CST_LOW (exp
);
142 /* The first part of this represents the lowest bit in the constant,
143 but it is originally in bytes, not bits. */
144 this_alignment
= (c
& -c
) * BITS_PER_UNIT
;
149 /* If we know the alignment of just one side, use it. Otherwise,
150 use the product of the alignments. */
151 lhs
= known_alignment (TREE_OPERAND (exp
, 0));
152 rhs
= known_alignment (TREE_OPERAND (exp
, 1));
155 this_alignment
= rhs
;
157 this_alignment
= lhs
;
159 this_alignment
= MIN (lhs
* rhs
, BIGGEST_ALIGNMENT
);
163 /* A bit-and expression is as aligned as the maximum alignment of the
164 operands. We typically get here for a complex lhs and a constant
165 negative power of two on the rhs to force an explicit alignment, so
166 don't bother looking at the lhs. */
167 this_alignment
= known_alignment (TREE_OPERAND (exp
, 1));
171 if (DECL_P (TREE_OPERAND (exp
, 0)))
172 this_alignment
= DECL_ALIGN (TREE_OPERAND (exp
, 0));
174 this_alignment
= get_object_alignment (TREE_OPERAND (exp
, 0));
179 tree fndecl
= get_callee_fndecl (exp
);
180 if (fndecl
== malloc_decl
|| fndecl
== realloc_decl
)
181 return get_target_system_allocator_alignment () * BITS_PER_UNIT
;
183 tree t
= maybe_inline_call_in_expr (exp
);
185 return known_alignment (t
);
188 /* ... fall through ... */
191 /* For other pointer expressions, we assume that the pointed-to object
192 is at least as aligned as the pointed-to type. Beware that we can
193 have a dummy type here (e.g. a Taft Amendment type), for which the
194 alignment is meaningless and should be ignored. */
195 if (POINTER_TYPE_P (TREE_TYPE (exp
))
196 && !TYPE_IS_DUMMY_P (TREE_TYPE (TREE_TYPE (exp
)))
197 && !VOID_TYPE_P (TREE_TYPE (TREE_TYPE (exp
))))
198 this_alignment
= TYPE_ALIGN (TREE_TYPE (TREE_TYPE (exp
)));
204 return this_alignment
;
207 /* We have a comparison or assignment operation on two types, T1 and T2, which
208 are either both array types or both record types. T1 is assumed to be for
209 the left hand side operand, and T2 for the right hand side. Return the
210 type that both operands should be converted to for the operation, if any.
211 Otherwise return zero. */
214 find_common_type (tree t1
, tree t2
)
216 /* ??? As of today, various constructs lead to here with types of different
217 sizes even when both constants (e.g. tagged types, packable vs regular
218 component types, padded vs unpadded types, ...). While some of these
219 would better be handled upstream (types should be made consistent before
220 calling into build_binary_op), some others are really expected and we
221 have to be careful. */
223 const bool variable_record_on_lhs
224 = (TREE_CODE (t1
) == RECORD_TYPE
225 && TREE_CODE (t2
) == RECORD_TYPE
226 && get_variant_part (t1
)
227 && !get_variant_part (t2
));
229 const bool variable_array_on_lhs
230 = (TREE_CODE (t1
) == ARRAY_TYPE
231 && TREE_CODE (t2
) == ARRAY_TYPE
232 && !TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (t1
)))
233 && TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (t2
))));
235 /* We must avoid writing more than what the target can hold if this is for
236 an assignment and the case of tagged types is handled in build_binary_op
237 so we use the lhs type if it is known to be smaller or of constant size
238 and the rhs type is not, whatever the modes. We also force t1 in case of
239 constant size equality to minimize occurrences of view conversions on the
240 lhs of an assignment, except for the case of types with a variable part
241 on the lhs but not on the rhs to make the conversion simpler. */
242 if (TREE_CONSTANT (TYPE_SIZE (t1
))
243 && (!TREE_CONSTANT (TYPE_SIZE (t2
))
244 || tree_int_cst_lt (TYPE_SIZE (t1
), TYPE_SIZE (t2
))
245 || (TYPE_SIZE (t1
) == TYPE_SIZE (t2
)
246 && !variable_record_on_lhs
247 && !variable_array_on_lhs
)))
250 /* Otherwise, if the lhs type is non-BLKmode, use it, except for the case of
251 a non-BLKmode rhs and array types with a variable part on the lhs but not
252 on the rhs to make sure the conversion is preserved during gimplification.
253 Note that we know that we will not have any alignment problems since, if
254 we did, the non-BLKmode type could not have been used. */
255 if (TYPE_MODE (t1
) != BLKmode
256 && (TYPE_MODE (t2
) == BLKmode
|| !variable_array_on_lhs
))
259 /* If the rhs type is of constant size, use it whatever the modes. At
260 this point it is known to be smaller, or of constant size and the
262 if (TREE_CONSTANT (TYPE_SIZE (t2
)))
265 /* Otherwise, if the rhs type is non-BLKmode, use it. */
266 if (TYPE_MODE (t2
) != BLKmode
)
269 /* In this case, both types have variable size and BLKmode. It's
270 probably best to leave the "type mismatch" because changing it
271 could cause a bad self-referential reference. */
275 /* Return an expression tree representing an equality comparison of A1 and A2,
276 two objects of type ARRAY_TYPE. The result should be of type RESULT_TYPE.
278 Two arrays are equal in one of two ways: (1) if both have zero length in
279 some dimension (not necessarily the same dimension) or (2) if the lengths
280 in each dimension are equal and the data is equal. We perform the length
281 tests in as efficient a manner as possible. */
284 compare_arrays (location_t loc
, tree result_type
, tree a1
, tree a2
)
286 tree result
= convert (result_type
, boolean_true_node
);
287 tree a1_is_null
= convert (result_type
, boolean_false_node
);
288 tree a2_is_null
= convert (result_type
, boolean_false_node
);
289 tree t1
= TREE_TYPE (a1
);
290 tree t2
= TREE_TYPE (a2
);
291 bool a1_side_effects_p
= TREE_SIDE_EFFECTS (a1
);
292 bool a2_side_effects_p
= TREE_SIDE_EFFECTS (a2
);
293 bool length_zero_p
= false;
295 /* If the operands have side-effects, they need to be evaluated only once
296 in spite of the multiple references in the comparison. */
297 if (a1_side_effects_p
)
298 a1
= gnat_protect_expr (a1
);
300 if (a2_side_effects_p
)
301 a2
= gnat_protect_expr (a2
);
303 /* Process each dimension separately and compare the lengths. If any
304 dimension has a length known to be zero, set LENGTH_ZERO_P to true
305 in order to suppress the comparison of the data at the end. */
306 while (TREE_CODE (t1
) == ARRAY_TYPE
&& TREE_CODE (t2
) == ARRAY_TYPE
)
308 tree dom1
= TYPE_DOMAIN (t1
);
309 tree dom2
= TYPE_DOMAIN (t2
);
310 tree length1
= size_binop (PLUS_EXPR
,
311 size_binop (MINUS_EXPR
,
312 TYPE_MAX_VALUE (dom1
),
313 TYPE_MIN_VALUE (dom1
)),
315 tree length2
= size_binop (PLUS_EXPR
,
316 size_binop (MINUS_EXPR
,
317 TYPE_MAX_VALUE (dom2
),
318 TYPE_MIN_VALUE (dom2
)),
320 tree ind1
= TYPE_INDEX_TYPE (dom1
);
321 tree ind2
= TYPE_INDEX_TYPE (dom2
);
322 tree base_type
= maybe_character_type (get_base_type (ind1
));
323 tree lb1
= convert (base_type
, TYPE_MIN_VALUE (ind1
));
324 tree ub1
= convert (base_type
, TYPE_MAX_VALUE (ind1
));
325 tree lb2
= convert (base_type
, TYPE_MIN_VALUE (ind2
));
326 tree ub2
= convert (base_type
, TYPE_MAX_VALUE (ind2
));
327 tree comparison
, this_a1_is_null
, this_a2_is_null
;
329 /* If the length of the first array is a constant and that of the second
330 array is not, swap our operands to have the constant second. */
331 if (TREE_CODE (length1
) == INTEGER_CST
332 && TREE_CODE (length2
) != INTEGER_CST
)
337 tem
= a1
, a1
= a2
, a2
= tem
;
338 tem
= t1
, t1
= t2
, t2
= tem
;
339 tem
= lb1
, lb1
= lb2
, lb2
= tem
;
340 tem
= ub1
, ub1
= ub2
, ub2
= tem
;
341 tem
= length1
, length1
= length2
, length2
= tem
;
342 tem
= a1_is_null
, a1_is_null
= a2_is_null
, a2_is_null
= tem
;
343 btem
= a1_side_effects_p
, a1_side_effects_p
= a2_side_effects_p
,
344 a2_side_effects_p
= btem
;
347 /* If the length of the second array is the constant zero, we can just
348 use the original stored bounds for the first array and see whether
349 last < first holds. */
350 if (integer_zerop (length2
))
352 length_zero_p
= true;
354 lb1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (lb1
, a1
);
355 ub1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (ub1
, a1
);
357 comparison
= fold_build2_loc (loc
, LT_EXPR
, result_type
, ub1
, lb1
);
358 if (EXPR_P (comparison
))
359 SET_EXPR_LOCATION (comparison
, loc
);
361 this_a1_is_null
= comparison
;
362 this_a2_is_null
= convert (result_type
, boolean_true_node
);
365 /* Otherwise, if the length is some other constant value, we know that
366 this dimension in the second array cannot be superflat, so we can
367 just use its length computed from the actual stored bounds. */
368 else if (TREE_CODE (length2
) == INTEGER_CST
)
370 /* Note that we know that LB2 and UB2 are constant and hence
371 cannot contain a PLACEHOLDER_EXPR. */
372 lb1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (lb1
, a1
);
373 ub1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (ub1
, a1
);
376 = fold_build2_loc (loc
, EQ_EXPR
, result_type
,
377 build_binary_op (MINUS_EXPR
, base_type
,
379 build_binary_op (MINUS_EXPR
, base_type
,
381 if (EXPR_P (comparison
))
382 SET_EXPR_LOCATION (comparison
, loc
);
385 = fold_build2_loc (loc
, LT_EXPR
, result_type
, ub1
, lb1
);
387 this_a2_is_null
= convert (result_type
, boolean_false_node
);
390 /* Otherwise, compare the computed lengths. */
393 length1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (length1
, a1
);
394 length2
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (length2
, a2
);
397 = fold_build2_loc (loc
, EQ_EXPR
, result_type
, length1
, length2
);
398 if (EXPR_P (comparison
))
399 SET_EXPR_LOCATION (comparison
, loc
);
401 lb1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (lb1
, a1
);
402 ub1
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (ub1
, a1
);
405 = fold_build2_loc (loc
, LT_EXPR
, result_type
, ub1
, lb1
);
407 lb2
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (lb2
, a2
);
408 ub2
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (ub2
, a2
);
411 = fold_build2_loc (loc
, LT_EXPR
, result_type
, ub2
, lb2
);
414 /* Append expressions for this dimension to the final expressions. */
415 result
= build_binary_op (TRUTH_ANDIF_EXPR
, result_type
,
418 a1_is_null
= build_binary_op (TRUTH_ORIF_EXPR
, result_type
,
419 this_a1_is_null
, a1_is_null
);
421 a2_is_null
= build_binary_op (TRUTH_ORIF_EXPR
, result_type
,
422 this_a2_is_null
, a2_is_null
);
428 /* Unless the length of some dimension is known to be zero, compare the
429 data in the array. */
432 tree type
= find_common_type (TREE_TYPE (a1
), TREE_TYPE (a2
));
437 a1
= convert (type
, a1
),
438 a2
= convert (type
, a2
);
441 comparison
= fold_build2_loc (loc
, EQ_EXPR
, result_type
, a1
, a2
);
444 = build_binary_op (TRUTH_ANDIF_EXPR
, result_type
, result
, comparison
);
447 /* The result is also true if both sizes are zero. */
448 result
= build_binary_op (TRUTH_ORIF_EXPR
, result_type
,
449 build_binary_op (TRUTH_ANDIF_EXPR
, result_type
,
450 a1_is_null
, a2_is_null
),
453 /* If the operands have side-effects, they need to be evaluated before
454 doing the tests above since the place they otherwise would end up
455 being evaluated at run time could be wrong. */
456 if (a1_side_effects_p
)
457 result
= build2 (COMPOUND_EXPR
, result_type
, a1
, result
);
459 if (a2_side_effects_p
)
460 result
= build2 (COMPOUND_EXPR
, result_type
, a2
, result
);
465 /* Return an expression tree representing an equality comparison of P1 and P2,
466 two objects of fat pointer type. The result should be of type RESULT_TYPE.
468 Two fat pointers are equal in one of two ways: (1) if both have a null
469 pointer to the array or (2) if they contain the same couple of pointers.
470 We perform the comparison in as efficient a manner as possible. */
473 compare_fat_pointers (location_t loc
, tree result_type
, tree p1
, tree p2
)
475 tree p1_array
, p2_array
, p1_bounds
, p2_bounds
, same_array
, same_bounds
;
476 tree p1_array_is_null
, p2_array_is_null
;
478 /* If either operand has side-effects, they have to be evaluated only once
479 in spite of the multiple references to the operand in the comparison. */
480 p1
= gnat_protect_expr (p1
);
481 p2
= gnat_protect_expr (p2
);
483 /* The constant folder doesn't fold fat pointer types so we do it here. */
484 if (TREE_CODE (p1
) == CONSTRUCTOR
)
485 p1_array
= CONSTRUCTOR_ELT (p1
, 0)->value
;
487 p1_array
= build_component_ref (p1
, TYPE_FIELDS (TREE_TYPE (p1
)), true);
490 = fold_build2_loc (loc
, EQ_EXPR
, result_type
, p1_array
,
491 fold_convert_loc (loc
, TREE_TYPE (p1_array
),
494 if (TREE_CODE (p2
) == CONSTRUCTOR
)
495 p2_array
= CONSTRUCTOR_ELT (p2
, 0)->value
;
497 p2_array
= build_component_ref (p2
, TYPE_FIELDS (TREE_TYPE (p2
)), true);
500 = fold_build2_loc (loc
, EQ_EXPR
, result_type
, p2_array
,
501 fold_convert_loc (loc
, TREE_TYPE (p2_array
),
504 /* If one of the pointers to the array is null, just compare the other. */
505 if (integer_zerop (p1_array
))
506 return p2_array_is_null
;
507 else if (integer_zerop (p2_array
))
508 return p1_array_is_null
;
510 /* Otherwise, do the fully-fledged comparison. */
512 = fold_build2_loc (loc
, EQ_EXPR
, result_type
, p1_array
, p2_array
);
514 if (TREE_CODE (p1
) == CONSTRUCTOR
)
515 p1_bounds
= CONSTRUCTOR_ELT (p1
, 1)->value
;
518 = build_component_ref (p1
, DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p1
))),
521 if (TREE_CODE (p2
) == CONSTRUCTOR
)
522 p2_bounds
= CONSTRUCTOR_ELT (p2
, 1)->value
;
525 = build_component_ref (p2
, DECL_CHAIN (TYPE_FIELDS (TREE_TYPE (p2
))),
529 = fold_build2_loc (loc
, EQ_EXPR
, result_type
, p1_bounds
, p2_bounds
);
531 /* P1_ARRAY == P2_ARRAY && (P1_ARRAY == NULL || P1_BOUNDS == P2_BOUNDS). */
532 return build_binary_op (TRUTH_ANDIF_EXPR
, result_type
, same_array
,
533 build_binary_op (TRUTH_ORIF_EXPR
, result_type
,
534 p1_array_is_null
, same_bounds
));
537 /* Compute the result of applying OP_CODE to LHS and RHS, where both are of
538 type TYPE. We know that TYPE is a modular type with a nonbinary
542 nonbinary_modular_operation (enum tree_code op_code
, tree type
, tree lhs
,
545 tree modulus
= TYPE_MODULUS (type
);
546 unsigned int needed_precision
= tree_floor_log2 (modulus
) + 1;
547 unsigned int precision
;
548 bool unsignedp
= true;
552 /* If this is an addition of a constant, convert it to a subtraction
553 of a constant since we can do that faster. */
554 if (op_code
== PLUS_EXPR
&& TREE_CODE (rhs
) == INTEGER_CST
)
556 rhs
= fold_build2 (MINUS_EXPR
, type
, modulus
, rhs
);
557 op_code
= MINUS_EXPR
;
560 /* For the logical operations, we only need PRECISION bits. For
561 addition and subtraction, we need one more and for multiplication we
562 need twice as many. But we never want to make a size smaller than
564 if (op_code
== PLUS_EXPR
|| op_code
== MINUS_EXPR
)
565 needed_precision
+= 1;
566 else if (op_code
== MULT_EXPR
)
567 needed_precision
*= 2;
569 precision
= MAX (needed_precision
, TYPE_PRECISION (op_type
));
571 /* Unsigned will do for everything but subtraction. */
572 if (op_code
== MINUS_EXPR
)
575 /* If our type is the wrong signedness or isn't wide enough, make a new
576 type and convert both our operands to it. */
577 if (TYPE_PRECISION (op_type
) < precision
578 || TYPE_UNSIGNED (op_type
) != unsignedp
)
580 /* Copy the type so we ensure it can be modified to make it modular. */
581 op_type
= copy_type (gnat_type_for_size (precision
, unsignedp
));
582 modulus
= convert (op_type
, modulus
);
583 SET_TYPE_MODULUS (op_type
, modulus
);
584 TYPE_MODULAR_P (op_type
) = 1;
585 lhs
= convert (op_type
, lhs
);
586 rhs
= convert (op_type
, rhs
);
589 /* Do the operation, then we'll fix it up. */
590 result
= fold_build2 (op_code
, op_type
, lhs
, rhs
);
592 /* For multiplication, we have no choice but to do a full modulus
593 operation. However, we want to do this in the narrowest
595 if (op_code
== MULT_EXPR
)
597 /* Copy the type so we ensure it can be modified to make it modular. */
598 tree div_type
= copy_type (gnat_type_for_size (needed_precision
, 1));
599 modulus
= convert (div_type
, modulus
);
600 SET_TYPE_MODULUS (div_type
, modulus
);
601 TYPE_MODULAR_P (div_type
) = 1;
602 result
= convert (op_type
,
603 fold_build2 (TRUNC_MOD_EXPR
, div_type
,
604 convert (div_type
, result
), modulus
));
607 /* For subtraction, add the modulus back if we are negative. */
608 else if (op_code
== MINUS_EXPR
)
610 result
= gnat_protect_expr (result
);
611 result
= fold_build3 (COND_EXPR
, op_type
,
612 fold_build2 (LT_EXPR
, boolean_type_node
, result
,
613 build_int_cst (op_type
, 0)),
614 fold_build2 (PLUS_EXPR
, op_type
, result
, modulus
),
618 /* For the other operations, subtract the modulus if we are >= it. */
621 result
= gnat_protect_expr (result
);
622 result
= fold_build3 (COND_EXPR
, op_type
,
623 fold_build2 (GE_EXPR
, boolean_type_node
,
625 fold_build2 (MINUS_EXPR
, op_type
,
630 return convert (type
, result
);
633 /* This page contains routines that implement the Ada semantics with regard
634 to atomic objects. They are fully piggybacked on the middle-end support
635 for atomic loads and stores.
637 *** Memory barriers and volatile objects ***
639 We implement the weakened form of the C.6(16) clause that was introduced
640 in Ada 2012 (AI05-117). Earlier forms of this clause wouldn't have been
641 implementable without significant performance hits on modern platforms.
643 We also take advantage of the requirements imposed on shared variables by
644 9.10 (conditions for sequential actions) to have non-erroneous execution
645 and consider that C.6(16) and C.6(17) only prescribe an uniform order of
646 volatile updates with regard to sequential actions, i.e. with regard to
647 reads or updates of atomic objects.
649 As such, an update of an atomic object by a task requires that all earlier
650 accesses to volatile objects have completed. Similarly, later accesses to
651 volatile objects cannot be reordered before the update of the atomic object.
652 So, memory barriers both before and after the atomic update are needed.
654 For a read of an atomic object, to avoid seeing writes of volatile objects
655 by a task earlier than by the other tasks, a memory barrier is needed before
656 the atomic read. Finally, to avoid reordering later reads or updates of
657 volatile objects to before the atomic read, a barrier is needed after the
660 So, memory barriers are needed before and after atomic reads and updates.
661 And, in order to simplify the implementation, we use full memory barriers
662 in all cases, i.e. we enforce sequential consistency for atomic accesses. */
664 /* Return the size of TYPE, which must be a positive power of 2. */
667 resolve_atomic_size (tree type
)
669 unsigned HOST_WIDE_INT size
= tree_to_uhwi (TYPE_SIZE_UNIT (type
));
671 if (size
== 1 || size
== 2 || size
== 4 || size
== 8 || size
== 16)
674 /* We shouldn't reach here without having already detected that the size
675 isn't compatible with an atomic access. */
676 gcc_assert (Serious_Errors_Detected
);
681 /* Build an atomic load for the underlying atomic object in SRC. SYNC is
682 true if the load requires synchronization. */
685 build_atomic_load (tree src
, bool sync
)
689 (build_qualified_type (void_type_node
,
690 TYPE_QUAL_ATOMIC
| TYPE_QUAL_VOLATILE
));
692 = build_int_cst (integer_type_node
,
693 sync
? MEMMODEL_SEQ_CST
: MEMMODEL_RELAXED
);
699 /* Remove conversions to get the address of the underlying object. */
700 src
= remove_conversions (src
, false);
701 size
= resolve_atomic_size (TREE_TYPE (src
));
705 fncode
= (int) BUILT_IN_ATOMIC_LOAD_N
+ exact_log2 (size
) + 1;
706 t
= builtin_decl_implicit ((enum built_in_function
) fncode
);
708 addr
= build_unary_op (ADDR_EXPR
, ptr_type
, src
);
709 val
= build_call_expr (t
, 2, addr
, mem_model
);
711 /* First reinterpret the loaded bits in the original type of the load,
712 then convert to the expected result type. */
713 t
= fold_build1 (VIEW_CONVERT_EXPR
, TREE_TYPE (src
), val
);
714 return convert (TREE_TYPE (orig_src
), t
);
717 /* Build an atomic store from SRC to the underlying atomic object in DEST.
718 SYNC is true if the store requires synchronization. */
721 build_atomic_store (tree dest
, tree src
, bool sync
)
725 (build_qualified_type (void_type_node
,
726 TYPE_QUAL_ATOMIC
| TYPE_QUAL_VOLATILE
));
728 = build_int_cst (integer_type_node
,
729 sync
? MEMMODEL_SEQ_CST
: MEMMODEL_RELAXED
);
730 tree orig_dest
= dest
;
731 tree t
, int_type
, addr
;
735 /* Remove conversions to get the address of the underlying object. */
736 dest
= remove_conversions (dest
, false);
737 size
= resolve_atomic_size (TREE_TYPE (dest
));
739 return build_binary_op (MODIFY_EXPR
, NULL_TREE
, orig_dest
, src
);
741 fncode
= (int) BUILT_IN_ATOMIC_STORE_N
+ exact_log2 (size
) + 1;
742 t
= builtin_decl_implicit ((enum built_in_function
) fncode
);
743 int_type
= gnat_type_for_size (BITS_PER_UNIT
* size
, 1);
745 /* First convert the bits to be stored to the original type of the store,
746 then reinterpret them in the effective type. But if the original type
747 is a padded type with the same size, convert to the inner type instead,
748 as we don't want to artificially introduce a CONSTRUCTOR here. */
749 if (TYPE_IS_PADDING_P (TREE_TYPE (dest
))
750 && TYPE_SIZE (TREE_TYPE (dest
))
751 == TYPE_SIZE (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (dest
)))))
752 src
= convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (dest
))), src
);
754 src
= convert (TREE_TYPE (dest
), src
);
755 src
= fold_build1 (VIEW_CONVERT_EXPR
, int_type
, src
);
756 addr
= build_unary_op (ADDR_EXPR
, ptr_type
, dest
);
758 return build_call_expr (t
, 3, addr
, src
, mem_model
);
761 /* Build a load-modify-store sequence from SRC to DEST. GNAT_NODE is used for
762 the location of the sequence. Note that, even though the load and the store
763 are both atomic, the sequence itself is not atomic. */
766 build_load_modify_store (tree dest
, tree src
, Node_Id gnat_node
)
768 /* We will be modifying DEST below so we build a copy. */
769 dest
= copy_node (dest
);
772 while (handled_component_p (ref
))
774 /* The load should already have been generated during the translation
775 of the GNAT destination tree; find it out in the GNU tree. */
776 if (TREE_CODE (TREE_OPERAND (ref
, 0)) == VIEW_CONVERT_EXPR
)
778 tree op
= TREE_OPERAND (TREE_OPERAND (ref
, 0), 0);
779 if (TREE_CODE (op
) == CALL_EXPR
&& call_is_atomic_load (op
))
781 tree type
= TREE_TYPE (TREE_OPERAND (ref
, 0));
782 tree t
= CALL_EXPR_ARG (op
, 0);
783 tree obj
, temp
, stmt
;
785 /* Find out the loaded object. */
786 if (TREE_CODE (t
) == NOP_EXPR
)
787 t
= TREE_OPERAND (t
, 0);
788 if (TREE_CODE (t
) == ADDR_EXPR
)
789 obj
= TREE_OPERAND (t
, 0);
791 obj
= build1 (INDIRECT_REF
, type
, t
);
793 /* Drop atomic and volatile qualifiers for the temporary. */
794 type
= TYPE_MAIN_VARIANT (type
);
796 /* And drop BLKmode, if need be, to put it into a register. */
797 if (TYPE_MODE (type
) == BLKmode
)
799 unsigned int size
= tree_to_uhwi (TYPE_SIZE (type
));
800 type
= copy_type (type
);
801 machine_mode mode
= int_mode_for_size (size
, 0).else_blk ();
802 SET_TYPE_MODE (type
, mode
);
805 /* Create the temporary by inserting a SAVE_EXPR. */
806 temp
= build1 (SAVE_EXPR
, type
,
807 build1 (VIEW_CONVERT_EXPR
, type
, op
));
808 TREE_OPERAND (ref
, 0) = temp
;
812 /* Build the modify of the temporary. */
813 stmt
= build_binary_op (MODIFY_EXPR
, NULL_TREE
, dest
, src
);
814 add_stmt_with_node (stmt
, gnat_node
);
816 /* Build the store to the object. */
817 stmt
= build_atomic_store (obj
, temp
, false);
818 add_stmt_with_node (stmt
, gnat_node
);
820 return end_stmt_group ();
824 TREE_OPERAND (ref
, 0) = copy_node (TREE_OPERAND (ref
, 0));
825 ref
= TREE_OPERAND (ref
, 0);
828 /* Something went wrong earlier if we have not found the atomic load. */
832 /* Make a binary operation of kind OP_CODE. RESULT_TYPE is the type
833 desired for the result. Usually the operation is to be performed
834 in that type. For INIT_EXPR and MODIFY_EXPR, RESULT_TYPE must be
835 NULL_TREE. For ARRAY_REF, RESULT_TYPE may be NULL_TREE, in which
836 case the type to be used will be derived from the operands.
837 Don't fold the result if NO_FOLD is true.
839 This function is very much unlike the ones for C and C++ since we
840 have already done any type conversion and matching required. All we
841 have to do here is validate the work done by SEM and handle subtypes. */
844 build_binary_op (enum tree_code op_code
, tree result_type
,
845 tree left_operand
, tree right_operand
,
848 tree left_type
= TREE_TYPE (left_operand
);
849 tree right_type
= TREE_TYPE (right_operand
);
850 tree left_base_type
= get_base_type (left_type
);
851 tree right_base_type
= get_base_type (right_type
);
852 tree operation_type
= result_type
;
853 tree best_type
= NULL_TREE
;
854 tree modulus
, result
;
855 bool has_side_effects
= false;
858 && TREE_CODE (operation_type
) == RECORD_TYPE
859 && TYPE_JUSTIFIED_MODULAR_P (operation_type
))
860 operation_type
= TREE_TYPE (TYPE_FIELDS (operation_type
));
862 if (operation_type
&& TYPE_IS_EXTRA_SUBTYPE_P (operation_type
))
863 operation_type
= get_base_type (operation_type
);
865 modulus
= (operation_type
866 && TREE_CODE (operation_type
) == INTEGER_TYPE
867 && TYPE_MODULAR_P (operation_type
)
868 ? TYPE_MODULUS (operation_type
) : NULL_TREE
);
874 gcc_checking_assert (!result_type
);
876 /* If there were integral or pointer conversions on the LHS, remove
877 them; we'll be putting them back below if needed. Likewise for
878 conversions between record types, except for justified modular types.
879 But don't do this if the right operand is not BLKmode (for packed
880 arrays) unless we are not changing the mode. */
881 while ((CONVERT_EXPR_P (left_operand
)
882 || TREE_CODE (left_operand
) == VIEW_CONVERT_EXPR
)
883 && (((INTEGRAL_TYPE_P (left_type
)
884 || POINTER_TYPE_P (left_type
))
885 && (INTEGRAL_TYPE_P (operand_type (left_operand
))
886 || POINTER_TYPE_P (operand_type (left_operand
))))
887 || (TREE_CODE (left_type
) == RECORD_TYPE
888 && !TYPE_JUSTIFIED_MODULAR_P (left_type
)
889 && TREE_CODE (operand_type (left_operand
)) == RECORD_TYPE
890 && (TYPE_MODE (right_type
) == BLKmode
891 || TYPE_MODE (left_type
)
892 == TYPE_MODE (operand_type (left_operand
))))))
894 left_operand
= TREE_OPERAND (left_operand
, 0);
895 left_type
= TREE_TYPE (left_operand
);
898 /* If a class-wide type may be involved, force use of the RHS type. */
899 if ((TREE_CODE (right_type
) == RECORD_TYPE
900 || TREE_CODE (right_type
) == UNION_TYPE
)
901 && TYPE_ALIGN_OK (right_type
))
902 operation_type
= right_type
;
904 /* If we are copying between padded objects with compatible types, use
905 the padded view of the objects, this is very likely more efficient.
906 Likewise for a padded object that is assigned a constructor, if we
907 can convert the constructor to the inner type, to avoid putting a
908 VIEW_CONVERT_EXPR on the LHS. But don't do so if we wouldn't have
909 actually copied anything. */
910 else if (TYPE_IS_PADDING_P (left_type
)
911 && TREE_CONSTANT (TYPE_SIZE (left_type
))
912 && ((TREE_CODE (right_operand
) == COMPONENT_REF
913 && TYPE_MAIN_VARIANT (left_type
)
914 == TYPE_MAIN_VARIANT (operand_type (right_operand
)))
915 || (TREE_CODE (right_operand
) == CONSTRUCTOR
916 && !CONTAINS_PLACEHOLDER_P
917 (DECL_SIZE (TYPE_FIELDS (left_type
)))))
918 && !integer_zerop (TYPE_SIZE (right_type
)))
920 /* We make an exception for a BLKmode type padding a non-BLKmode
921 inner type and do the conversion of the LHS right away, since
922 unchecked_convert wouldn't do it properly. */
923 if (TYPE_MODE (left_type
) == BLKmode
924 && TYPE_MODE (right_type
) != BLKmode
925 && TREE_CODE (right_operand
) != CONSTRUCTOR
)
927 operation_type
= right_type
;
928 left_operand
= convert (operation_type
, left_operand
);
929 left_type
= operation_type
;
932 operation_type
= left_type
;
935 /* If we have a call to a function that returns with variable size, use
936 the RHS type in case we want to use the return slot optimization. */
937 else if (TREE_CODE (right_operand
) == CALL_EXPR
938 && return_type_with_variable_size_p (right_type
))
939 operation_type
= right_type
;
941 /* Find the best type to use for copying between aggregate types. */
942 else if (((TREE_CODE (left_type
) == ARRAY_TYPE
943 && TREE_CODE (right_type
) == ARRAY_TYPE
)
944 || (TREE_CODE (left_type
) == RECORD_TYPE
945 && TREE_CODE (right_type
) == RECORD_TYPE
))
946 && (best_type
= find_common_type (left_type
, right_type
)))
947 operation_type
= best_type
;
949 /* Otherwise use the LHS type. */
951 operation_type
= left_type
;
953 /* Ensure everything on the LHS is valid. If we have a field reference,
954 strip anything that get_inner_reference can handle. Then remove any
955 conversions between types having the same code and mode. And mark
956 VIEW_CONVERT_EXPRs with TREE_ADDRESSABLE. When done, we must have
957 either an INDIRECT_REF, a NULL_EXPR, a SAVE_EXPR or a DECL node. */
958 result
= left_operand
;
961 tree restype
= TREE_TYPE (result
);
963 if (TREE_CODE (result
) == COMPONENT_REF
964 || TREE_CODE (result
) == ARRAY_REF
965 || TREE_CODE (result
) == ARRAY_RANGE_REF
)
966 while (handled_component_p (result
))
967 result
= TREE_OPERAND (result
, 0);
969 else if (TREE_CODE (result
) == REALPART_EXPR
970 || TREE_CODE (result
) == IMAGPART_EXPR
971 || (CONVERT_EXPR_P (result
)
972 && (((TREE_CODE (restype
)
973 == TREE_CODE (operand_type (result
))
974 && TYPE_MODE (restype
)
975 == TYPE_MODE (operand_type (result
))))
976 || TYPE_ALIGN_OK (restype
))))
977 result
= TREE_OPERAND (result
, 0);
979 else if (TREE_CODE (result
) == VIEW_CONVERT_EXPR
)
981 TREE_ADDRESSABLE (result
) = 1;
982 result
= TREE_OPERAND (result
, 0);
989 gcc_assert (TREE_CODE (result
) == INDIRECT_REF
990 || TREE_CODE (result
) == NULL_EXPR
991 || TREE_CODE (result
) == SAVE_EXPR
994 /* Convert the right operand to the operation type unless it is
995 either already of the correct type or if the type involves a
996 placeholder, since the RHS may not have the same record type. */
997 if (operation_type
!= right_type
998 && !CONTAINS_PLACEHOLDER_P (TYPE_SIZE (operation_type
)))
1000 right_operand
= convert (operation_type
, right_operand
);
1001 right_type
= operation_type
;
1004 /* If the left operand is not of the same type as the operation
1005 type, wrap it up in a VIEW_CONVERT_EXPR. */
1006 if (left_type
!= operation_type
)
1007 left_operand
= unchecked_convert (operation_type
, left_operand
, false);
1009 has_side_effects
= true;
1010 modulus
= NULL_TREE
;
1014 if (!operation_type
)
1015 operation_type
= TREE_TYPE (left_type
);
1017 /* ... fall through ... */
1019 case ARRAY_RANGE_REF
:
1020 /* First look through conversion between type variants. Note that
1021 this changes neither the operation type nor the type domain. */
1022 if (TREE_CODE (left_operand
) == VIEW_CONVERT_EXPR
1023 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (left_operand
, 0)))
1024 == TYPE_MAIN_VARIANT (left_type
))
1026 left_operand
= TREE_OPERAND (left_operand
, 0);
1027 left_type
= TREE_TYPE (left_operand
);
1030 /* For a range, make sure the element type is consistent. */
1031 if (op_code
== ARRAY_RANGE_REF
1032 && TREE_TYPE (operation_type
) != TREE_TYPE (left_type
))
1035 = build_nonshared_array_type (TREE_TYPE (left_type
),
1036 TYPE_DOMAIN (operation_type
));
1037 /* Declare it now since it will never be declared otherwise. This
1038 is necessary to ensure that its subtrees are properly marked. */
1039 create_type_decl (TYPE_NAME (operation_type
), operation_type
, true,
1043 /* Then convert the right operand to its base type. This will prevent
1044 unneeded sign conversions when sizetype is wider than integer. */
1045 right_operand
= convert (right_base_type
, right_operand
);
1046 right_operand
= convert_to_index_type (right_operand
);
1047 modulus
= NULL_TREE
;
1050 case TRUTH_ANDIF_EXPR
:
1051 case TRUTH_ORIF_EXPR
:
1052 case TRUTH_AND_EXPR
:
1054 case TRUTH_XOR_EXPR
:
1056 (TREE_CODE (get_base_type (result_type
)) == BOOLEAN_TYPE
);
1057 operation_type
= left_base_type
;
1058 left_operand
= convert (operation_type
, left_operand
);
1059 right_operand
= convert (operation_type
, right_operand
);
1069 (TREE_CODE (get_base_type (result_type
)) == BOOLEAN_TYPE
);
1070 /* If either operand is a NULL_EXPR, just return a new one. */
1071 if (TREE_CODE (left_operand
) == NULL_EXPR
)
1072 return build2 (op_code
, result_type
,
1073 build1 (NULL_EXPR
, integer_type_node
,
1074 TREE_OPERAND (left_operand
, 0)),
1077 else if (TREE_CODE (right_operand
) == NULL_EXPR
)
1078 return build2 (op_code
, result_type
,
1079 build1 (NULL_EXPR
, integer_type_node
,
1080 TREE_OPERAND (right_operand
, 0)),
1083 /* If either object is a justified modular types, get the
1084 fields from within. */
1085 if (TREE_CODE (left_type
) == RECORD_TYPE
1086 && TYPE_JUSTIFIED_MODULAR_P (left_type
))
1088 left_operand
= convert (TREE_TYPE (TYPE_FIELDS (left_type
)),
1090 left_type
= TREE_TYPE (left_operand
);
1091 left_base_type
= get_base_type (left_type
);
1094 if (TREE_CODE (right_type
) == RECORD_TYPE
1095 && TYPE_JUSTIFIED_MODULAR_P (right_type
))
1097 right_operand
= convert (TREE_TYPE (TYPE_FIELDS (right_type
)),
1099 right_type
= TREE_TYPE (right_operand
);
1100 right_base_type
= get_base_type (right_type
);
1103 /* If both objects are arrays, compare them specially. */
1104 if ((TREE_CODE (left_type
) == ARRAY_TYPE
1105 || (TREE_CODE (left_type
) == INTEGER_TYPE
1106 && TYPE_HAS_ACTUAL_BOUNDS_P (left_type
)))
1107 && (TREE_CODE (right_type
) == ARRAY_TYPE
1108 || (TREE_CODE (right_type
) == INTEGER_TYPE
1109 && TYPE_HAS_ACTUAL_BOUNDS_P (right_type
))))
1111 result
= compare_arrays (input_location
,
1112 result_type
, left_operand
, right_operand
);
1113 if (op_code
== NE_EXPR
)
1114 result
= invert_truthvalue_loc (EXPR_LOCATION (result
), result
);
1116 gcc_assert (op_code
== EQ_EXPR
);
1121 /* Otherwise, the base types must be the same, unless they are both (fat)
1122 pointer types or record types. In the latter case, use the best type
1123 and convert both operands to that type. */
1124 if (left_base_type
!= right_base_type
)
1126 if (TYPE_IS_FAT_POINTER_P (left_base_type
)
1127 && TYPE_IS_FAT_POINTER_P (right_base_type
))
1129 gcc_assert (TYPE_MAIN_VARIANT (left_base_type
)
1130 == TYPE_MAIN_VARIANT (right_base_type
));
1131 best_type
= left_base_type
;
1134 else if (POINTER_TYPE_P (left_base_type
)
1135 && POINTER_TYPE_P (right_base_type
))
1137 /* Anonymous access types in Ada 2005 can point to different
1138 members of a tagged type hierarchy. */
1139 gcc_assert (TYPE_MAIN_VARIANT (TREE_TYPE (left_base_type
))
1140 == TYPE_MAIN_VARIANT (TREE_TYPE (right_base_type
))
1141 || (TYPE_ALIGN_OK (TREE_TYPE (left_base_type
))
1142 && TYPE_ALIGN_OK (TREE_TYPE (right_base_type
))));
1143 best_type
= left_base_type
;
1146 else if (TREE_CODE (left_base_type
) == RECORD_TYPE
1147 && TREE_CODE (right_base_type
) == RECORD_TYPE
)
1149 /* The only way this is permitted is if both types have the same
1150 name. In that case, one of them must not be self-referential.
1151 Use it as the best type. Even better with a fixed size. */
1152 gcc_assert (TYPE_NAME (left_base_type
)
1153 && TYPE_NAME (left_base_type
)
1154 == TYPE_NAME (right_base_type
));
1156 if (TREE_CONSTANT (TYPE_SIZE (left_base_type
)))
1157 best_type
= left_base_type
;
1158 else if (TREE_CONSTANT (TYPE_SIZE (right_base_type
)))
1159 best_type
= right_base_type
;
1160 else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (left_base_type
)))
1161 best_type
= left_base_type
;
1162 else if (!CONTAINS_PLACEHOLDER_P (TYPE_SIZE (right_base_type
)))
1163 best_type
= right_base_type
;
1171 left_operand
= convert (best_type
, left_operand
);
1172 right_operand
= convert (best_type
, right_operand
);
1176 left_operand
= convert (left_base_type
, left_operand
);
1177 right_operand
= convert (right_base_type
, right_operand
);
1180 /* If both objects are fat pointers, compare them specially. */
1181 if (TYPE_IS_FAT_POINTER_P (left_base_type
))
1184 = compare_fat_pointers (input_location
,
1185 result_type
, left_operand
, right_operand
);
1186 if (op_code
== NE_EXPR
)
1187 result
= invert_truthvalue_loc (EXPR_LOCATION (result
), result
);
1189 gcc_assert (op_code
== EQ_EXPR
);
1194 modulus
= NULL_TREE
;
1201 /* The RHS of a shift can be any type. Also, ignore any modulus
1202 (we used to abort, but this is needed for unchecked conversion
1203 to modular types). Otherwise, processing is the same as normal. */
1204 gcc_assert (operation_type
== left_base_type
);
1205 modulus
= NULL_TREE
;
1206 left_operand
= convert (operation_type
, left_operand
);
1212 /* For binary modulus, if the inputs are in range, so are the
1214 if (modulus
&& integer_pow2p (modulus
))
1215 modulus
= NULL_TREE
;
1219 gcc_assert (TREE_TYPE (result_type
) == left_base_type
1220 && TREE_TYPE (result_type
) == right_base_type
);
1221 left_operand
= convert (left_base_type
, left_operand
);
1222 right_operand
= convert (right_base_type
, right_operand
);
1225 case TRUNC_DIV_EXPR
: case TRUNC_MOD_EXPR
:
1226 case CEIL_DIV_EXPR
: case CEIL_MOD_EXPR
:
1227 case FLOOR_DIV_EXPR
: case FLOOR_MOD_EXPR
:
1228 case ROUND_DIV_EXPR
: case ROUND_MOD_EXPR
:
1229 /* These always produce results lower than either operand. */
1230 modulus
= NULL_TREE
;
1233 case POINTER_PLUS_EXPR
:
1234 gcc_assert (operation_type
== left_base_type
1235 && sizetype
== right_base_type
);
1236 left_operand
= convert (operation_type
, left_operand
);
1237 right_operand
= convert (sizetype
, right_operand
);
1240 case PLUS_NOMOD_EXPR
:
1241 case MINUS_NOMOD_EXPR
:
1242 if (op_code
== PLUS_NOMOD_EXPR
)
1243 op_code
= PLUS_EXPR
;
1245 op_code
= MINUS_EXPR
;
1246 modulus
= NULL_TREE
;
1248 /* ... fall through ... */
1252 /* Avoid doing arithmetics in ENUMERAL_TYPE or BOOLEAN_TYPE like the
1253 other compilers. Contrary to C, Ada doesn't allow arithmetics in
1254 these types but can generate addition/subtraction for Succ/Pred. */
1256 && (TREE_CODE (operation_type
) == ENUMERAL_TYPE
1257 || TREE_CODE (operation_type
) == BOOLEAN_TYPE
))
1258 operation_type
= left_base_type
= right_base_type
1259 = gnat_type_for_mode (TYPE_MODE (operation_type
),
1260 TYPE_UNSIGNED (operation_type
));
1262 /* ... fall through ... */
1266 /* The result type should be the same as the base types of the
1267 both operands (and they should be the same). Convert
1268 everything to the result type. */
1270 gcc_assert (operation_type
== left_base_type
1271 && left_base_type
== right_base_type
);
1272 left_operand
= convert (operation_type
, left_operand
);
1273 right_operand
= convert (operation_type
, right_operand
);
1276 if (modulus
&& !integer_pow2p (modulus
))
1278 result
= nonbinary_modular_operation (op_code
, operation_type
,
1279 left_operand
, right_operand
);
1280 modulus
= NULL_TREE
;
1282 /* If either operand is a NULL_EXPR, just return a new one. */
1283 else if (TREE_CODE (left_operand
) == NULL_EXPR
)
1284 return build1 (NULL_EXPR
, operation_type
, TREE_OPERAND (left_operand
, 0));
1285 else if (TREE_CODE (right_operand
) == NULL_EXPR
)
1286 return build1 (NULL_EXPR
, operation_type
, TREE_OPERAND (right_operand
, 0));
1287 else if (op_code
== ARRAY_REF
|| op_code
== ARRAY_RANGE_REF
)
1289 result
= build4 (op_code
, operation_type
, left_operand
, right_operand
,
1290 NULL_TREE
, NULL_TREE
);
1292 result
= fold (result
);
1294 else if (op_code
== INIT_EXPR
|| op_code
== MODIFY_EXPR
)
1295 result
= build2 (op_code
, void_type_node
, left_operand
, right_operand
);
1297 result
= build2 (op_code
, operation_type
, left_operand
, right_operand
);
1300 = fold_build2 (op_code
, operation_type
, left_operand
, right_operand
);
1302 if (TREE_CONSTANT (result
))
1304 else if (op_code
== ARRAY_REF
|| op_code
== ARRAY_RANGE_REF
)
1306 if (TYPE_VOLATILE (operation_type
))
1307 TREE_THIS_VOLATILE (result
) = 1;
1309 else if (TREE_CONSTANT (left_operand
) && TREE_CONSTANT (right_operand
))
1310 TREE_CONSTANT (result
) = 1;
1312 if (has_side_effects
)
1313 TREE_SIDE_EFFECTS (result
) = 1;
1315 /* If we are working with modular types, perform the MOD operation
1316 if something above hasn't eliminated the need for it. */
1319 modulus
= convert (operation_type
, modulus
);
1321 result
= build2 (FLOOR_MOD_EXPR
, operation_type
, result
, modulus
);
1323 result
= fold_build2 (FLOOR_MOD_EXPR
, operation_type
, result
, modulus
);
1326 if (result_type
&& result_type
!= operation_type
)
1327 result
= convert (result_type
, result
);
1332 /* Similar, but for unary operations. */
/* NOTE(review): this region is a lossy text extraction of build_unary_op
   from gcc/ada/gcc-interface/utils2.cc.  The numeric prefixes on each line
   are the original file's line numbers fused into the text, and gaps in
   that numbering (e.g. 1339 -> 1343, 1602 -> 1610, 1685 -> 1688) show that
   whole lines — braces, case labels, returns — were dropped.  The text is
   therefore NOT compilable as-is; it is left byte-identical here, with
   review comments only.  Restore from the upstream file before building.  */
/* Entry: op_code is the unary tree code, result_type the requested type
   (may be NULL_TREE), operand the input tree.  Locals below cache the
   operand type, its base type and the working operation type.  */
1335 build_unary_op (enum tree_code op_code
, tree result_type
, tree operand
)
1337 tree type
= TREE_TYPE (operand
);
1338 tree base_type
= get_base_type (type
);
1339 tree operation_type
= result_type
;
/* Look through a justified modular record to its single field's type.  */
1343 && TREE_CODE (operation_type
) == RECORD_TYPE
1344 && TYPE_JUSTIFIED_MODULAR_P (operation_type
))
1345 operation_type
= TREE_TYPE (TYPE_FIELDS (operation_type
));
/* Strip an extra integer subtype down to its base type.  */
1348 && TREE_CODE (operation_type
) == INTEGER_TYPE
1349 && TYPE_EXTRA_SUBTYPE_P (operation_type
))
1350 operation_type
= get_base_type (operation_type
);
/* REALPART/IMAGPART (presumably — the case labels were lost): default the
   result type to the component type of the complex operand.  */
1356 if (!operation_type
)
1357 result_type
= operation_type
= TREE_TYPE (type
);
1359 gcc_assert (result_type
== TREE_TYPE (type
));
1361 result
= fold_build1 (op_code
, operation_type
, operand
);
/* Boolean negation: assert a boolean result type and invert.  */
1364 case TRUTH_NOT_EXPR
:
1366 (TREE_CODE (get_base_type (result_type
)) == BOOLEAN_TYPE
);
1367 result
= invert_truthvalue_loc (EXPR_LOCATION (operand
), operand
);
1368 /* When not optimizing, fold the result as invert_truthvalue_loc
1369 doesn't fold the result of comparisons. This is intended to undo
1370 the trick used for boolean rvalues in gnat_to_gnu. */
1372 result
= fold (result
);
/* Address-taking cases (ADDR_EXPR / ATTR_ADDR_EXPR); dispatch further on
   the shape of the operand.  */
1375 case ATTR_ADDR_EXPR
:
1377 switch (TREE_CODE (operand
))
1380 case UNCONSTRAINED_ARRAY_REF
:
1381 result
= TREE_OPERAND (operand
, 0);
1383 /* Make sure the type here is a pointer, not a reference.
1384 GCC wants pointer types for function addresses. */
1386 result_type
= build_pointer_type (type
);
1388 /* If the underlying object can alias everything, propagate the
1389 property since we are effectively retrieving the object. */
1390 if (POINTER_TYPE_P (TREE_TYPE (result
))
1391 && TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (result
)))
1393 if (TREE_CODE (result_type
) == POINTER_TYPE
1394 && !TYPE_REF_CAN_ALIAS_ALL (result_type
))
1396 = build_pointer_type_for_mode (TREE_TYPE (result_type
),
1397 TYPE_MODE (result_type
),
1399 else if (TREE_CODE (result_type
) == REFERENCE_TYPE
1400 && !TYPE_REF_CAN_ALIAS_ALL (result_type
))
1402 = build_reference_type_for_mode (TREE_TYPE (result_type
),
1403 TYPE_MODE (result_type
),
/* Presumably the INDIRECT_REF operand case: taking the address of a
   dereference yields the pointer itself — label lost in extraction.  */
1410 TREE_TYPE (result
) = type
= build_pointer_type (type
);
1414 /* Fold a compound expression if it has unconstrained array type
1415 since the middle-end cannot handle it. But we don't it in the
1416 general case because it may introduce aliasing issues if the
1417 first operand is an indirect assignment and the second operand
1418 the corresponding address, e.g. for an allocator. However do
1419 it for a return value to expose it for later recognition. */
1420 if (TREE_CODE (type
) == UNCONSTRAINED_ARRAY_TYPE
1421 || (TREE_CODE (TREE_OPERAND (operand
, 1)) == VAR_DECL
1422 && DECL_RETURN_VALUE_P (TREE_OPERAND (operand
, 1))))
1424 result
= build_unary_op (ADDR_EXPR
, result_type
,
1425 TREE_OPERAND (operand
, 1));
1426 result
= build2 (COMPOUND_EXPR
, TREE_TYPE (result
),
1427 TREE_OPERAND (operand
, 0), result
);
/* ARRAY_REF/ARRAY_RANGE_REF (and, per the comment below, COMPONENT_REF)
   prefixes of an 'Address attribute.  */
1433 case ARRAY_RANGE_REF
:
1436 /* If this is for 'Address, find the address of the prefix and add
1437 the offset to the field. Otherwise, do this the normal way. */
1438 if (op_code
== ATTR_ADDR_EXPR
)
1444 int unsignedp
, reversep
, volatilep
;
1446 inner
= get_inner_reference (operand
, &bitsize
, &bitpos
, &offset
,
1447 &mode
, &unsignedp
, &reversep
,
1450 /* If INNER is a padding type whose field has a self-referential
1451 size, convert to that inner type. We know the offset is zero
1452 and we need to have that type visible. */
1453 if (type_is_padding_self_referential (TREE_TYPE (inner
)))
1454 inner
= convert (TREE_TYPE (TYPE_FIELDS (TREE_TYPE (inner
))),
1457 /* Compute the offset as a byte offset from INNER. */
1459 offset
= size_zero_node
;
1462 = size_binop (PLUS_EXPR
, offset
,
1463 size_int (bits_to_bytes_round_down (bitpos
)));
1465 /* Take the address of INNER, convert it to a pointer to our type
1466 and add the offset. */
1467 inner
= build_unary_op (ADDR_EXPR
,
1468 build_pointer_type (TREE_TYPE (operand
)),
1470 result
= build_binary_op (POINTER_PLUS_EXPR
, TREE_TYPE (inner
),
/* CONSTRUCTOR operand (label lost): special-case padded aggregates.  */
1477 /* If this is just a constructor for a padded record, we can
1478 just take the address of the single field and convert it to
1479 a pointer to our type. */
1480 if (TYPE_IS_PADDING_P (type
))
1483 = build_unary_op (ADDR_EXPR
,
1484 build_pointer_type (TREE_TYPE (operand
)),
1485 CONSTRUCTOR_ELT (operand
, 0)->value
);
/* NOP_EXPR/conversion operand: hoist the address through an aggregate
   view conversion.  */
1491 if (AGGREGATE_TYPE_P (type
)
1492 && AGGREGATE_TYPE_P (TREE_TYPE (TREE_OPERAND (operand
, 0))))
1493 return build_unary_op (ADDR_EXPR
, result_type
,
1494 TREE_OPERAND (operand
, 0));
1496 /* ... fallthru ... */
1498 case VIEW_CONVERT_EXPR
:
1499 /* If this just a variant conversion or if the conversion doesn't
1500 change the mode, get the result type from this type and go down.
1501 This is needed for conversions of CONST_DECLs, to eventually get
1502 to the address of their CORRESPONDING_VARs. */
1503 if ((TYPE_MAIN_VARIANT (type
)
1504 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (operand
, 0))))
1505 || (TYPE_MODE (type
) != BLKmode
1506 && (TYPE_MODE (type
)
1507 == TYPE_MODE (TREE_TYPE (TREE_OPERAND (operand
, 0))))))
1508 return build_unary_op (ADDR_EXPR
,
1509 (result_type
? result_type
1510 : build_pointer_type (type
)),
1511 TREE_OPERAND (operand
, 0));
/* CONST_DECL operand (label lost): take the address of the variable
   that materializes the constant.  */
1515 operand
= DECL_CONST_CORRESPONDING_VAR (operand
);
1517 /* ... fall through ... */
/* Default case of the inner switch: generic address-taking.  */
1522 /* If we are taking the address of a padded record whose field
1523 contains a template, take the address of the field. */
1524 if (TYPE_IS_PADDING_P (type
)
1525 && TREE_CODE (TREE_TYPE (TYPE_FIELDS (type
))) == RECORD_TYPE
1526 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (TYPE_FIELDS (type
))))
1528 type
= TREE_TYPE (TYPE_FIELDS (type
));
1529 operand
= convert (type
, operand
);
1532 gnat_mark_addressable (operand
);
1533 result
= build_fold_addr_expr (operand
);
1536 if (TREE_CONSTANT (operand
) || staticp (operand
))
1537 TREE_CONSTANT (result
) = 1;
/* Presumably the INDIRECT_REF case of the outer switch — label lost.
   Dereference a pointer, with special handling for thin/fat pointers.  */
1543 tree t
= remove_conversions (operand
, false);
1544 bool can_never_be_null
= DECL_P (t
) && DECL_CAN_NEVER_BE_NULL_P (t
);
1546 /* If TYPE is a thin pointer, either first retrieve the base if this
1547 is an expression with an offset built for the initialization of an
1548 object with an unconstrained nominal subtype, or else convert to
1550 if (TYPE_IS_THIN_POINTER_P (type
))
1552 tree rec_type
= TREE_TYPE (type
);
1554 if (TREE_CODE (operand
) == POINTER_PLUS_EXPR
1555 && TREE_OPERAND (operand
, 1)
1556 == byte_position (DECL_CHAIN (TYPE_FIELDS (rec_type
)))
1557 && TREE_CODE (TREE_OPERAND (operand
, 0)) == NOP_EXPR
)
1559 operand
= TREE_OPERAND (TREE_OPERAND (operand
, 0), 0);
1560 type
= TREE_TYPE (operand
);
1562 else if (TYPE_UNCONSTRAINED_ARRAY (rec_type
))
1565 = convert (TREE_TYPE (TYPE_UNCONSTRAINED_ARRAY (rec_type
)),
1567 type
= TREE_TYPE (operand
);
1571 /* If we want to refer to an unconstrained array, use the appropriate
1572 expression. But this will never survive down to the back-end. */
1573 if (TYPE_IS_FAT_POINTER_P (type
))
1575 result
= build1 (UNCONSTRAINED_ARRAY_REF
,
1576 TYPE_UNCONSTRAINED_ARRAY (type
), operand
);
1577 TREE_READONLY (result
)
1578 = TYPE_READONLY (TYPE_UNCONSTRAINED_ARRAY (type
));
1581 /* If we are dereferencing an ADDR_EXPR, return its operand. */
1582 else if (TREE_CODE (operand
) == ADDR_EXPR
)
1583 result
= TREE_OPERAND (operand
, 0);
1585 /* Otherwise, build and fold the indirect reference. */
1588 result
= build_fold_indirect_ref (operand
);
1589 TREE_READONLY (result
) = TYPE_READONLY (TREE_TYPE (type
));
/* Propagate volatility of the designated type to the reference.  */
1592 if (!TYPE_IS_FAT_POINTER_P (type
) && TYPE_VOLATILE (TREE_TYPE (type
)))
1594 TREE_SIDE_EFFECTS (result
) = 1;
1595 if (TREE_CODE (result
) == INDIRECT_REF
)
1596 TREE_THIS_VOLATILE (result
) = TYPE_VOLATILE (TREE_TYPE (result
));
/* A dereference of a pointer that can never be null cannot trap.  */
1599 if ((TREE_CODE (result
) == INDIRECT_REF
1600 || TREE_CODE (result
) == UNCONSTRAINED_ARRAY_REF
)
1601 && can_never_be_null
)
1602 TREE_THIS_NOTRAP (result
) = 1;
/* NEGATE_EXPR / BIT_NOT_EXPR (case labels lost): arithmetic on modular
   types, with separate strategies for power-of-two and nonbinary moduli.  */
1610 tree modulus
= ((operation_type
1611 && TREE_CODE (operation_type
) == INTEGER_TYPE
1612 && TYPE_MODULAR_P (operation_type
))
1613 ? TYPE_MODULUS (operation_type
) : NULL_TREE
);
1614 int mod_pow2
= modulus
&& integer_pow2p (modulus
);
1616 /* If this is a modular type, there are various possibilities
1617 depending on the operation and whether the modulus is a
1618 power of two or not. */
1622 gcc_assert (operation_type
== base_type
);
1623 operand
= convert (operation_type
, operand
);
1625 /* The fastest in the negate case for binary modulus is
1626 the straightforward code; the TRUNC_MOD_EXPR below
1627 is an AND operation. */
1628 if (op_code
== NEGATE_EXPR
&& mod_pow2
)
1629 result
= fold_build2 (TRUNC_MOD_EXPR
, operation_type
,
1630 fold_build1 (NEGATE_EXPR
, operation_type
,
1634 /* For nonbinary negate case, return zero for zero operand,
1635 else return the modulus minus the operand. If the modulus
1636 is a power of two minus one, we can do the subtraction
1637 as an XOR since it is equivalent and faster on most machines. */
1638 else if (op_code
== NEGATE_EXPR
&& !mod_pow2
)
1640 if (integer_pow2p (fold_build2 (PLUS_EXPR
, operation_type
,
1642 build_int_cst (operation_type
,
1644 result
= fold_build2 (BIT_XOR_EXPR
, operation_type
,
1647 result
= fold_build2 (MINUS_EXPR
, operation_type
,
1650 result
= fold_build3 (COND_EXPR
, operation_type
,
1651 fold_build2 (NE_EXPR
,
1655 (operation_type
, 0)),
1660 /* For the NOT cases, we need a constant equal to
1661 the modulus minus one. For a binary modulus, we
1662 XOR against the constant and subtract the operand from
1663 that constant for nonbinary modulus. */
1665 tree cnst
= fold_build2 (MINUS_EXPR
, operation_type
, modulus
,
1666 build_int_cst (operation_type
, 1));
1669 result
= fold_build2 (BIT_XOR_EXPR
, operation_type
,
1672 result
= fold_build2 (MINUS_EXPR
, operation_type
,
1680 /* ... fall through ... */
/* Default case: straightforward fold of the unary operation.  */
1683 gcc_assert (operation_type
== base_type
);
1684 result
= fold_build1 (op_code
, operation_type
,
1685 convert (operation_type
, operand
));
/* Finally convert to the requested result type if different.  */
1688 if (result_type
&& TREE_TYPE (result
) != result_type
)
1689 result
= convert (result_type
, result
);
1694 /* Similar, but for COND_EXPR. */
1697 build_cond_expr (tree result_type
, tree condition_operand
,
1698 tree true_operand
, tree false_operand
)
1700 bool addr_p
= false;
1703 /* The front-end verified that result, true and false operands have
1704 same base type. Convert everything to the result type. */
1705 true_operand
= convert (result_type
, true_operand
);
1706 false_operand
= convert (result_type
, false_operand
);
1708 /* If the result type is unconstrained, take the address of the operands and
1709 then dereference the result. Likewise if the result type is passed by
1710 reference, because creating a temporary of this type is not allowed. */
1711 if (TREE_CODE (result_type
) == UNCONSTRAINED_ARRAY_TYPE
1712 || TYPE_IS_BY_REFERENCE_P (result_type
)
1713 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type
)))
1715 result_type
= build_pointer_type (result_type
);
1716 true_operand
= build_unary_op (ADDR_EXPR
, result_type
, true_operand
);
1717 false_operand
= build_unary_op (ADDR_EXPR
, result_type
, false_operand
);
1721 result
= fold_build3 (COND_EXPR
, result_type
, condition_operand
,
1722 true_operand
, false_operand
);
1724 /* If we have a common SAVE_EXPR (possibly surrounded by arithmetics)
1725 in both arms, make sure it gets evaluated by moving it ahead of the
1726 conditional expression. This is necessary because it is evaluated
1727 in only one place at run time and would otherwise be uninitialized
1728 in one of the arms. */
1729 true_operand
= skip_simple_arithmetic (true_operand
);
1730 false_operand
= skip_simple_arithmetic (false_operand
);
1732 if (true_operand
== false_operand
&& TREE_CODE (true_operand
) == SAVE_EXPR
)
1733 result
= build2 (COMPOUND_EXPR
, result_type
, true_operand
, result
);
1736 result
= build_unary_op (INDIRECT_REF
, NULL_TREE
, result
);
1741 /* Similar, but for COMPOUND_EXPR. */
1744 build_compound_expr (tree result_type
, tree stmt_operand
, tree expr_operand
)
1746 bool addr_p
= false;
1749 /* If the result type is unconstrained, take the address of the operand and
1750 then dereference the result. Likewise if the result type is passed by
1751 reference, but this is natively handled in the gimplifier. */
1752 if (TREE_CODE (result_type
) == UNCONSTRAINED_ARRAY_TYPE
1753 || CONTAINS_PLACEHOLDER_P (TYPE_SIZE (result_type
)))
1755 result_type
= build_pointer_type (result_type
);
1756 expr_operand
= build_unary_op (ADDR_EXPR
, result_type
, expr_operand
);
1760 result
= fold_build2 (COMPOUND_EXPR
, result_type
, stmt_operand
,
1764 result
= build_unary_op (INDIRECT_REF
, NULL_TREE
, result
);
1769 /* Conveniently construct a function call expression. FNDECL names the
1770 function to be called, N is the number of arguments, and the "..."
1771 parameters are the argument expressions. Unlike build_call_expr
1772 this doesn't fold the call, hence it will always return a CALL_EXPR. */
1775 build_call_n_expr (tree fndecl
, int n
, ...)
1778 tree fntype
= TREE_TYPE (fndecl
);
1779 tree fn
= build1 (ADDR_EXPR
, build_pointer_type (fntype
), fndecl
);
1782 fn
= build_call_valist (TREE_TYPE (fntype
), fn
, n
, ap
);
1787 /* Build a goto to LABEL for a raise, with an optional call to Local_Raise.
1788 MSG gives the exception's identity for the call to Local_Raise, if any. */
1791 build_goto_raise (Entity_Id gnat_label
, int msg
)
1793 tree gnu_label
= gnat_to_gnu_entity (gnat_label
, NULL_TREE
, false);
1794 tree gnu_result
= build1 (GOTO_EXPR
, void_type_node
, gnu_label
);
1795 Entity_Id local_raise
= Get_Local_Raise_Call_Entity ();
1797 /* If Local_Raise is present, build Local_Raise (Exception'Identity). */
1798 if (Present (local_raise
))
1800 tree gnu_local_raise
1801 = gnat_to_gnu_entity (local_raise
, NULL_TREE
, false);
1802 tree gnu_exception_entity
1803 = gnat_to_gnu_entity (Get_RT_Exception_Entity (msg
), NULL_TREE
, false);
1805 = build_call_n_expr (gnu_local_raise
, 1,
1806 build_unary_op (ADDR_EXPR
, NULL_TREE
,
1807 gnu_exception_entity
));
1809 = build2 (COMPOUND_EXPR
, void_type_node
, gnu_call
, gnu_result
);
1812 TREE_USED (gnu_label
) = 1;
1816 /* Expand the SLOC of GNAT_NODE, if present, into tree location information
1817 pointed to by FILENAME, LINE and COL. Fall back to the current location
1818 if GNAT_NODE is absent or has no SLOC. */
1821 expand_sloc (Node_Id gnat_node
, tree
*filename
, tree
*line
, tree
*col
)
1824 int line_number
, column_number
;
1826 if (Debug_Flag_NN
|| Exception_Locations_Suppressed
)
1832 else if (Present (gnat_node
) && Sloc (gnat_node
) != No_Location
)
1834 str
= Get_Name_String
1835 (Debug_Source_Name (Get_Source_File_Index (Sloc (gnat_node
))));
1836 line_number
= Get_Logical_Line_Number (Sloc (gnat_node
));
1837 column_number
= Get_Column_Number (Sloc (gnat_node
));
1841 str
= lbasename (LOCATION_FILE (input_location
));
1842 line_number
= LOCATION_LINE (input_location
);
1843 column_number
= LOCATION_COLUMN (input_location
);
1846 const int len
= strlen (str
);
1847 *filename
= build_string (len
, str
);
1848 TREE_TYPE (*filename
) = build_array_type (char_type_node
,
1849 build_index_type (size_int (len
)));
1850 *line
= build_int_cst (NULL_TREE
, line_number
);
1852 *col
= build_int_cst (NULL_TREE
, column_number
);
1855 /* Build a call to a function that raises an exception and passes file name
1856 and line number, if requested. MSG says which exception function to call.
1857 GNAT_NODE is the node conveying the source location for which the error
1858 should be signaled, or Empty in which case the error is signaled for the
1859 current location. KIND says which kind of exception node this is for,
1860 among N_Raise_{Constraint,Storage,Program}_Error. */
1863 build_call_raise (int msg
, Node_Id gnat_node
, char kind
)
1865 Entity_Id gnat_label
= get_exception_label (kind
);
1866 tree fndecl
= gnat_raise_decls
[msg
];
1867 tree filename
, line
;
1869 /* If this is to be done as a goto, handle that case. */
1870 if (Present (gnat_label
))
1871 return build_goto_raise (gnat_label
, msg
);
1873 expand_sloc (gnat_node
, &filename
, &line
, NULL
);
1876 build_call_n_expr (fndecl
, 2,
1878 build_pointer_type (char_type_node
),
1883 /* Similar to build_call_raise, with extra information about the column
1884 where the check failed. */
1887 build_call_raise_column (int msg
, Node_Id gnat_node
, char kind
)
1889 Entity_Id gnat_label
= get_exception_label (kind
);
1890 tree fndecl
= gnat_raise_decls_ext
[msg
];
1891 tree filename
, line
, col
;
1893 /* If this is to be done as a goto, handle that case. */
1894 if (Present (gnat_label
))
1895 return build_goto_raise (gnat_label
, msg
);
1897 expand_sloc (gnat_node
, &filename
, &line
, &col
);
1900 build_call_n_expr (fndecl
, 3,
1902 build_pointer_type (char_type_node
),
1907 /* Similar to build_call_raise_column, for an index or range check exception ,
1908 with extra information of the form "INDEX out of range FIRST..LAST". */
1911 build_call_raise_range (int msg
, Node_Id gnat_node
, char kind
,
1912 tree index
, tree first
, tree last
)
1914 Entity_Id gnat_label
= get_exception_label (kind
);
1915 tree fndecl
= gnat_raise_decls_ext
[msg
];
1916 tree filename
, line
, col
;
1918 /* If this is to be done as a goto, handle that case. */
1919 if (Present (gnat_label
))
1920 return build_goto_raise (gnat_label
, msg
);
1922 expand_sloc (gnat_node
, &filename
, &line
, &col
);
1925 build_call_n_expr (fndecl
, 6,
1927 build_pointer_type (char_type_node
),
1930 convert (integer_type_node
, index
),
1931 convert (integer_type_node
, first
),
1932 convert (integer_type_node
, last
));
1935 /* qsort comparer for the bit positions of two constructor elements
1936 for record components. */
1939 compare_elmt_bitpos (const PTR rt1
, const PTR rt2
)
1941 const constructor_elt
* const elmt1
= (const constructor_elt
*) rt1
;
1942 const constructor_elt
* const elmt2
= (const constructor_elt
*) rt2
;
1943 const_tree
const field1
= elmt1
->index
;
1944 const_tree
const field2
= elmt2
->index
;
1946 = tree_int_cst_compare (bit_position (field1
), bit_position (field2
));
1948 return ret
? ret
: (int) (DECL_UID (field1
) - DECL_UID (field2
));
1951 /* Return a CONSTRUCTOR of TYPE whose elements are V. */
1954 gnat_build_constructor (tree type
, vec
<constructor_elt
, va_gc
> *v
)
1956 bool allconstant
= (TREE_CODE (TYPE_SIZE (type
)) == INTEGER_CST
);
1957 bool read_only
= true;
1958 bool side_effects
= false;
1959 tree result
, obj
, val
;
1960 unsigned int n_elmts
;
1962 /* Scan the elements to see if they are all constant or if any has side
1963 effects, to let us set global flags on the resulting constructor. Count
1964 the elements along the way for possible sorting purposes below. */
1965 FOR_EACH_CONSTRUCTOR_ELT (v
, n_elmts
, obj
, val
)
1967 /* The predicate must be in keeping with output_constructor and, unlike
1968 initializer_constant_valid_p, we accept "&{...}" because we'll put
1969 the CONSTRUCTOR into the constant pool during gimplification. */
1970 if ((!TREE_CONSTANT (val
) && !TREE_STATIC (val
))
1971 || (TREE_CODE (type
) == RECORD_TYPE
1972 && CONSTRUCTOR_BITFIELD_P (obj
)
1973 && !initializer_constant_valid_for_bitfield_p (val
))
1974 || (!initializer_constant_valid_p (val
,
1976 TYPE_REVERSE_STORAGE_ORDER (type
))
1977 && !(TREE_CODE (val
) == ADDR_EXPR
1978 && TREE_CODE (TREE_OPERAND (val
, 0)) == CONSTRUCTOR
1979 && TREE_CONSTANT (TREE_OPERAND (val
, 0)))))
1980 allconstant
= false;
1982 if (!TREE_READONLY (val
))
1985 if (TREE_SIDE_EFFECTS (val
))
1986 side_effects
= true;
1989 /* For record types with constant components only, sort field list
1990 by increasing bit position. This is necessary to ensure the
1991 constructor can be output as static data. */
1992 if (allconstant
&& TREE_CODE (type
) == RECORD_TYPE
&& n_elmts
> 1)
1993 v
->qsort (compare_elmt_bitpos
);
1995 result
= build_constructor (type
, v
);
1996 CONSTRUCTOR_NO_CLEARING (result
) = 1;
1997 TREE_CONSTANT (result
) = TREE_STATIC (result
) = allconstant
;
1998 TREE_SIDE_EFFECTS (result
) = side_effects
;
1999 TREE_READONLY (result
) = TYPE_READONLY (type
) || read_only
|| allconstant
;
2003 /* Return a COMPONENT_REF to access FIELD in RECORD, or NULL_TREE if the field
2004 is not found in the record. Don't fold the result if NO_FOLD is true. */
2007 build_simple_component_ref (tree record
, tree field
, bool no_fold
)
2009 tree type
= TYPE_MAIN_VARIANT (TREE_TYPE (record
));
2012 /* The failure of this assertion will very likely come from a missing
2013 insertion of an explicit dereference. */
2014 gcc_assert (RECORD_OR_UNION_TYPE_P (type
) && COMPLETE_TYPE_P (type
));
2016 /* Try to fold a conversion from another record or union type unless the type
2017 contains a placeholder as it might be needed for a later substitution. */
2018 if (TREE_CODE (record
) == VIEW_CONVERT_EXPR
2019 && RECORD_OR_UNION_TYPE_P (TREE_TYPE (TREE_OPERAND (record
, 0)))
2020 && !type_contains_placeholder_p (type
))
2022 tree op
= TREE_OPERAND (record
, 0);
2024 /* If this is an unpadding operation, convert the underlying object to
2025 the unpadded type directly. */
2026 if (TYPE_IS_PADDING_P (type
) && field
== TYPE_FIELDS (type
))
2027 return convert (TREE_TYPE (field
), op
);
2029 /* Otherwise try to access FIELD directly in the underlying type, but
2030 make sure that the form of the reference doesn't change too much;
2031 this can happen for an unconstrained bit-packed array type whose
2032 constrained form can be an integer type. */
2033 ref
= build_simple_component_ref (op
, field
, no_fold
);
2034 if (ref
&& TREE_CODE (TREE_TYPE (ref
)) == TREE_CODE (TREE_TYPE (field
)))
2038 /* If this field is not in the specified record, see if we can find a field
2039 in the specified record whose original field is the same as this one. */
2040 if (DECL_CONTEXT (field
) != type
)
2044 /* First loop through normal components. */
2045 for (new_field
= TYPE_FIELDS (type
);
2047 new_field
= DECL_CHAIN (new_field
))
2048 if (SAME_FIELD_P (field
, new_field
))
2051 /* Next, loop through DECL_INTERNAL_P components if we haven't found the
2052 component in the first search. Doing this search in two steps is
2053 required to avoid hidden homonymous fields in the _Parent field. */
2055 for (new_field
= TYPE_FIELDS (type
);
2057 new_field
= DECL_CHAIN (new_field
))
2058 if (DECL_INTERNAL_P (new_field
)
2059 && RECORD_OR_UNION_TYPE_P (TREE_TYPE (new_field
)))
2062 = build_simple_component_ref (record
, new_field
, no_fold
);
2063 ref
= build_simple_component_ref (field_ref
, field
, no_fold
);
2074 /* If the field's offset has overflowed, do not try to access it, as doing
2075 so may trigger sanity checks deeper in the back-end. Note that we don't
2076 need to warn since this will be done on trying to declare the object. */
2077 if (TREE_CODE (DECL_FIELD_OFFSET (field
)) == INTEGER_CST
2078 && TREE_OVERFLOW (DECL_FIELD_OFFSET (field
)))
2079 return build1 (NULL_EXPR
, TREE_TYPE (field
),
2080 build_call_raise (SE_Object_Too_Large
, Empty
,
2081 N_Raise_Storage_Error
));
2083 ref
= build3 (COMPONENT_REF
, TREE_TYPE (field
), record
, field
, NULL_TREE
);
2085 if (TREE_READONLY (record
)
2086 || TREE_READONLY (field
)
2087 || TYPE_READONLY (type
))
2088 TREE_READONLY (ref
) = 1;
2090 if (TREE_THIS_VOLATILE (record
)
2091 || TREE_THIS_VOLATILE (field
)
2092 || TYPE_VOLATILE (type
))
2093 TREE_THIS_VOLATILE (ref
) = 1;
2098 /* The generic folder may punt in this case because the inner array type
2099 can be self-referential, but folding is in fact not problematic. */
2100 if (TREE_CODE (record
) == CONSTRUCTOR
2101 && TYPE_CONTAINS_TEMPLATE_P (TREE_TYPE (record
)))
2103 vec
<constructor_elt
, va_gc
> *elts
= CONSTRUCTOR_ELTS (record
);
2104 unsigned HOST_WIDE_INT idx
;
2106 FOR_EACH_CONSTRUCTOR_ELT (elts
, idx
, index
, value
)
2115 /* Likewise, but return NULL_EXPR and generate a Program_Error if the
2116 field is not found in the record. */
2119 build_component_ref (tree record
, tree field
, bool no_fold
)
2121 tree ref
= build_simple_component_ref (record
, field
, no_fold
);
2125 /* The missing field should have been detected in the front-end. */
2126 gigi_checking_assert (false);
2128 /* Assume this is an invalid user field so raise Program_Error. */
2129 return build1 (NULL_EXPR
, TREE_TYPE (field
),
2130 build_call_raise (PE_Explicit_Raise
, Empty
,
2131 N_Raise_Program_Error
));
2134 /* Helper for build_call_alloc_dealloc, with arguments to be interpreted
2135 identically. Process the case where a GNAT_PROC to call is provided. */
2138 build_call_alloc_dealloc_proc (tree gnu_obj
, tree gnu_size
, tree gnu_type
,
2139 Entity_Id gnat_proc
, Entity_Id gnat_pool
)
2141 tree gnu_proc
= gnat_to_gnu (gnat_proc
);
2144 /* A storage pool's underlying type is a record type (for both predefined
2145 storage pools and GNAT simple storage pools). The secondary stack uses
2146 the same mechanism, but its pool object (SS_Pool) is an integer. */
2147 if (Is_Record_Type (Underlying_Type (Etype (gnat_pool
))))
2149 /* The size is the third parameter; the alignment is the
2151 Entity_Id gnat_size_type
2152 = Etype (Next_Formal (Next_Formal (First_Formal (gnat_proc
))));
2153 tree gnu_size_type
= gnat_to_gnu_type (gnat_size_type
);
2155 tree gnu_pool
= gnat_to_gnu (gnat_pool
);
2156 tree gnu_pool_addr
= build_unary_op (ADDR_EXPR
, NULL_TREE
, gnu_pool
);
2157 tree gnu_align
= size_int (TYPE_ALIGN (gnu_type
) / BITS_PER_UNIT
);
2159 gnu_size
= convert (gnu_size_type
, gnu_size
);
2160 gnu_align
= convert (gnu_size_type
, gnu_align
);
2162 /* The first arg is always the address of the storage pool; next
2163 comes the address of the object, for a deallocator, then the
2164 size and alignment. */
2166 gnu_call
= build_call_n_expr (gnu_proc
, 4, gnu_pool_addr
, gnu_obj
,
2167 gnu_size
, gnu_align
);
2169 gnu_call
= build_call_n_expr (gnu_proc
, 3, gnu_pool_addr
,
2170 gnu_size
, gnu_align
);
2173 /* Secondary stack case. */
2176 /* The size is the second parameter. */
2177 Entity_Id gnat_size_type
2178 = Etype (Next_Formal (First_Formal (gnat_proc
)));
2179 tree gnu_size_type
= gnat_to_gnu_type (gnat_size_type
);
2181 gnu_size
= convert (gnu_size_type
, gnu_size
);
2183 /* The first arg is the address of the object, for a deallocator,
2186 gnu_call
= build_call_n_expr (gnu_proc
, 2, gnu_obj
, gnu_size
);
2188 gnu_call
= build_call_n_expr (gnu_proc
, 1, gnu_size
);
2194 /* Helper for build_call_alloc_dealloc, to build and return an allocator for
2195 DATA_SIZE bytes aimed at containing a DATA_TYPE object, using the default
2196 __gnat_malloc allocator. Honor DATA_TYPE alignments greater than what the
2200 maybe_wrap_malloc (tree data_size
, tree data_type
, Node_Id gnat_node
)
2202 /* When the DATA_TYPE alignment is stricter than what malloc offers
2203 (super-aligned case), we allocate an "aligning" wrapper type and return
2204 the address of its single data field with the malloc's return value
2205 stored just in front. */
2207 unsigned int data_align
= TYPE_ALIGN (data_type
);
2208 unsigned int system_allocator_alignment
2209 = get_target_system_allocator_alignment () * BITS_PER_UNIT
;
2212 = ((data_align
> system_allocator_alignment
)
2213 ? make_aligning_type (data_type
, data_align
, data_size
,
2214 system_allocator_alignment
,
2215 POINTER_SIZE
/ BITS_PER_UNIT
,
2220 = aligning_type
? TYPE_SIZE_UNIT (aligning_type
) : data_size
;
2222 tree malloc_ptr
= build_call_n_expr (malloc_decl
, 1, size_to_malloc
);
2226 /* Latch malloc's return value and get a pointer to the aligning field
2228 tree storage_ptr
= gnat_protect_expr (malloc_ptr
);
2230 tree aligning_record_addr
2231 = convert (build_pointer_type (aligning_type
), storage_ptr
);
2233 tree aligning_record
2234 = build_unary_op (INDIRECT_REF
, NULL_TREE
, aligning_record_addr
);
2237 = build_component_ref (aligning_record
, TYPE_FIELDS (aligning_type
),
2240 tree aligning_field_addr
2241 = build_unary_op (ADDR_EXPR
, NULL_TREE
, aligning_field
);
2243 /* Then arrange to store the allocator's return value ahead
2245 tree storage_ptr_slot_addr
2246 = build_binary_op (POINTER_PLUS_EXPR
, ptr_type_node
,
2247 convert (ptr_type_node
, aligning_field_addr
),
2248 size_int (-(HOST_WIDE_INT
) POINTER_SIZE
2251 tree storage_ptr_slot
2252 = build_unary_op (INDIRECT_REF
, NULL_TREE
,
2253 convert (build_pointer_type (ptr_type_node
),
2254 storage_ptr_slot_addr
));
2257 build2 (COMPOUND_EXPR
, TREE_TYPE (aligning_field_addr
),
2258 build_binary_op (INIT_EXPR
, NULL_TREE
,
2259 storage_ptr_slot
, storage_ptr
),
2260 aligning_field_addr
);
2266 /* Helper for build_call_alloc_dealloc, to release a DATA_TYPE object
2267 designated by DATA_PTR using the __gnat_free entry point. */
2270 maybe_wrap_free (tree data_ptr
, tree data_type
)
2272 /* In the regular alignment case, we pass the data pointer straight to free.
2273 In the superaligned case, we need to retrieve the initial allocator
2274 return value, stored in front of the data block at allocation time. */
2276 unsigned int data_align
= TYPE_ALIGN (data_type
);
2277 unsigned int system_allocator_alignment
2278 = get_target_system_allocator_alignment () * BITS_PER_UNIT
;
2282 if (data_align
> system_allocator_alignment
)
2284 /* DATA_FRONT_PTR (void *)
2285 = (void *)DATA_PTR - (void *)sizeof (void *)) */
2288 (POINTER_PLUS_EXPR
, ptr_type_node
,
2289 convert (ptr_type_node
, data_ptr
),
2290 size_int (-(HOST_WIDE_INT
) POINTER_SIZE
/ BITS_PER_UNIT
));
2292 /* FREE_PTR (void *) = *(void **)DATA_FRONT_PTR */
2295 (INDIRECT_REF
, NULL_TREE
,
2296 convert (build_pointer_type (ptr_type_node
), data_front_ptr
));
2299 free_ptr
= data_ptr
;
2301 return build_call_n_expr (free_decl
, 1, free_ptr
);
2304 /* Build a GCC tree to call an allocation or deallocation function.
2305 If GNU_OBJ is nonzero, it is an object to deallocate. Otherwise,
2306 generate an allocator.
2308 GNU_SIZE is the number of bytes to allocate and GNU_TYPE is the contained
2309 object type, used to determine the to-be-honored address alignment.
2310 GNAT_PROC, if present, is a procedure to call and GNAT_POOL is the storage
2311 pool to use. If not present, malloc and free are used. GNAT_NODE is used
2312 to provide an error location for restriction violation messages. */
2315 build_call_alloc_dealloc (tree gnu_obj
, tree gnu_size
, tree gnu_type
,
2316 Entity_Id gnat_proc
, Entity_Id gnat_pool
,
2319 /* Explicit proc to call ? This one is assumed to deal with the type
2320 alignment constraints. */
2321 if (Present (gnat_proc
))
2322 return build_call_alloc_dealloc_proc (gnu_obj
, gnu_size
, gnu_type
,
2323 gnat_proc
, gnat_pool
);
2325 /* Otherwise, object to "free" or "malloc" with possible special processing
2326 for alignments stricter than what the default allocator honors. */
2328 return maybe_wrap_free (gnu_obj
, gnu_type
);
2331 /* Assert that we no longer can be called with this special pool. */
2332 gcc_assert (gnat_pool
!= -1);
2334 /* Check that we aren't violating the associated restriction. */
2335 if (!(Nkind (gnat_node
) == N_Allocator
&& Comes_From_Source (gnat_node
)))
2337 Check_No_Implicit_Heap_Alloc (gnat_node
);
2338 if (Has_Task (Etype (gnat_node
)))
2339 Check_No_Implicit_Task_Alloc (gnat_node
);
2340 if (Has_Protected (Etype (gnat_node
)))
2341 Check_No_Implicit_Protected_Alloc (gnat_node
);
2343 return maybe_wrap_malloc (gnu_size
, gnu_type
, gnat_node
);
2347 /* Build a GCC tree that corresponds to allocating an object of TYPE whose
2348 initial value is INIT, if INIT is nonzero. Convert the expression to
2349 RESULT_TYPE, which must be some pointer type, and return the result.
2351 GNAT_PROC and GNAT_POOL optionally give the procedure to call and
2352 the storage pool to use. GNAT_NODE is used to provide an error
2353 location for restriction violation messages. If IGNORE_INIT_TYPE is
2354 true, ignore the type of INIT for the purpose of determining the size;
2355 this will cause the maximum size to be allocated if TYPE is of
2356 self-referential size. */
2359 build_allocator (tree type
, tree init
, tree result_type
, Entity_Id gnat_proc
,
2360 Entity_Id gnat_pool
, Node_Id gnat_node
, bool ignore_init_type
)
2362 tree size
, storage
, storage_deref
, storage_init
;
2364 /* If the initializer, if present, is a NULL_EXPR, just return a new one. */
2365 if (init
&& TREE_CODE (init
) == NULL_EXPR
)
2366 return build1 (NULL_EXPR
, result_type
, TREE_OPERAND (init
, 0));
2368 /* If we are just annotating types, also return a NULL_EXPR. */
2369 else if (type_annotate_only
)
2370 return build1 (NULL_EXPR
, result_type
,
2371 build_call_raise (CE_Range_Check_Failed
, gnat_node
,
2372 N_Raise_Constraint_Error
));
2374 /* If the initializer, if present, is a COND_EXPR, deal with each branch. */
2375 else if (init
&& TREE_CODE (init
) == COND_EXPR
)
2376 return build3 (COND_EXPR
, result_type
, TREE_OPERAND (init
, 0),
2377 build_allocator (type
, TREE_OPERAND (init
, 1), result_type
,
2378 gnat_proc
, gnat_pool
, gnat_node
,
2380 build_allocator (type
, TREE_OPERAND (init
, 2), result_type
,
2381 gnat_proc
, gnat_pool
, gnat_node
,
2384 /* If RESULT_TYPE is a fat or thin pointer, set SIZE to be the sum of the
2385 sizes of the object and its template. Allocate the whole thing and
2386 fill in the parts that are known. */
2387 else if (TYPE_IS_FAT_OR_THIN_POINTER_P (result_type
))
2390 = build_unc_object_type_from_ptr (result_type
, type
,
2391 get_identifier ("ALLOC"), false);
2392 tree template_type
= TREE_TYPE (TYPE_FIELDS (storage_type
));
2393 tree storage_ptr_type
= build_pointer_type (storage_type
);
2395 size
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (TYPE_SIZE_UNIT (storage_type
),
2398 /* If the size overflows, pass -1 so Storage_Error will be raised. */
2399 if (TREE_CODE (size
) == INTEGER_CST
&& !valid_constant_size_p (size
))
2400 size
= size_int (-1);
2402 storage
= build_call_alloc_dealloc (NULL_TREE
, size
, storage_type
,
2403 gnat_proc
, gnat_pool
, gnat_node
);
2404 storage
= convert (storage_ptr_type
, gnat_protect_expr (storage
));
2405 storage_deref
= build_unary_op (INDIRECT_REF
, NULL_TREE
, storage
);
2406 TREE_THIS_NOTRAP (storage_deref
) = 1;
2408 /* If there is an initializing expression, then make a constructor for
2409 the entire object including the bounds and copy it into the object.
2410 If there is no initializing expression, just set the bounds. */
2413 vec
<constructor_elt
, va_gc
> *v
;
2416 CONSTRUCTOR_APPEND_ELT (v
, TYPE_FIELDS (storage_type
),
2417 build_template (template_type
, type
, init
));
2418 CONSTRUCTOR_APPEND_ELT (v
, DECL_CHAIN (TYPE_FIELDS (storage_type
)),
2421 = build_binary_op (INIT_EXPR
, NULL_TREE
, storage_deref
,
2422 gnat_build_constructor (storage_type
, v
));
2426 = build_binary_op (INIT_EXPR
, NULL_TREE
,
2427 build_component_ref (storage_deref
,
2428 TYPE_FIELDS (storage_type
),
2430 build_template (template_type
, type
, NULL_TREE
));
2432 return build2 (COMPOUND_EXPR
, result_type
,
2433 storage_init
, convert (result_type
, storage
));
2436 size
= TYPE_SIZE_UNIT (type
);
2438 /* If we have an initializing expression, see if its size is simpler
2439 than the size from the type. */
2440 if (!ignore_init_type
&& init
&& TYPE_SIZE_UNIT (TREE_TYPE (init
))
2441 && (TREE_CODE (TYPE_SIZE_UNIT (TREE_TYPE (init
))) == INTEGER_CST
2442 || CONTAINS_PLACEHOLDER_P (size
)))
2443 size
= TYPE_SIZE_UNIT (TREE_TYPE (init
));
2445 /* If the size is still self-referential, reference the initializing
2446 expression, if it is present. If not, this must have been a call
2447 to allocate a library-level object, in which case we just use the
2449 if (!ignore_init_type
&& init
)
2450 size
= SUBSTITUTE_PLACEHOLDER_IN_EXPR (size
, init
);
2451 else if (CONTAINS_PLACEHOLDER_P (size
))
2452 size
= max_size (size
, true);
2454 /* If the size overflows, pass -1 so Storage_Error will be raised. */
2455 if (TREE_CODE (size
) == INTEGER_CST
&& !valid_constant_size_p (size
))
2456 size
= size_int (-1);
2458 storage
= convert (result_type
,
2459 build_call_alloc_dealloc (NULL_TREE
, size
, type
,
2460 gnat_proc
, gnat_pool
,
2463 /* If we have an initial value, protect the new address, assign the value
2464 and return the address with a COMPOUND_EXPR. */
2467 storage
= gnat_protect_expr (storage
);
2468 storage_deref
= build_unary_op (INDIRECT_REF
, NULL_TREE
, storage
);
2469 TREE_THIS_NOTRAP (storage_deref
) = 1;
2471 = build_binary_op (INIT_EXPR
, NULL_TREE
, storage_deref
, init
);
2472 return build2 (COMPOUND_EXPR
, result_type
, storage_init
, storage
);
2478 /* Indicate that we need to take the address of T and that it therefore
2479 should not be allocated in a register. Return true if successful. */
2482 gnat_mark_addressable (tree t
)
2485 switch (TREE_CODE (t
))
2490 case ARRAY_RANGE_REF
:
2493 case VIEW_CONVERT_EXPR
:
2494 case NON_LVALUE_EXPR
:
2496 t
= TREE_OPERAND (t
, 0);
2500 t
= TREE_OPERAND (t
, 1);
2504 TREE_ADDRESSABLE (t
) = 1;
2510 TREE_ADDRESSABLE (t
) = 1;
2514 TREE_ADDRESSABLE (t
) = 1;
2518 return DECL_CONST_CORRESPONDING_VAR (t
)
2519 && gnat_mark_addressable (DECL_CONST_CORRESPONDING_VAR (t
));
2526 /* Return true if EXP is a stable expression for the purpose of the functions
2527 below and, therefore, can be returned unmodified by them. We accept things
2528 that are actual constants or that have already been handled. */
2531 gnat_stable_expr_p (tree exp
)
2533 enum tree_code code
= TREE_CODE (exp
);
2534 return TREE_CONSTANT (exp
) || code
== NULL_EXPR
|| code
== SAVE_EXPR
;
2537 /* Save EXP for later use or reuse. This is equivalent to save_expr in tree.cc
2538 but we know how to handle our own nodes. */
2541 gnat_save_expr (tree exp
)
2543 tree type
= TREE_TYPE (exp
);
2544 enum tree_code code
= TREE_CODE (exp
);
2546 if (gnat_stable_expr_p (exp
))
2549 if (code
== UNCONSTRAINED_ARRAY_REF
)
2551 tree t
= build1 (code
, type
, gnat_save_expr (TREE_OPERAND (exp
, 0)));
2552 TREE_READONLY (t
) = TYPE_READONLY (type
);
2556 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2557 This may be more efficient, but will also allow us to more easily find
2558 the match for the PLACEHOLDER_EXPR. */
2559 if (code
== COMPONENT_REF
2560 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp
, 0))))
2561 return build3 (code
, type
, gnat_save_expr (TREE_OPERAND (exp
, 0)),
2562 TREE_OPERAND (exp
, 1), NULL_TREE
);
2564 return save_expr (exp
);
2567 /* Protect EXP for immediate reuse. This is a variant of gnat_save_expr that
2568 is optimized under the assumption that EXP's value doesn't change before
2569 its subsequent reuse(s) except through its potential reevaluation. */
2572 gnat_protect_expr (tree exp
)
2574 tree type
= TREE_TYPE (exp
);
2575 enum tree_code code
= TREE_CODE (exp
);
2577 if (gnat_stable_expr_p (exp
))
2580 /* If EXP has no side effects, we theoretically don't need to do anything.
2581 However, we may be recursively passed more and more complex expressions
2582 involving checks which will be reused multiple times and eventually be
2583 unshared for gimplification; in order to avoid a complexity explosion
2584 at that point, we protect any expressions more complex than a simple
2585 arithmetic expression. */
2586 if (!TREE_SIDE_EFFECTS (exp
))
2588 tree inner
= skip_simple_arithmetic (exp
);
2589 if (!EXPR_P (inner
) || REFERENCE_CLASS_P (inner
))
2593 /* If this is a conversion, protect what's inside the conversion. */
2594 if (code
== NON_LVALUE_EXPR
2595 || CONVERT_EXPR_CODE_P (code
)
2596 || code
== VIEW_CONVERT_EXPR
)
2597 return build1 (code
, type
, gnat_protect_expr (TREE_OPERAND (exp
, 0)));
2599 /* If we're indirectly referencing something, we only need to protect the
2600 address since the data itself can't change in these situations. */
2601 if (code
== INDIRECT_REF
|| code
== UNCONSTRAINED_ARRAY_REF
)
2603 tree t
= build1 (code
, type
, gnat_protect_expr (TREE_OPERAND (exp
, 0)));
2604 TREE_READONLY (t
) = TYPE_READONLY (type
);
2608 /* Likewise if we're indirectly referencing part of something. */
2609 if (code
== COMPONENT_REF
2610 && TREE_CODE (TREE_OPERAND (exp
, 0)) == INDIRECT_REF
)
2611 return build3 (code
, type
, gnat_protect_expr (TREE_OPERAND (exp
, 0)),
2612 TREE_OPERAND (exp
, 1), NULL_TREE
);
2614 /* If this is a COMPONENT_REF of a fat pointer, save the entire fat pointer.
2615 This may be more efficient, but will also allow us to more easily find
2616 the match for the PLACEHOLDER_EXPR. */
2617 if (code
== COMPONENT_REF
2618 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (exp
, 0))))
2619 return build3 (code
, type
, gnat_protect_expr (TREE_OPERAND (exp
, 0)),
2620 TREE_OPERAND (exp
, 1), NULL_TREE
);
2622 /* If this is a fat pointer or a scalar, just make a SAVE_EXPR. Likewise
2623 for a CALL_EXPR as large objects are returned via invisible reference
2624 in most ABIs so the temporary will directly be filled by the callee. */
2625 if (TYPE_IS_FAT_POINTER_P (type
)
2626 || !AGGREGATE_TYPE_P (type
)
2627 || code
== CALL_EXPR
)
2628 return save_expr (exp
);
2630 /* Otherwise reference, protect the address and dereference. */
2632 build_unary_op (INDIRECT_REF
, type
,
2633 save_expr (build_unary_op (ADDR_EXPR
, NULL_TREE
, exp
)));
2636 /* This is equivalent to stabilize_reference_1 in tree.cc but we take an extra
2637 argument to force evaluation of everything. */
2640 gnat_stabilize_reference_1 (tree e
, void *data
)
2642 const bool force
= *(bool *)data
;
2643 enum tree_code code
= TREE_CODE (e
);
2644 tree type
= TREE_TYPE (e
);
2647 if (gnat_stable_expr_p (e
))
2650 switch (TREE_CODE_CLASS (code
))
2652 case tcc_exceptional
:
2653 case tcc_declaration
:
2654 case tcc_comparison
:
2655 case tcc_expression
:
2658 /* If this is a COMPONENT_REF of a fat pointer, save the entire
2659 fat pointer. This may be more efficient, but will also allow
2660 us to more easily find the match for the PLACEHOLDER_EXPR. */
2661 if (code
== COMPONENT_REF
2662 && TYPE_IS_FAT_POINTER_P (TREE_TYPE (TREE_OPERAND (e
, 0))))
2664 = build3 (code
, type
,
2665 gnat_stabilize_reference_1 (TREE_OPERAND (e
, 0), data
),
2666 TREE_OPERAND (e
, 1), NULL_TREE
);
2667 /* If the expression has side-effects, then encase it in a SAVE_EXPR
2668 so that it will only be evaluated once. */
2669 /* The tcc_reference and tcc_comparison classes could be handled as
2670 below, but it is generally faster to only evaluate them once. */
2671 else if (TREE_SIDE_EFFECTS (e
) || force
)
2672 return save_expr (e
);
2678 /* Recursively stabilize each operand. */
2680 = build2 (code
, type
,
2681 gnat_stabilize_reference_1 (TREE_OPERAND (e
, 0), data
),
2682 gnat_stabilize_reference_1 (TREE_OPERAND (e
, 1), data
));
2686 /* Recursively stabilize each operand. */
2688 = build1 (code
, type
,
2689 gnat_stabilize_reference_1 (TREE_OPERAND (e
, 0), data
));
2696 /* See gnat_rewrite_reference below for the rationale. */
2697 TREE_READONLY (result
) = TREE_READONLY (e
);
2698 TREE_THIS_VOLATILE (result
) = TREE_THIS_VOLATILE (e
);
2700 if (TREE_SIDE_EFFECTS (e
))
2701 TREE_SIDE_EFFECTS (result
) = 1;
2706 /* This is equivalent to stabilize_reference in tree.cc but we know how to
2707 handle our own nodes and we take extra arguments. FORCE says whether to
2708 force evaluation of everything in REF. INIT is set to the first arm of
2709 a COMPOUND_EXPR present in REF, if any. */
2712 gnat_stabilize_reference (tree ref
, bool force
, tree
*init
)
2715 gnat_rewrite_reference (ref
, gnat_stabilize_reference_1
, &force
, init
);
2718 /* Rewrite reference REF and call FUNC on each expression within REF in the
2719 process. DATA is passed unmodified to FUNC. INIT is set to the first
2720 arm of a COMPOUND_EXPR present in REF, if any. */
2723 gnat_rewrite_reference (tree ref
, rewrite_fn func
, void *data
, tree
*init
)
2725 tree type
= TREE_TYPE (ref
);
2726 enum tree_code code
= TREE_CODE (ref
);
2735 /* No action is needed in this case. */
2740 case FIX_TRUNC_EXPR
:
2743 case VIEW_CONVERT_EXPR
:
2745 = build1 (code
, type
,
2746 gnat_rewrite_reference (TREE_OPERAND (ref
, 0), func
, data
,
2751 case UNCONSTRAINED_ARRAY_REF
:
2752 result
= build1 (code
, type
, func (TREE_OPERAND (ref
, 0), data
));
2756 result
= build3 (COMPONENT_REF
, type
,
2757 gnat_rewrite_reference (TREE_OPERAND (ref
, 0), func
,
2759 TREE_OPERAND (ref
, 1), NULL_TREE
);
2763 result
= build3 (BIT_FIELD_REF
, type
,
2764 gnat_rewrite_reference (TREE_OPERAND (ref
, 0), func
,
2766 TREE_OPERAND (ref
, 1), TREE_OPERAND (ref
, 2));
2767 REF_REVERSE_STORAGE_ORDER (result
) = REF_REVERSE_STORAGE_ORDER (ref
);
2771 case ARRAY_RANGE_REF
:
2773 = build4 (code
, type
,
2774 gnat_rewrite_reference (TREE_OPERAND (ref
, 0), func
, data
,
2776 func (TREE_OPERAND (ref
, 1), data
),
2777 TREE_OPERAND (ref
, 2), NULL_TREE
);
2781 gcc_assert (!*init
);
2782 *init
= TREE_OPERAND (ref
, 0);
2783 /* We expect only the pattern built in Call_to_gnu. */
2784 gcc_assert (DECL_P (TREE_OPERAND (ref
, 1))
2785 || (TREE_CODE (TREE_OPERAND (ref
, 1)) == COMPONENT_REF
2786 && DECL_P (TREE_OPERAND (TREE_OPERAND (ref
, 1), 0))));
2787 return TREE_OPERAND (ref
, 1);
2791 /* This can only be an atomic load. */
2792 gcc_assert (call_is_atomic_load (ref
));
2794 /* An atomic load is an INDIRECT_REF of its first argument. */
2795 tree t
= CALL_EXPR_ARG (ref
, 0);
2796 if (TREE_CODE (t
) == NOP_EXPR
)
2797 t
= TREE_OPERAND (t
, 0);
2798 if (TREE_CODE (t
) == ADDR_EXPR
)
2799 t
= build1 (ADDR_EXPR
, TREE_TYPE (t
),
2800 gnat_rewrite_reference (TREE_OPERAND (t
, 0), func
, data
,
2804 t
= fold_convert (TREE_TYPE (CALL_EXPR_ARG (ref
, 0)), t
);
2806 result
= build_call_expr (TREE_OPERAND (CALL_EXPR_FN (ref
), 0), 2,
2807 t
, CALL_EXPR_ARG (ref
, 1));
2819 /* TREE_READONLY and TREE_THIS_VOLATILE set on the initial expression may
2820 not be sustained across some paths, such as the one for INDIRECT_REF.
2822 Special care should be taken regarding TREE_SIDE_EFFECTS, because some
2823 paths introduce side-effects where there was none initially (e.g. if a
2824 SAVE_EXPR is built) and we also want to keep track of that. */
2825 TREE_READONLY (result
) = TREE_READONLY (ref
);
2826 TREE_THIS_VOLATILE (result
) = TREE_THIS_VOLATILE (ref
);
2828 if (TREE_SIDE_EFFECTS (ref
))
2829 TREE_SIDE_EFFECTS (result
) = 1;
2831 if (code
== INDIRECT_REF
2832 || code
== UNCONSTRAINED_ARRAY_REF
2833 || code
== ARRAY_REF
2834 || code
== ARRAY_RANGE_REF
)
2835 TREE_THIS_NOTRAP (result
) = TREE_THIS_NOTRAP (ref
);
2840 /* This is equivalent to get_inner_reference in expr.cc but it returns the
2841 ultimate containing object only if the reference (lvalue) is constant,
2842 i.e. if it doesn't depend on the context in which it is evaluated. */
2845 get_inner_constant_reference (tree exp
)
2849 switch (TREE_CODE (exp
))
2855 if (!TREE_CONSTANT (DECL_FIELD_OFFSET (TREE_OPERAND (exp
, 1))))
2860 case ARRAY_RANGE_REF
:
2862 if (TREE_OPERAND (exp
, 2))
2865 tree array_type
= TREE_TYPE (TREE_OPERAND (exp
, 0));
2866 if (!TREE_CONSTANT (TREE_OPERAND (exp
, 1))
2867 || !TREE_CONSTANT (TYPE_MIN_VALUE (TYPE_DOMAIN (array_type
)))
2868 || !TREE_CONSTANT (TYPE_SIZE_UNIT (TREE_TYPE (array_type
))))
2875 case VIEW_CONVERT_EXPR
:
2882 exp
= TREE_OPERAND (exp
, 0);
2889 /* Return true if EXPR is the addition or the subtraction of a constant and,
2890 if so, set *ADD to the addend, *CST to the constant and *MINUS_P to true
2891 if this is a subtraction. */
2894 is_simple_additive_expression (tree expr
, tree
*add
, tree
*cst
, bool *minus_p
)
2896 /* Skip overflow checks. */
2897 if (TREE_CODE (expr
) == COND_EXPR
2898 && TREE_CODE (COND_EXPR_THEN (expr
)) == COMPOUND_EXPR
2899 && TREE_CODE (TREE_OPERAND (COND_EXPR_THEN (expr
), 0)) == CALL_EXPR
2900 && get_callee_fndecl (TREE_OPERAND (COND_EXPR_THEN (expr
), 0))
2901 == gnat_raise_decls
[CE_Overflow_Check_Failed
])
2902 expr
= COND_EXPR_ELSE (expr
);
2904 if (TREE_CODE (expr
) == PLUS_EXPR
)
2906 if (TREE_CONSTANT (TREE_OPERAND (expr
, 0)))
2908 *add
= TREE_OPERAND (expr
, 1);
2909 *cst
= TREE_OPERAND (expr
, 0);
2913 else if (TREE_CONSTANT (TREE_OPERAND (expr
, 1)))
2915 *add
= TREE_OPERAND (expr
, 0);
2916 *cst
= TREE_OPERAND (expr
, 1);
2921 else if (TREE_CODE (expr
) == MINUS_EXPR
)
2923 if (TREE_CONSTANT (TREE_OPERAND (expr
, 1)))
2925 *add
= TREE_OPERAND (expr
, 0);
2926 *cst
= TREE_OPERAND (expr
, 1);
2935 /* If EXPR is an expression that is invariant in the current function, in the
2936 sense that it can be evaluated anywhere in the function and any number of
2937 times, return EXPR or an equivalent expression. Otherwise return NULL. */
2940 gnat_invariant_expr (tree expr
)
2942 tree type
= TREE_TYPE (expr
);
2946 expr
= remove_conversions (expr
, false);
2948 /* Look through temporaries created to capture values. */
2949 while ((TREE_CODE (expr
) == CONST_DECL
2950 || (TREE_CODE (expr
) == VAR_DECL
&& TREE_READONLY (expr
)))
2951 && decl_function_context (expr
) == current_function_decl
2952 && DECL_INITIAL (expr
))
2954 expr
= DECL_INITIAL (expr
);
2955 /* Look into CONSTRUCTORs built to initialize padded types. */
2956 expr
= maybe_padded_object (expr
);
2957 expr
= remove_conversions (expr
, false);
2960 /* We are only interested in scalar types at the moment and, even if we may
2961 have gone through padding types in the above loop, we must be back to a
2962 scalar value at this point. */
2963 if (AGGREGATE_TYPE_P (TREE_TYPE (expr
)))
2966 if (TREE_CONSTANT (expr
))
2967 return fold_convert (type
, expr
);
2969 /* Deal with aligning patterns. */
2970 if (TREE_CODE (expr
) == BIT_AND_EXPR
2971 && TREE_CONSTANT (TREE_OPERAND (expr
, 1)))
2973 tree op0
= gnat_invariant_expr (TREE_OPERAND (expr
, 0));
2975 return fold_build2 (BIT_AND_EXPR
, type
, op0
, TREE_OPERAND (expr
, 1));
2980 /* Deal with addition or subtraction of constants. */
2981 if (is_simple_additive_expression (expr
, &add
, &cst
, &minus_p
))
2983 add
= gnat_invariant_expr (add
);
2986 fold_build2 (minus_p
? MINUS_EXPR
: PLUS_EXPR
, type
,
2987 fold_convert (type
, add
), fold_convert (type
, cst
));
2992 bool invariant_p
= false;
2997 switch (TREE_CODE (t
))
3000 invariant_p
|= DECL_INVARIANT_P (TREE_OPERAND (t
, 1));
3004 case ARRAY_RANGE_REF
:
3005 if (!TREE_CONSTANT (TREE_OPERAND (t
, 1)) || TREE_OPERAND (t
, 2))
3012 case VIEW_CONVERT_EXPR
:
3017 if ((!invariant_p
&& !TREE_READONLY (t
)) || TREE_SIDE_EFFECTS (t
))
3019 invariant_p
= false;
3026 t
= TREE_OPERAND (t
, 0);
3030 if (TREE_SIDE_EFFECTS (t
))
3033 if (TREE_CODE (t
) == CONST_DECL
3034 && (DECL_EXTERNAL (t
)
3035 || decl_function_context (t
) != current_function_decl
))
3036 return fold_convert (type
, expr
);
3038 if (!invariant_p
&& !TREE_READONLY (t
))
3041 if (TREE_CODE (t
) == PARM_DECL
)
3042 return fold_convert (type
, expr
);
3044 if (TREE_CODE (t
) == VAR_DECL
3045 && (DECL_EXTERNAL (t
)
3046 || decl_function_context (t
) != current_function_decl
))
3047 return fold_convert (type
, expr
);