/* Subroutines for manipulating rtx's in semantically interesting ways.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "coretypes.h"
#include "diagnostic-core.h"
#include "double-int.h"
#include "stor-layout.h"
#include "hard-reg-set.h"
#include "statistics.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "insn-codes.h"
#include "langhooks.h"
#include "common/common-target.h"

static rtx break_out_memory_refs (rtx);

/* Truncate and perhaps sign-extend C as appropriate for MODE.  */

HOST_WIDE_INT
trunc_int_for_mode (HOST_WIDE_INT c, machine_mode mode)
{
  int width = GET_MODE_PRECISION (mode);

  /* You want to truncate to a _what_?  */
  gcc_assert (SCALAR_INT_MODE_P (mode)
	      || POINTER_BOUNDS_MODE_P (mode));

  /* Canonicalize BImode to 0 and STORE_FLAG_VALUE.  */
  if (mode == BImode)
    return c & 1 ? STORE_FLAG_VALUE : 0;

  /* Sign-extend for the requested mode.  */

  if (width < HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT sign = 1;
      sign <<= width - 1;
      c &= (sign << 1) - 1;
      c ^= sign;
      c -= sign;
    }

  return c;
}

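/* Illustrative sketch, not from the original sources: what the
   truncation above computes for a narrow mode.  Assumes QImode has
   8-bit precision, as on common targets; the function name below is
   hypothetical.  */
#if 0
static void
trunc_int_for_mode_example (void)
{
  /* Only the low 8 bits survive, and bit 7 becomes the sign bit,
     so 0x1ff truncates to -1 while 0x17f truncates to 0x7f.  */
  gcc_assert (trunc_int_for_mode (0x1ff, QImode) == -1);
  gcc_assert (trunc_int_for_mode (0x17f, QImode) == 0x7f);
}
#endif
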
/* Return an rtx for the sum of X and the integer C, given that X has
   mode MODE.  INPLACE is true if X can be modified inplace or false
   if it must be treated as immutable.  */

rtx
plus_constant (machine_mode mode, rtx x, HOST_WIDE_INT c,
	       bool inplace)
{
  RTX_CODE code;
  rtx y;
  rtx tem;
  int all_constant = 0;

  gcc_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);

  if (c == 0)
    return x;

 restart:

  code = GET_CODE (x);
  y = x;

  switch (code)
    {
    CASE_CONST_SCALAR_INT:
      return immed_wide_int_const (wi::add (std::make_pair (x, mode), c),
				   mode);
    case MEM:
      /* If this is a reference to the constant pool, try replacing it with
	 a reference to a new constant.  If the resulting address isn't
	 valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
	{
	  tem = plus_constant (mode, get_pool_constant (XEXP (x, 0)), c);
	  tem = force_const_mem (GET_MODE (x), tem);
	  /* Targets may disallow some constants in the constant pool, thus
	     force_const_mem may return NULL_RTX.  */
	  if (tem && memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
	    return tem;
	}
      break;

    case CONST:
      /* If adding to something entirely constant, set a flag
	 so that we can add a CONST around the result.  */
      if (inplace && shared_const_p (x))
	inplace = false;
      x = XEXP (x, 0);
      all_constant = 1;
      goto restart;

    case SYMBOL_REF:
    case LABEL_REF:
      all_constant = 1;
      break;

    case PLUS:
      /* The interesting case is adding the integer to a sum.  Look
	 for constant term in the sum and combine with C.  For an
	 integer constant term or a constant term that is not an
	 explicit integer, we combine or group them together anyway.

	 We may not immediately return from the recursive call here, lest
	 all_constant gets lost.  */

      if (CONSTANT_P (XEXP (x, 1)))
	{
	  rtx term = plus_constant (mode, XEXP (x, 1), c, inplace);
	  if (term == const0_rtx)
	    x = XEXP (x, 0);
	  else if (inplace)
	    XEXP (x, 1) = term;
	  else
	    x = gen_rtx_PLUS (mode, XEXP (x, 0), term);
	  c = 0;
	}
      else if (rtx *const_loc = find_constant_term_loc (&y))
	{
	  if (!inplace)
	    {
	      /* We need to be careful since X may be shared and we can't
		 modify it in place.  */
	      x = copy_rtx (x);
	      const_loc = find_constant_term_loc (&x);
	    }
	  *const_loc = plus_constant (mode, *const_loc, c, true);
	  c = 0;
	}
      break;

    default:
      break;
    }

  if (c != 0)
    x = gen_rtx_PLUS (mode, x, gen_int_mode (c, mode));

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    return gen_rtx_CONST (mode, x);
  else
    return x;
}

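/* Illustrative sketch, not from the original sources: the two common
   shapes plus_constant produces.  The function name is hypothetical;
   it relies only on interfaces used elsewhere in this file.  */
#if 0
static void
plus_constant_example (void)
{
  /* Constant + constant folds to a single CONST_INT ...  */
  rtx folded = plus_constant (SImode, GEN_INT (7), 5);
  gcc_assert (CONST_INT_P (folded) && INTVAL (folded) == 12);

  /* ... while reg + constant becomes a PLUS rtx.  */
  rtx reg = gen_reg_rtx (SImode);
  rtx sum = plus_constant (SImode, reg, 5);
  gcc_assert (GET_CODE (sum) == PLUS && XEXP (sum, 0) == reg);
}
#endif
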
/* If X is a sum, return a new sum like X but lacking any constant terms.
   Add all the removed constant terms into *CONSTPTR.
   X itself is not altered.  The result != X if and only if
   it is not isomorphic to X.  */

rtx
eliminate_constant_term (rtx x, rtx *constptr)
{
  rtx x0, x1;
  rtx tem;

  if (GET_CODE (x) != PLUS)
    return x;

  /* First handle constants appearing at this level explicitly.  */
  if (CONST_INT_P (XEXP (x, 1))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
						XEXP (x, 1)))
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return eliminate_constant_term (XEXP (x, 0), constptr);
    }

  tem = const0_rtx;
  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
						*constptr, tem))
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return gen_rtx_PLUS (GET_MODE (x), x0, x1);
    }

  return x;
}

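/* Illustrative sketch, not from the original sources: stripping a
   constant term out of an address sum.  The function name is
   hypothetical.  */
#if 0
static void
eliminate_constant_term_example (void)
{
  rtx reg = gen_reg_rtx (Pmode);
  rtx sum = gen_rtx_PLUS (Pmode, reg, GEN_INT (4));
  rtx constant = const0_rtx;

  /* The constant term migrates into CONSTANT, leaving the bare reg.  */
  rtx stripped = eliminate_constant_term (sum, &constant);
  gcc_assert (stripped == reg && INTVAL (constant) == 4);
}
#endif
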
/* Return a copy of X in which all memory references
   and all constants that involve symbol refs
   have been replaced with new temporary registers.
   Also emit code to load the memory locations and constants
   into those registers.

   If X contains no such constants or memory references,
   X itself (not a copy) is returned.

   If a constant is found in the address that is not a legitimate constant
   in an insn, it is left alone in the hope that it might be valid in the
   address.

   X may contain no arithmetic except addition, subtraction and multiplication.
   Values returned by expand_expr with 1 for sum_ok fit this constraint.  */

static rtx
break_out_memory_refs (rtx x)
{
  if (MEM_P (x)
      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
	  && GET_MODE (x) != VOIDmode))
    x = force_reg (GET_MODE (x), x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
	   || GET_CODE (x) == MULT)
    {
      rtx op0 = break_out_memory_refs (XEXP (x, 0));
      rtx op1 = break_out_memory_refs (XEXP (x, 1));

      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
	x = simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
    }

  return x;
}

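/* Illustrative sketch, not from the original sources: an address that
   contains a MEM gets its MEM operand loaded into a fresh pseudo.
   The function name is hypothetical.  */
#if 0
static void
break_out_memory_refs_example (void)
{
  rtx reg = gen_reg_rtx (Pmode);
  rtx mem = gen_rtx_MEM (Pmode, reg);
  rtx addr = gen_rtx_PLUS (Pmode, mem, GEN_INT (8));

  /* The result is (plus (reg) (const_int 8)), plus an emitted load
     insn for the MEM; the VOIDmode const_int is left alone.  */
  rtx legit = break_out_memory_refs (addr);
  gcc_assert (GET_CODE (legit) == PLUS && REG_P (XEXP (legit, 0)));
}
#endif
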
/* Given X, a memory address in address space AS' pointer mode, convert it to
   an address in the address space's address mode, or vice versa (TO_MODE says
   which way).  We take advantage of the fact that pointers are not allowed to
   overflow by commuting arithmetic operations over conversions so that address
   arithmetic insns can be used.  IN_CONST is true if this conversion is inside
   a CONST.  */

static rtx
convert_memory_address_addr_space_1 (machine_mode to_mode ATTRIBUTE_UNUSED,
				     rtx x, addr_space_t as ATTRIBUTE_UNUSED,
				     bool in_const ATTRIBUTE_UNUSED)
{
#ifndef POINTERS_EXTEND_UNSIGNED
  gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
  return x;
#else /* defined(POINTERS_EXTEND_UNSIGNED) */
  machine_mode pointer_mode, address_mode, from_mode;
  rtx temp;
  enum rtx_code code;

  /* If X already has the right mode, just return it.  */
  if (GET_MODE (x) == to_mode)
    return x;

  pointer_mode = targetm.addr_space.pointer_mode (as);
  address_mode = targetm.addr_space.address_mode (as);
  from_mode = to_mode == pointer_mode ? address_mode : pointer_mode;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    {
    CASE_CONST_SCALAR_INT:
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
	code = TRUNCATE;
      else if (POINTERS_EXTEND_UNSIGNED < 0)
	break;
      else if (POINTERS_EXTEND_UNSIGNED > 0)
	code = ZERO_EXTEND;
      else
	code = SIGN_EXTEND;
      temp = simplify_unary_operation (code, to_mode, x, from_mode);
      if (temp)
	return temp;
      break;

    case SUBREG:
      if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
	  && GET_MODE (SUBREG_REG (x)) == to_mode)
	return SUBREG_REG (x);
      break;

    case LABEL_REF:
      temp = gen_rtx_LABEL_REF (to_mode, LABEL_REF_LABEL (x));
      LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
      return temp;

    case SYMBOL_REF:
      temp = shallow_copy_rtx (x);
      PUT_MODE (temp, to_mode);
      return temp;

    case CONST:
      return gen_rtx_CONST (to_mode,
			    convert_memory_address_addr_space_1
			      (to_mode, XEXP (x, 0), as, true));

    case PLUS:
    case MULT:
      /* For addition we can safely permute the conversion and addition
	 operation if one operand is a constant and converting the constant
	 does not change it or if one operand is a constant and we are
	 using a ptr_extend instruction (POINTERS_EXTEND_UNSIGNED < 0).
	 We can always safely permute them if we are making the address
	 narrower.  Inside a CONST RTL, this is safe for both pointers
	 zero or sign extended as pointers cannot wrap.  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
	  || (GET_CODE (x) == PLUS
	      && CONST_INT_P (XEXP (x, 1))
	      && ((in_const && POINTERS_EXTEND_UNSIGNED != 0)
		  || XEXP (x, 1) == convert_memory_address_addr_space_1
				      (to_mode, XEXP (x, 1), as, in_const)
		  || POINTERS_EXTEND_UNSIGNED < 0)))
	return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
			       convert_memory_address_addr_space_1
				 (to_mode, XEXP (x, 0), as, in_const),
			       XEXP (x, 1));
      break;

    default:
      break;
    }

  return convert_modes (to_mode, from_mode,
			x, POINTERS_EXTEND_UNSIGNED);
#endif /* defined(POINTERS_EXTEND_UNSIGNED) */
}

/* Given X, a memory address in address space AS' pointer mode, convert it to
   an address in the address space's address mode, or vice versa (TO_MODE says
   which way).  We take advantage of the fact that pointers are not allowed to
   overflow by commuting arithmetic operations over conversions so that address
   arithmetic insns can be used.  */

rtx
convert_memory_address_addr_space (machine_mode to_mode, rtx x, addr_space_t as)
{
  return convert_memory_address_addr_space_1 (to_mode, x, as, false);
}

/* Return something equivalent to X but valid as a memory address for something
   of mode MODE in the named address space AS.  When X is not itself valid,
   this works by copying X or subexpressions of it into registers.  */

rtx
memory_address_addr_space (machine_mode mode, rtx x, addr_space_t as)
{
  rtx oldx = x;
  machine_mode address_mode = targetm.addr_space.address_mode (as);

  x = convert_memory_address_addr_space (address_mode, x, as);

  /* By passing constant addresses through registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (address_mode, x);

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && !REG_P (x))
	x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.  */
      if (memory_address_addr_space_p (mode, x, as))
	goto done;

      /* If it was valid before but breaking out memory refs invalidated it,
	 use it the old way.  */
      if (memory_address_addr_space_p (mode, oldx, as))
	{
	  x = oldx;
	  goto done;
	}

      /* Perform machine-dependent transformations on X
	 in certain cases.  This is not necessary since the code
	 below can handle all possible cases, but machine-dependent
	 transformations can make better code.  */
      {
	rtx orig_x = x;
	x = targetm.addr_space.legitimize_address (x, oldx, mode, as);
	if (orig_x != x && memory_address_addr_space_p (mode, x, as))
	  goto done;
      }

      /* PLUS and MULT can appear in special ways
	 as the result of attempts to make an address usable for indexing.
	 Usually they are dealt with by calling force_operand, below.
	 But a sum containing constant terms is special
	 if removing them makes the sum a valid address:
	 then we generate that address in a register
	 and index off of it.  We do this because it often makes
	 shorter code, and because the addresses thus generated
	 in registers often become common subexpressions.  */
      if (GET_CODE (x) == PLUS)
	{
	  rtx constant_term = const0_rtx;
	  rtx y = eliminate_constant_term (x, &constant_term);
	  if (constant_term == const0_rtx
	      || ! memory_address_addr_space_p (mode, y, as))
	    x = force_operand (x, NULL_RTX);
	  else
	    {
	      y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
	      if (! memory_address_addr_space_p (mode, y, as))
		x = force_operand (x, NULL_RTX);
	      else
		x = y;
	    }
	}

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
	x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
	 it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (REG_P (x))
	x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
	 the register is a valid address.  */
      else
	x = force_reg (address_mode, x);
    }

 done:

  gcc_assert (memory_address_addr_space_p (mode, x, as));
  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (REG_P (x))
    mark_reg_pointer (x, BITS_PER_UNIT);
  else if (GET_CODE (x) == PLUS
	   && REG_P (XEXP (x, 0))
	   && CONST_INT_P (XEXP (x, 1)))
    mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}

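/* Illustrative sketch, not from the original sources: legitimizing an
   arbitrary address expression before wrapping it in a MEM.
   memory_address is the generic-address-space wrapper around the
   function above; the function name here is hypothetical.  */
#if 0
static rtx
memory_address_example (rtx base, rtx index)
{
  /* (base + index * 4) may not be a valid address on every target;
     memory_address copies subexpressions into registers as needed.  */
  rtx addr = gen_rtx_PLUS (Pmode, base,
			   gen_rtx_MULT (Pmode, index, GEN_INT (4)));
  return gen_rtx_MEM (SImode, memory_address (SImode, addr));
}
#endif
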
/* If REF is a MEM with an invalid address, change it into a valid address.
   Pass through anything else unchanged.  REF must be an unshared rtx and
   the function may modify it in-place.  */

rtx
validize_mem (rtx ref)
{
  if (!MEM_P (ref))
    return ref;
  ref = use_anchored_address (ref);
  if (memory_address_addr_space_p (GET_MODE (ref), XEXP (ref, 0),
				   MEM_ADDR_SPACE (ref)))
    return ref;

  return replace_equiv_address (ref, XEXP (ref, 0), true);
}

/* If X is a memory reference to a member of an object block, try rewriting
   it to use an anchor instead.  Return the new memory reference on success
   and the old one on failure.  */

rtx
use_anchored_address (rtx x)
{
  rtx base;
  HOST_WIDE_INT offset;
  machine_mode mode;

  if (!flag_section_anchors)
    return x;

  if (!MEM_P (x))
    return x;

  /* Split the address into a base and offset.  */
  base = XEXP (x, 0);
  offset = 0;
  if (GET_CODE (base) == CONST
      && GET_CODE (XEXP (base, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (base, 0), 1)))
    {
      offset += INTVAL (XEXP (XEXP (base, 0), 1));
      base = XEXP (XEXP (base, 0), 0);
    }

  /* Check whether BASE is suitable for anchors.  */
  if (GET_CODE (base) != SYMBOL_REF
      || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
      || SYMBOL_REF_ANCHOR_P (base)
      || SYMBOL_REF_BLOCK (base) == NULL
      || !targetm.use_anchors_for_symbol_p (base))
    return x;

  /* Decide where BASE is going to be.  */
  place_block_symbol (base);

  /* Get the anchor we need to use.  */
  offset += SYMBOL_REF_BLOCK_OFFSET (base);
  base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
			     SYMBOL_REF_TLS_MODEL (base));

  /* Work out the offset from the anchor.  */
  offset -= SYMBOL_REF_BLOCK_OFFSET (base);

  /* If we're going to run a CSE pass, force the anchor into a register.
     We will then be able to reuse registers for several accesses, if the
     target costs say that that's worthwhile.  */
  mode = GET_MODE (base);
  if (!cse_not_expected)
    base = force_reg (mode, base);

  return replace_equiv_address (x, plus_constant (mode, base, offset));
}

/* Copy the value or contents of X to a new temp reg and return that reg.  */

rtx
copy_to_reg (rtx x)
{
  rtx temp = gen_reg_rtx (GET_MODE (x));

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (x != temp)
    emit_move_insn (temp, x);

  return temp;
}

/* Like copy_to_reg but always give the new register mode Pmode
   in case X is a constant.  */

rtx
copy_addr_to_reg (rtx x)
{
  return copy_to_mode_reg (Pmode, x);
}

/* Like copy_to_reg but always give the new register mode MODE
   in case X is a constant.  */

rtx
copy_to_mode_reg (machine_mode mode, rtx x)
{
  rtx temp = gen_reg_rtx (mode);

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
  if (x != temp)
    emit_move_insn (temp, x);
  return temp;
}

/* Load X into a register if it is not already one.
   Use mode MODE for the register.
   X should be valid for mode MODE, but it may be a constant which
   is valid for all integer modes; that's why caller must specify MODE.

   The caller must not alter the value in the register we return,
   since we mark it as a "constant" register.  */

rtx
force_reg (machine_mode mode, rtx x)
{
  rtx temp, set;
  rtx_insn *insn;

  if (REG_P (x))
    return x;

  if (general_operand (x, mode))
    {
      temp = gen_reg_rtx (mode);
      insn = emit_move_insn (temp, x);
    }
  else
    {
      temp = force_operand (x, NULL_RTX);
      if (REG_P (temp))
	insn = get_last_insn ();
      else
	{
	  rtx temp2 = gen_reg_rtx (mode);
	  insn = emit_move_insn (temp2, temp);
	  temp = temp2;
	}
    }

  /* Let optimizers know that TEMP's value never changes
     and that X can be substituted for it.  Don't get confused
     if INSN set something else (such as a SUBREG of TEMP).  */
  if (CONSTANT_P (x)
      && (set = single_set (insn)) != 0
      && SET_DEST (set) == temp
      && ! rtx_equal_p (x, SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, x);

  /* Let optimizers know that TEMP is a pointer, and if so, the
     known alignment of that pointer.  */
  {
    unsigned align = 0;
    if (GET_CODE (x) == SYMBOL_REF)
      {
	align = BITS_PER_UNIT;
	if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
	  align = DECL_ALIGN (SYMBOL_REF_DECL (x));
      }
    else if (GET_CODE (x) == LABEL_REF)
      align = BITS_PER_UNIT;
    else if (GET_CODE (x) == CONST
	     && GET_CODE (XEXP (x, 0)) == PLUS
	     && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
	     && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
      {
	rtx s = XEXP (XEXP (x, 0), 0);
	rtx c = XEXP (XEXP (x, 0), 1);
	unsigned sa, ca;

	sa = BITS_PER_UNIT;
	if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
	  sa = DECL_ALIGN (SYMBOL_REF_DECL (s));

	if (INTVAL (c) == 0)
	  align = sa;
	else
	  {
	    ca = ctz_hwi (INTVAL (c)) * BITS_PER_UNIT;
	    align = MIN (sa, ca);
	  }
      }

    if (align || (MEM_P (x) && MEM_POINTER (x)))
      mark_reg_pointer (temp, align);
  }

  return temp;
}

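/* Illustrative sketch, not from the original sources: forcing a
   symbolic constant into a pseudo.  The REG_EQUAL note added above
   lets later passes substitute the constant back.  The function name
   and symbol name are hypothetical.  */
#if 0
static void
force_reg_example (void)
{
  rtx sym = gen_rtx_SYMBOL_REF (Pmode, "some_global");
  rtx reg = force_reg (Pmode, sym);

  /* An operand that is already a register is returned unchanged.  */
  gcc_assert (force_reg (Pmode, reg) == reg);
}
#endif
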
/* If X is a memory ref, copy its contents to a new temp reg and return
   that reg.  Otherwise, return X.  */

rtx
force_not_mem (rtx x)
{
  rtx temp;

  if (!MEM_P (x) || GET_MODE (x) == BLKmode)
    return x;

  temp = gen_reg_rtx (GET_MODE (x));

  if (MEM_POINTER (x))
    REG_POINTER (temp) = 1;

  emit_move_insn (temp, x);
  return temp;
}

/* Copy X to TARGET (if it's nonzero and a reg)
   or to a new temp reg and return that reg.
   MODE is the mode to use for X in case it is a constant.  */

rtx
copy_to_suggested_reg (rtx x, rtx target, machine_mode mode)
{
  rtx temp;

  if (target && REG_P (target))
    temp = target;
  else
    temp = gen_reg_rtx (mode);

  emit_move_insn (temp, x);
  return temp;
}

/* Return the mode to use to pass or return a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_RETURN is nonzero if the caller is promoting the return value
   of FNDECL, else it is for promoting args.  */

machine_mode
promote_function_mode (const_tree type, machine_mode mode, int *punsignedp,
		       const_tree funtype, int for_return)
{
  /* Called without a type node for a libcall.  */
  if (type == NULL_TREE)
    {
      if (INTEGRAL_MODE_P (mode))
	return targetm.calls.promote_function_mode (NULL_TREE, mode,
						    punsignedp, funtype,
						    for_return);
      else
	return mode;
    }

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
    case POINTER_TYPE:   case REFERENCE_TYPE:
      return targetm.calls.promote_function_mode (type, mode, punsignedp, funtype,
						  for_return);

    default:
      return mode;
    }
}

/* Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.  */

machine_mode
promote_mode (const_tree type ATTRIBUTE_UNUSED, machine_mode mode,
	      int *punsignedp ATTRIBUTE_UNUSED)
{
#ifdef PROMOTE_MODE
  enum tree_code code;
  int unsignedp;
#endif

  /* For libcalls this is invoked without TYPE from the backends
     TARGET_PROMOTE_FUNCTION_MODE hooks.  Don't do anything in that
     case.  */
  if (type == NULL_TREE)
    return mode;

  /* FIXME: this is the same logic that was there until GCC 4.4, but we
     probably want to test POINTERS_EXTEND_UNSIGNED even if PROMOTE_MODE
     is not defined.  The affected targets are M32C, S390, SPARC.  */
#ifdef PROMOTE_MODE
  code = TREE_CODE (type);
  unsignedp = *punsignedp;

  switch (code)
    {
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
      PROMOTE_MODE (mode, unsignedp, type);
      *punsignedp = unsignedp;
      return mode;

#ifdef POINTERS_EXTEND_UNSIGNED
    case REFERENCE_TYPE:
    case POINTER_TYPE:
      *punsignedp = POINTERS_EXTEND_UNSIGNED;
      return targetm.addr_space.address_mode
	       (TYPE_ADDR_SPACE (TREE_TYPE (type)));
#endif

    default:
      return mode;
    }
#else
  return mode;
#endif
}

/* Use one of promote_mode or promote_function_mode to find the promoted
   mode of DECL.  If PUNSIGNEDP is not NULL, store there the unsignedness
   of DECL after promotion.  */

machine_mode
promote_decl_mode (const_tree decl, int *punsignedp)
{
  tree type = TREE_TYPE (decl);
  int unsignedp = TYPE_UNSIGNED (type);
  machine_mode mode = DECL_MODE (decl);
  machine_mode pmode;

  if (TREE_CODE (decl) == RESULT_DECL
      || TREE_CODE (decl) == PARM_DECL)
    pmode = promote_function_mode (type, mode, &unsignedp,
				   TREE_TYPE (current_function_decl), 2);
  else
    pmode = promote_mode (type, mode, &unsignedp);

  if (punsignedp)
    *punsignedp = unsignedp;
  return pmode;
}

/* Controls the behaviour of {anti_,}adjust_stack.  */
static bool suppress_reg_args_size;

/* A helper for adjust_stack and anti_adjust_stack.  */

static void
adjust_stack_1 (rtx adjust, bool anti_p)
{
  rtx temp;
  rtx_insn *insn;

  /* Hereafter anti_p means subtract_p.  */
  if (!STACK_GROWS_DOWNWARD)
    anti_p = !anti_p;

  temp = expand_binop (Pmode,
		       anti_p ? sub_optab : add_optab,
		       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
		       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    insn = emit_move_insn (stack_pointer_rtx, temp);
  else
    {
      insn = get_last_insn ();
      temp = single_set (insn);
      gcc_assert (temp != NULL && SET_DEST (temp) == stack_pointer_rtx);
    }

  if (!suppress_reg_args_size)
    add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
}

/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
   This pops when ADJUST is positive.  ADJUST need not be constant.  */

void
adjust_stack (rtx adjust)
{
  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (CONST_INT_P (adjust))
    stack_pointer_delta -= INTVAL (adjust);

  adjust_stack_1 (adjust, false);
}

/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
   This pushes when ADJUST is positive.  ADJUST need not be constant.  */

void
anti_adjust_stack (rtx adjust)
{
  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (CONST_INT_P (adjust))
    stack_pointer_delta += INTVAL (adjust);

  adjust_stack_1 (adjust, true);
}

/* Round the size of a block to be pushed up to the boundary required
   by this machine.  SIZE is the desired size, which need not be constant.  */

static rtx
round_push (rtx size)
{
  rtx align_rtx, alignm1_rtx;

  if (!SUPPORTS_STACK_ALIGNMENT
      || crtl->preferred_stack_boundary == MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;

      if (align == 1)
	return size;

      if (CONST_INT_P (size))
	{
	  HOST_WIDE_INT new_size = (INTVAL (size) + align - 1) / align * align;

	  if (INTVAL (size) != new_size)
	    size = GEN_INT (new_size);
	  return size;
	}

      align_rtx = GEN_INT (align);
      alignm1_rtx = GEN_INT (align - 1);
    }
  else
    {
      /* If crtl->preferred_stack_boundary might still grow, use
	 virtual_preferred_stack_boundary_rtx instead.  This will be
	 substituted by the right value in vregs pass and optimized
	 during combine.  */
      align_rtx = virtual_preferred_stack_boundary_rtx;
      alignm1_rtx = force_operand (plus_constant (Pmode, align_rtx, -1),
				   NULL_RTX);
    }

  /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
     but we know it can't.  So add ourselves and then do
     TRUNC_DIV_EXPR.  */
  size = expand_binop (Pmode, add_optab, size, alignm1_rtx,
		       NULL_RTX, 1, OPTAB_LIB_WIDEN);
  size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, align_rtx,
			NULL_RTX, 1);
  size = expand_mult (Pmode, size, align_rtx, NULL_RTX, 1);

  return size;
}

/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
   to a previously-created save area.  If no save area has been allocated,
   this function will allocate one.  If a save area is specified, it
   must be of the proper mode.  */

void
emit_stack_save (enum save_level save_level, rtx *psave)
{
  rtx sa = *psave;
  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn_uncast;
  machine_mode mode = STACK_SAVEAREA_MODE (save_level);

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_save_stack_block
    case SAVE_BLOCK:
      if (HAVE_save_stack_block)
	fcn = gen_save_stack_block;
      break;
#endif
#ifdef HAVE_save_stack_function
    case SAVE_FUNCTION:
      if (HAVE_save_stack_function)
	fcn = gen_save_stack_function;
      break;
#endif
#ifdef HAVE_save_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_save_stack_nonlocal)
	fcn = gen_save_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  /* If there is no save area and we have to allocate one, do so.  Otherwise
     verify the save area is the proper mode.  */

  if (sa == 0)
    {
      if (mode != VOIDmode)
	{
	  if (save_level == SAVE_NONLOCAL)
	    *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
	  else
	    *psave = sa = gen_reg_rtx (mode);
	}
    }

  do_pending_stack_adjust ();
  if (sa != 0)
    sa = validize_mem (sa);
  emit_insn (fcn (sa, stack_pointer_rtx));
}

/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
   area made by emit_stack_save.  If it is zero, we have nothing to do.  */

void
emit_stack_restore (enum save_level save_level, rtx sa)
{
  /* The default is that we use a move insn.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn_uncast;

  /* If stack_realign_drap, the x86 backend emits a prologue that aligns both
     STACK_POINTER and HARD_FRAME_POINTER.
     If stack_realign_fp, the x86 backend emits a prologue that aligns only
     STACK_POINTER.  This renders the HARD_FRAME_POINTER unusable for accessing
     aligned variables, which is reflected in ix86_can_eliminate.
     We normally still have the realigned STACK_POINTER that we can use.
     But if there is a stack restore still present at reload, it can trigger
     mark_not_eliminable for the STACK_POINTER, leaving no way to eliminate
     FRAME_POINTER into a hard reg.
     To prevent this situation, we force need_drap if we emit a stack
     restore.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_restore_stack_block
    case SAVE_BLOCK:
      if (HAVE_restore_stack_block)
	fcn = gen_restore_stack_block;
      break;
#endif
#ifdef HAVE_restore_stack_function
    case SAVE_FUNCTION:
      if (HAVE_restore_stack_function)
	fcn = gen_restore_stack_function;
      break;
#endif
#ifdef HAVE_restore_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_restore_stack_nonlocal)
	fcn = gen_restore_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  if (sa != 0)
    {
      sa = validize_mem (sa);
      /* These clobbers prevent the scheduler from moving
	 references to variable arrays below the code
	 that deletes (pops) the arrays.  */
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, stack_pointer_rtx));
    }

  discard_pending_stack_adjust ();

  emit_insn (fcn (stack_pointer_rtx, sa));
}

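/* Illustrative sketch, not from the original sources: the usual
   save/restore pairing around code that moves the stack pointer.
   The function name is hypothetical.  */
#if 0
static void
stack_save_restore_example (void)
{
  rtx save_area = NULL_RTX;

  emit_stack_save (SAVE_BLOCK, &save_area);   /* allocates the area  */
  anti_adjust_stack (GEN_INT (64));           /* push 64 bytes       */
  emit_stack_restore (SAVE_BLOCK, save_area); /* pop them back       */
}
#endif
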
/* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
   function.  This should be called whenever we allocate or deallocate
   dynamic stack space.  */

void
update_nonlocal_goto_save_area (void)
{
  tree t_save;
  rtx r_save;

  /* The nonlocal_goto_save_area object is an array of N pointers.  The
     first one is used for the frame pointer save; the rest are sized by
     STACK_SAVEAREA_MODE.  Create a reference to array index 1, the first
     of the stack save area slots.  */
  t_save = build4 (ARRAY_REF,
		   TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
		   cfun->nonlocal_goto_save_area,
		   integer_one_node, NULL_TREE, NULL_TREE);
  r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);

  emit_stack_save (SAVE_NONLOCAL, &r_save);
}

/* Record a new stack level for the current function.  This should be called
   whenever we allocate or deallocate dynamic stack space.  */

void
record_new_stack_level (void)
{
  /* Record the new stack level for nonlocal gotos.  */
  if (cfun->nonlocal_goto_save_area)
    update_nonlocal_goto_save_area ();

  /* Record the new stack level for SJLJ exceptions.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    update_sjlj_context ();
}

/* Return an rtx representing the address of an area of memory dynamically
   pushed on the stack.

   Any required stack pointer alignment is preserved.

   SIZE is an rtx representing the size of the area.

   SIZE_ALIGN is the alignment (in bits) that we know SIZE has.  This
   parameter may be zero.  If so, a proper value will be extracted
   from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.

   REQUIRED_ALIGN is the alignment (in bits) required for the region
   of memory.

   If CANNOT_ACCUMULATE is set to TRUE, the caller guarantees that the
   stack space allocated by the generated code cannot be added with itself
   in the course of the execution of the function.  It is always safe to
   pass FALSE here and the following criterion is sufficient in order to
   pass TRUE: every path in the CFG that starts at the allocation point and
   loops to it executes the associated deallocation code.  */

rtx
allocate_dynamic_stack_space (rtx size, unsigned size_align,
			      unsigned required_align, bool cannot_accumulate)
{
  HOST_WIDE_INT stack_usage_size = -1;
  rtx_code_label *final_label;
  rtx final_target, target;
  unsigned extra_align = 0;
  bool must_align;

  /* If we're asking for zero bytes, it doesn't matter what we point
     to since we can't dereference it.  But return a reasonable
     address anyway.  */
  if (size == const0_rtx)
    return virtual_stack_dynamic_rtx;

  /* Otherwise, show we're calling alloca or equivalent.  */
  cfun->calls_alloca = 1;

  /* If stack usage info is requested, look into the size we are passed.
     We need to do so this early to avoid the obfuscation that may be
     introduced later by the various alignment operations.  */
  if (flag_stack_usage_info)
    {
      if (CONST_INT_P (size))
	stack_usage_size = INTVAL (size);
      else if (REG_P (size))
	{
	  /* Look into the last emitted insn and see if we can deduce
	     something for the register.  */
	  rtx_insn *insn;
	  rtx set, note;
	  insn = get_last_insn ();
	  if ((set = single_set (insn)) && rtx_equal_p (SET_DEST (set), size))
	    {
	      if (CONST_INT_P (SET_SRC (set)))
		stack_usage_size = INTVAL (SET_SRC (set));
	      else if ((note = find_reg_equal_equiv_note (insn))
		       && CONST_INT_P (XEXP (note, 0)))
		stack_usage_size = INTVAL (XEXP (note, 0));
	    }
	}

      /* If the size is not constant, we can't say anything.  */
      if (stack_usage_size == -1)
	{
	  current_function_has_unbounded_dynamic_stack_size = 1;
	  stack_usage_size = 0;
	}
    }

  /* Ensure the size is in the proper mode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* Adjust SIZE_ALIGN, if needed.  */
  if (CONST_INT_P (size))
    {
      unsigned HOST_WIDE_INT lsb;

      lsb = INTVAL (size);
      lsb &= -lsb;

      /* Watch out for overflow truncating to "unsigned".  */
      if (lsb > UINT_MAX / BITS_PER_UNIT)
	size_align = 1u << (HOST_BITS_PER_INT - 1);
      else
	size_align = (unsigned)lsb * BITS_PER_UNIT;
    }
  else if (size_align < BITS_PER_UNIT)
    size_align = BITS_PER_UNIT;

  /* We can't attempt to minimize alignment necessary, because we don't
     know the final value of preferred_stack_boundary yet while executing
     this code.  */
  if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
    crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;

  /* We will need to ensure that the address we return is aligned to
     REQUIRED_ALIGN.  If STACK_DYNAMIC_OFFSET is defined, we don't
     always know its final value at this point in the compilation (it
     might depend on the size of the outgoing parameter lists, for
     example), so we must align the value to be returned in that case.
     (Note that STACK_DYNAMIC_OFFSET will have a default nonzero value if
     STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).
     We must also do an alignment operation on the returned value if
     the stack pointer alignment is less strict than REQUIRED_ALIGN.

     If we have to align, we must leave space in SIZE for the hole
     that might result from the alignment operation.  */

  must_align = (crtl->preferred_stack_boundary < required_align);
  if (must_align)
    {
      if (required_align > PREFERRED_STACK_BOUNDARY)
	extra_align = PREFERRED_STACK_BOUNDARY;
      else if (required_align > STACK_BOUNDARY)
	extra_align = STACK_BOUNDARY;
      else
	extra_align = BITS_PER_UNIT;
    }

  /* ??? STACK_POINTER_OFFSET is always defined now.  */
#if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET)
  must_align = true;
  extra_align = BITS_PER_UNIT;
#endif

  if (must_align)
    {
      unsigned extra = (required_align - extra_align) / BITS_PER_UNIT;

      size = plus_constant (Pmode, size, extra);
      size = force_operand (size, NULL_RTX);

      if (flag_stack_usage_info)
	stack_usage_size += extra;

      if (extra && size_align > extra_align)
	size_align = extra_align;
    }

  /* Round the size to a multiple of the required stack alignment.
     Since the stack if presumed to be rounded before this allocation,
     this will maintain the required alignment.

     If the stack grows downward, we could save an insn by subtracting
     SIZE from the stack pointer and then aligning the stack pointer.
     The problem with this is that the stack pointer may be unaligned
     between the execution of the subtraction and alignment insns and
     some machines do not allow this.  Even on those that do, some
     signal handlers malfunction if a signal should occur between those
     insns.  Since this is an extremely rare event, we have no reliable
     way of knowing which systems have this problem.  So we avoid even
     momentarily mis-aligning the stack.  */
  if (size_align % MAX_SUPPORTED_STACK_ALIGNMENT != 0)
    {
      size = round_push (size);

      if (flag_stack_usage_info)
	{
	  int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
	  stack_usage_size = (stack_usage_size + align - 1) / align * align;
	}
    }

  target = gen_reg_rtx (Pmode);

  /* The size is supposed to be fully adjusted at this point so record it
     if stack usage info is requested.  */
  if (flag_stack_usage_info)
    {
      current_function_dynamic_stack_size += stack_usage_size;

      /* ??? This is gross but the only safe stance in the absence
	 of stack usage oriented flow analysis.  */
      if (!cannot_accumulate)
	current_function_has_unbounded_dynamic_stack_size = 1;
    }

  final_label = NULL;
  final_target = NULL_RTX;

  /* If we are splitting the stack, we need to ask the backend whether
     there is enough room on the current stack.  If there isn't, or if
     the backend doesn't know how to tell is, then we need to call a
     function to allocate memory in some other way.  This memory will
     be released when we release the current stack segment.  The
     effect is that stack allocation becomes less efficient, but at
     least it doesn't cause a stack overflow.  */
  if (flag_split_stack)
    {
      rtx_code_label *available_label;
      rtx ask, space, func;

      available_label = NULL;

#ifdef HAVE_split_stack_space_check
      if (HAVE_split_stack_space_check)
	{
	  available_label = gen_label_rtx ();

	  /* This instruction will branch to AVAILABLE_LABEL if there
	     are SIZE bytes available on the stack.  */
	  emit_insn (gen_split_stack_space_check (size, available_label));
	}
#endif

      /* The __morestack_allocate_stack_space function will allocate
	 memory using malloc.  If the alignment of the memory returned
	 by malloc does not meet REQUIRED_ALIGN, we increase SIZE to
	 make sure we allocate enough space.  */
      if (MALLOC_ABI_ALIGNMENT >= required_align)
	ask = size;
      else
	{
	  ask = expand_binop (Pmode, add_optab, size,
			      gen_int_mode (required_align / BITS_PER_UNIT - 1,
					    Pmode),
			      NULL_RTX, 1, OPTAB_LIB_WIDEN);
	  must_align = true;
	}

      func = init_one_libfunc ("__morestack_allocate_stack_space");

      space = emit_library_call_value (func, target, LCT_NORMAL, Pmode,
				       1, ask, Pmode);

      if (available_label == NULL_RTX)
	return space;

      final_target = gen_reg_rtx (Pmode);

      emit_move_insn (final_target, space);

      final_label = gen_label_rtx ();
      emit_jump (final_label);

      emit_label (available_label);
    }

  do_pending_stack_adjust ();

  /* We ought to be called always on the toplevel and stack ought to be aligned
     properly.  */
  gcc_assert (!(stack_pointer_delta
		% (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)));

  /* If needed, check that we have the required amount of stack.  Take into
     account what has already been checked.  */
  if (STACK_CHECK_MOVING_SP)
    ;
  else if (flag_stack_check == GENERIC_STACK_CHECK)
    probe_stack_range (STACK_OLD_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE,
		       size);
  else if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK)
    probe_stack_range (STACK_CHECK_PROTECT, size);

  /* Don't let anti_adjust_stack emit notes.  */
  suppress_reg_args_size = true;

  /* Perform the required allocation from the stack.  Some systems do
     this differently than simply incrementing/decrementing from the
     stack pointer, such as acquiring the space by calling malloc().  */
#ifdef HAVE_allocate_stack
  if (HAVE_allocate_stack)
    {
      struct expand_operand ops[2];
      /* We don't have to check against the predicate for operand 0 since
	 TARGET is known to be a pseudo of the proper mode, which must
	 be valid for the operand.  */
      create_fixed_operand (&ops[0], target);
      create_convert_operand_to (&ops[1], size, STACK_SIZE_MODE, true);
      expand_insn (CODE_FOR_allocate_stack, 2, ops);
    }
  else
#endif
    {
      int saved_stack_pointer_delta;

      if (!STACK_GROWS_DOWNWARD)
	emit_move_insn (target, virtual_stack_dynamic_rtx);

      /* Check stack bounds if necessary.  */
      if (crtl->limit_stack)
	{
	  rtx available;
	  rtx_code_label *space_available = gen_label_rtx ();
	  if (STACK_GROWS_DOWNWARD)
	    available = expand_binop (Pmode, sub_optab,
				      stack_pointer_rtx, stack_limit_rtx,
				      NULL_RTX, 1, OPTAB_WIDEN);
	  else
	    available = expand_binop (Pmode, sub_optab,
				      stack_limit_rtx, stack_pointer_rtx,
				      NULL_RTX, 1, OPTAB_WIDEN);

	  emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
				   space_available);
#ifdef HAVE_trap
	  if (HAVE_trap)
	    emit_insn (gen_trap ());
	  else
#endif
	    error ("stack limits not supported on this target");
	  emit_barrier ();
	  emit_label (space_available);
	}

      saved_stack_pointer_delta = stack_pointer_delta;

      if (flag_stack_check && STACK_CHECK_MOVING_SP)
	anti_adjust_stack_and_probe (size, false);
      else
	anti_adjust_stack (size);

      /* Even if size is constant, don't modify stack_pointer_delta.
	 The constant size alloca should preserve
	 crtl->preferred_stack_boundary alignment.  */
      stack_pointer_delta = saved_stack_pointer_delta;

      if (STACK_GROWS_DOWNWARD)
	emit_move_insn (target, virtual_stack_dynamic_rtx);
    }

  suppress_reg_args_size = false;

  /* Finish up the split stack handling.  */
  if (final_label != NULL_RTX)
    {
      gcc_assert (flag_split_stack);
      emit_move_insn (final_target, target);
      emit_label (final_label);
      target = final_target;
    }

  if (must_align)
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
	 but we know it can't.  So add ourselves and then do
	 TRUNC_DIV_EXPR.  */
      target = expand_binop (Pmode, add_optab, target,
			     gen_int_mode (required_align / BITS_PER_UNIT - 1,
					   Pmode),
			     NULL_RTX, 1, OPTAB_LIB_WIDEN);
      target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
			      gen_int_mode (required_align / BITS_PER_UNIT,
					    Pmode),
			      NULL_RTX, 1);
      target = expand_mult (Pmode, target,
			    gen_int_mode (required_align / BITS_PER_UNIT,
					  Pmode), NULL_RTX, 1);
    }

  /* Now that we've committed to a return value, mark its alignment.  */
  mark_reg_pointer (target, required_align);

  /* Record the new stack level.  */
  record_new_stack_level ();

  return target;
}

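/* Illustrative sketch, not from the original sources: roughly what a
   caller expanding an alloca of SIZE bytes might do.  The function
   name and argument choices are hypothetical.  */
#if 0
static rtx
alloca_example (rtx size)
{
  /* The final FALSE says the allocation may accumulate, e.g. if it
     sits in a loop whose paths do not always free the space.  */
  rtx addr = allocate_dynamic_stack_space (size, BITS_PER_UNIT,
					   BIGGEST_ALIGNMENT, false);
  return gen_rtx_MEM (BLKmode, addr);
}
#endif
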
/* A front end may want to override GCC's stack checking by providing a
   run-time routine to call to check the stack, so provide a mechanism for
   calling that routine.  */

static GTY(()) rtx stack_check_libfunc;

void
set_stack_check_libfunc (const char *libfunc_name)
{
  gcc_assert (stack_check_libfunc == NULL_RTX);
  stack_check_libfunc = gen_rtx_SYMBOL_REF (Pmode, libfunc_name);
}

/* Emit one stack probe at ADDRESS, an address within the stack.  */

static void
emit_stack_probe (rtx address)
{
#ifdef HAVE_probe_stack_address
  if (HAVE_probe_stack_address)
    emit_insn (gen_probe_stack_address (address));
  else
#endif
    {
      rtx memref = gen_rtx_MEM (word_mode, address);

      MEM_VOLATILE_P (memref) = 1;

      /* See if we have an insn to probe the stack.  */
#ifdef HAVE_probe_stack
      if (HAVE_probe_stack)
	emit_insn (gen_probe_stack (memref));
      else
#endif
	emit_move_insn (memref, const0_rtx);
    }
}

/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
   FIRST is a constant and size is a Pmode RTX.  These are offsets from
   the current stack pointer.  STACK_GROWS_DOWNWARD says whether to add
   or subtract them from the stack pointer.  */

#define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP)

#if STACK_GROWS_DOWNWARD
#define STACK_GROW_OP MINUS
#define STACK_GROW_OPTAB sub_optab
#define STACK_GROW_OFF(off) -(off)
#else
#define STACK_GROW_OP PLUS
#define STACK_GROW_OPTAB add_optab
#define STACK_GROW_OFF(off) (off)
#endif

void
probe_stack_range (HOST_WIDE_INT first, rtx size)
{
  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* Next see if we have a function to check the stack.  */
  if (stack_check_libfunc)
    {
      rtx addr = memory_address (Pmode,
				 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
						 stack_pointer_rtx,
						 plus_constant (Pmode,
								size, first)));
      emit_library_call (stack_check_libfunc, LCT_NORMAL, VOIDmode, 1, addr,
			 Pmode);
    }

  /* Next see if we have an insn to check the stack.  */
#ifdef HAVE_check_stack
  else if (HAVE_check_stack)
    {
      struct expand_operand ops[1];
      rtx addr = memory_address (Pmode,
				 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
						 stack_pointer_rtx,
						 plus_constant (Pmode,
								size, first)));
      bool success;
      create_input_operand (&ops[0], addr, Pmode);
      success = maybe_expand_insn (CODE_FOR_check_stack, 1, ops);
      gcc_assert (success);
    }
#endif

  /* Otherwise we have to generate explicit probes.  If we have a constant
     small number of them to generate, that's the easy case.  */
  else if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
    {
      HOST_WIDE_INT isize = INTVAL (size), i;
      rtx addr;

      /* Probe at FIRST + N * PROBE_INTERVAL for values of N from 1 until
	 it exceeds SIZE.  If only one probe is needed, this will not
	 generate any code.  Then probe at FIRST + SIZE.  */
      for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
	{
	  addr = memory_address (Pmode,
				 plus_constant (Pmode, stack_pointer_rtx,
						STACK_GROW_OFF (first + i)));
	  emit_stack_probe (addr);
	}

      addr = memory_address (Pmode,
			     plus_constant (Pmode, stack_pointer_rtx,
					    STACK_GROW_OFF (first + isize)));
      emit_stack_probe (addr);
    }

  /* In the variable case, do the same as above, but in a loop.  Note that we
     must be extra careful with variables wrapping around because we might be
     at the very top (or the very bottom) of the address space and we have to
     be able to handle this case properly; in particular, we use an equality
     test for the loop condition.  */
  else
    {
      rtx rounded_size, rounded_size_op, test_addr, last_addr, temp;
      rtx_code_label *loop_lab = gen_label_rtx ();
      rtx_code_label *end_lab = gen_label_rtx ();

      /* Step 1: round SIZE to the previous multiple of the interval.  */

      /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
      rounded_size
	= simplify_gen_binary (AND, Pmode, size,
			       gen_int_mode (-PROBE_INTERVAL, Pmode));
      rounded_size_op = force_operand (rounded_size, NULL_RTX);

      /* Step 2: compute initial and final value of the loop counter.  */

      /* TEST_ADDR = SP + FIRST.  */
      test_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
						 stack_pointer_rtx,
						 gen_int_mode (first, Pmode)),
				 NULL_RTX);

      /* LAST_ADDR = SP + FIRST + ROUNDED_SIZE.  */
      last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
						 test_addr,
						 rounded_size_op), NULL_RTX);

      /* Step 3: the loop

	 while (TEST_ADDR != LAST_ADDR)
	   {
	     TEST_ADDR = TEST_ADDR + PROBE_INTERVAL
	     probe at TEST_ADDR
	   }

	 probes at FIRST + N * PROBE_INTERVAL for values of N from 1
	 until it is equal to ROUNDED_SIZE.  */

      emit_label (loop_lab);

      /* Jump to END_LAB if TEST_ADDR == LAST_ADDR.  */
      emit_cmp_and_jump_insns (test_addr, last_addr, EQ, NULL_RTX, Pmode, 1,
			       end_lab);

      /* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL.  */
      temp = expand_binop (Pmode, STACK_GROW_OPTAB, test_addr,
			   gen_int_mode (PROBE_INTERVAL, Pmode), test_addr,
			   1, OPTAB_WIDEN);

      gcc_assert (temp == test_addr);

      /* Probe at TEST_ADDR.  */
      emit_stack_probe (test_addr);

      emit_jump (loop_lab);

      emit_label (end_lab);

      /* Step 4: probe at FIRST + SIZE if we cannot assert at compile-time
	 that SIZE is equal to ROUNDED_SIZE.  */

      /* TEMP = SIZE - ROUNDED_SIZE.  */
      temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
      if (temp != const0_rtx)
	{
	  rtx addr;

	  if (CONST_INT_P (temp))
	    {
	      /* Use [base + disp] addressing mode if supported.  */
	      HOST_WIDE_INT offset = INTVAL (temp);
	      addr = memory_address (Pmode,
				     plus_constant (Pmode, last_addr,
						    STACK_GROW_OFF (offset)));
	    }
	  else
	    {
	      /* Manual CSE if the difference is not known at compile-time.  */
	      temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
	      addr = memory_address (Pmode,
				     gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
						     last_addr, temp));
	    }

	  emit_stack_probe (addr);
	}
    }

  /* Make sure nothing is scheduled before we are done.  */
  emit_insn (gen_blockage ());
}

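/* Illustrative sketch, not from the original sources: the Step 1
   rounding used by the loop above.  Because PROBE_INTERVAL is a power
   of two, ANDing with its negation rounds down to a multiple of it.
   The function name is hypothetical.  */
#if 0
static HOST_WIDE_INT
round_down_example (HOST_WIDE_INT size, HOST_WIDE_INT interval)
{
  /* e.g. size = 10000, interval = 4096 gives 8192; the remaining
     10000 - 8192 bytes are covered by the final probe in Step 4.  */
  return size & -interval;
}
#endif
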
/* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
   while probing it.  This pushes when SIZE is positive.  SIZE need not
   be constant.  If ADJUST_BACK is true, adjust back the stack pointer
   by plus SIZE at the end.  */

void
anti_adjust_stack_and_probe (rtx size, bool adjust_back)
{
  /* We skip the probe for the first interval + a small dope of 4 words and
     probe that many bytes past the specified size to maintain a protection
     area at the bottom of the stack.  */
  const int dope = 4 * UNITS_PER_WORD;

  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* If we have a constant small number of probes to generate, that's the
     easy case.  */
  if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
    {
      HOST_WIDE_INT isize = INTVAL (size), i;
      bool first_probe = true;

      /* Adjust SP and probe at PROBE_INTERVAL + N * PROBE_INTERVAL for
	 values of N from 1 until it exceeds SIZE.  If only one probe is
	 needed, this will not generate any code.  Then adjust and probe
	 to PROBE_INTERVAL + SIZE.  */
      for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
	{
	  if (first_probe)
	    {
	      anti_adjust_stack (GEN_INT (2 * PROBE_INTERVAL + dope));
	      first_probe = false;
	    }
	  else
	    anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
	  emit_stack_probe (stack_pointer_rtx);
	}

      if (first_probe)
	anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
      else
	anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL - i));
      emit_stack_probe (stack_pointer_rtx);
    }

  /* In the variable case, do the same as above, but in a loop.  Note that we
     must be extra careful with variables wrapping around because we might be
     at the very top (or the very bottom) of the address space and we have to
     be able to handle this case properly; in particular, we use an equality
     test for the loop condition.  */
  else
    {
      rtx rounded_size, rounded_size_op, last_addr, temp;
      rtx_code_label *loop_lab = gen_label_rtx ();
      rtx_code_label *end_lab = gen_label_rtx ();

      /* Step 1: round SIZE to the previous multiple of the interval.  */

      /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
      rounded_size
	= simplify_gen_binary (AND, Pmode, size,
			       gen_int_mode (-PROBE_INTERVAL, Pmode));
      rounded_size_op = force_operand (rounded_size, NULL_RTX);

      /* Step 2: compute initial and final value of the loop counter.  */

      /* SP = SP_0 + PROBE_INTERVAL.  */
      anti_adjust_stack (GEN_INT (PROBE_INTERVAL + dope));

      /* LAST_ADDR = SP_0 + PROBE_INTERVAL + ROUNDED_SIZE.  */
      last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
						 stack_pointer_rtx,
						 rounded_size_op), NULL_RTX);

      /* Step 3: the loop

	 while (SP != LAST_ADDR)
	   {
	     SP = SP + PROBE_INTERVAL
	     probe at SP
	   }

	 adjusts SP and probes at PROBE_INTERVAL + N * PROBE_INTERVAL for
	 values of N from 1 until it is equal to ROUNDED_SIZE.  */

      emit_label (loop_lab);

      /* Jump to END_LAB if SP == LAST_ADDR.  */
      emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
			       Pmode, 1, end_lab);

      /* SP = SP + PROBE_INTERVAL and probe at SP.  */
      anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
      emit_stack_probe (stack_pointer_rtx);

      emit_jump (loop_lab);

      emit_label (end_lab);

      /* Step 4: adjust SP and probe at PROBE_INTERVAL + SIZE if we cannot
	 assert at compile-time that SIZE is equal to ROUNDED_SIZE.  */

      /* TEMP = SIZE - ROUNDED_SIZE.  */
      temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
      if (temp != const0_rtx)
	{
	  /* Manual CSE if the difference is not known at compile-time.  */
	  if (GET_CODE (temp) != CONST_INT)
	    temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
	  anti_adjust_stack (temp);
	  emit_stack_probe (stack_pointer_rtx);
	}
    }

  /* Adjust back and account for the additional first interval.  */
  if (adjust_back)
    adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
  else
    adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
}

/* Return an rtx representing the register or memory location
   in which a scalar value of data type VALTYPE
   was returned by a function call to function FUNC.
   FUNC is a FUNCTION_DECL, FNTYPE a FUNCTION_TYPE node if the precise
   function is known, otherwise 0.
   OUTGOING is 1 if on a machine with register windows this function
   should return the register in which the function will put its result
   and 0 otherwise.  */

rtx
hard_function_value (const_tree valtype, const_tree func, const_tree fntype,
		     int outgoing ATTRIBUTE_UNUSED)
{
  rtx val;

  val = targetm.calls.function_value (valtype, func ? func : fntype, outgoing);

  if (REG_P (val)
      && GET_MODE (val) == BLKmode)
    {
      unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
      machine_mode tmpmode;

      /* int_size_in_bytes can return -1.  We don't need a check here
	 since the value of bytes will then be large enough that no
	 mode will match anyway.  */

      for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
	   tmpmode != VOIDmode;
	   tmpmode = GET_MODE_WIDER_MODE (tmpmode))
	{
	  /* Have we found a large enough mode?  */
	  if (GET_MODE_SIZE (tmpmode) >= bytes)
	    break;
	}

      /* No suitable mode found.  */
      gcc_assert (tmpmode != VOIDmode);

      PUT_MODE (val, tmpmode);
    }
  return val;
}

/* Return an rtx representing the register or memory location
   in which a scalar value of mode MODE was returned by a library call.  */

rtx
hard_libcall_value (machine_mode mode, rtx fun)
{
  return targetm.calls.libcall_value (mode, fun);
}

/* Look up the tree code for a given rtx code
   to provide the arithmetic operation for REAL_ARITHMETIC.
   The function returns an int because the caller may not know
   what `enum tree_code' means.  */

int
rtx_to_tree_code (enum rtx_code code)
{
  enum tree_code tcode;

  switch (code)
    {
    case PLUS:
      tcode = PLUS_EXPR;
      break;
    case MINUS:
      tcode = MINUS_EXPR;
      break;
    case MULT:
      tcode = MULT_EXPR;
      break;
    case DIV:
      tcode = RDIV_EXPR;
      break;
    case SMIN:
      tcode = MIN_EXPR;
      break;
    case SMAX:
      tcode = MAX_EXPR;
      break;
    default:
      tcode = LAST_AND_UNUSED_TREE_CODE;
      break;
    }
  return ((int) tcode);
}

#include "gt-explow.h"