7fa774cd 1/* Subroutines for manipulating rtx's in semantically interesting ways.
d353bf18 2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
7fa774cd 3
f12b58b3 4This file is part of GCC.
7fa774cd 5
f12b58b3 6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8c4c00c1 8Software Foundation; either version 3, or (at your option) any later
f12b58b3 9version.
7fa774cd 10
f12b58b3 11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
7fa774cd 15
16You should have received a copy of the GNU General Public License
8c4c00c1 17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
7fa774cd 19
20
21#include "config.h"
405711de 22#include "system.h"
805e22b2 23#include "coretypes.h"
24#include "tm.h"
0b205f4c 25#include "diagnostic-core.h"
7fa774cd 26#include "rtl.h"
b20a8bb4 27#include "alias.h"
28#include "symtab.h"
7fa774cd 29#include "tree.h"
9ed99284 30#include "stor-layout.h"
7953c610 31#include "tm_p.h"
7fa774cd 32#include "flags.h"
4852b829 33#include "except.h"
a3020f2f 34#include "hard-reg-set.h"
0a893c29 35#include "function.h"
d53441c8 36#include "insn-config.h"
37#include "expmed.h"
38#include "dojump.h"
39#include "explow.h"
40#include "calls.h"
41#include "emit-rtl.h"
42#include "varasm.h"
43#include "stmt.h"
7fa774cd 44#include "expr.h"
34517c64 45#include "insn-codes.h"
d8fc4d0b 46#include "optabs.h"
e3805e9e 47#include "libfuncs.h"
7fa774cd 48#include "recog.h"
c3f16ae3 49#include "langhooks.h"
46b3ff29 50#include "target.h"
218e3e4e 51#include "common/common-target.h"
f2d0e9f1 52#include "output.h"
7fa774cd 53
35cb5232 54static rtx break_out_memory_refs (rtx);
b2345915 55
56
57/* Truncate and perhaps sign-extend C as appropriate for MODE. */
58
59HOST_WIDE_INT
3754d046 60trunc_int_for_mode (HOST_WIDE_INT c, machine_mode mode)
b2345915 61{
ded805e6 62 int width = GET_MODE_PRECISION (mode);
b2345915 63
dd067362 64 /* You want to truncate to a _what_? */
058a1b7a 65 gcc_assert (SCALAR_INT_MODE_P (mode)
66 || POINTER_BOUNDS_MODE_P (mode));
dd067362 67
dea049dc 68 /* Canonicalize BImode to 0 and STORE_FLAG_VALUE. */
69 if (mode == BImode)
70 return c & 1 ? STORE_FLAG_VALUE : 0;
71
679dcb76 72 /* Sign-extend for the requested mode. */
73
74 if (width < HOST_BITS_PER_WIDE_INT)
75 {
76 HOST_WIDE_INT sign = 1;
77 sign <<= width - 1;
78 c &= (sign << 1) - 1;
79 c ^= sign;
80 c -= sign;
81 }
b2345915 82
83 return c;
84}
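/* Worked example (annotation, not part of the original file): for QImode,
   GET_MODE_PRECISION is 8, so WIDTH = 8 and SIGN = 0x80.  Truncating
   C = 0x1ff then proceeds as

     c &= (sign << 1) - 1;    c = 0x1ff & 0xff = 0xff
     c ^= sign;               c = 0xff ^ 0x80  = 0x7f
     c -= sign;               c = 0x7f - 0x80  = -1

   i.e. the low 8 bits of C reinterpreted as a signed 8-bit value.  */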
85
db20fb47 86/* Return an rtx for the sum of X and the integer C, given that X has
5cc04e45 87 mode MODE. INPLACE is true if X can be modified inplace or false
88 if it must be treated as immutable. */
7fa774cd 89
90rtx
3754d046 91plus_constant (machine_mode mode, rtx x, HOST_WIDE_INT c,
5cc04e45 92 bool inplace)
7fa774cd 93{
19cb6b50 94 RTX_CODE code;
a42e6220 95 rtx y;
19cb6b50 96 rtx tem;
7fa774cd 97 int all_constant = 0;
98
29c05e22 99 gcc_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
100
7fa774cd 101 if (c == 0)
102 return x;
103
104 restart:
105
106 code = GET_CODE (x);
a42e6220 107 y = x;
108
7fa774cd 109 switch (code)
110 {
e913b5cd 111 CASE_CONST_SCALAR_INT:
796b6678 112 return immed_wide_int_const (wi::add (std::make_pair (x, mode), c),
113 mode);
7fa774cd 114 case MEM:
115 /* If this is a reference to the constant pool, try replacing it with
116 a reference to a new constant. If the resulting address isn't
117 valid, don't return it because we have no way to validize it. */
118 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
119 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
120 {
29c05e22 121 tem = plus_constant (mode, get_pool_constant (XEXP (x, 0)), c);
db20fb47 122 tem = force_const_mem (GET_MODE (x), tem);
2effb064 123 /* Targets may disallow some constants in the constant pool, thus
124 force_const_mem may return NULL_RTX. */
125 if (tem && memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
7fa774cd 126 return tem;
127 }
128 break;
129
130 case CONST:
131 /* If adding to something entirely constant, set a flag
132 so that we can add a CONST around the result. */
5cc04e45 133 if (inplace && shared_const_p (x))
134 inplace = false;
7fa774cd 135 x = XEXP (x, 0);
136 all_constant = 1;
137 goto restart;
138
139 case SYMBOL_REF:
140 case LABEL_REF:
141 all_constant = 1;
142 break;
143
144 case PLUS:
db20fb47 145 /* The interesting case is adding the integer to a sum. Look
146 for constant term in the sum and combine with C. For an
147 integer constant term or a constant term that is not an
148 explicit integer, we combine or group them together anyway.
986b0677 149
150 We may not immediately return from the recursive call here, lest
151 all_constant get lost. */
530f560b 152
db20fb47 153 if (CONSTANT_P (XEXP (x, 1)))
986b0677 154 {
5cc04e45 155 rtx term = plus_constant (mode, XEXP (x, 1), c, inplace);
156 if (term == const0_rtx)
157 x = XEXP (x, 0);
158 else if (inplace)
159 XEXP (x, 1) = term;
160 else
161 x = gen_rtx_PLUS (mode, XEXP (x, 0), term);
986b0677 162 c = 0;
163 }
5cc04e45 164 else if (rtx *const_loc = find_constant_term_loc (&y))
986b0677 165 {
5cc04e45 166 if (!inplace)
167 {
168 /* We need to be careful since X may be shared and we can't
169 modify it in place. */
170 x = copy_rtx (x);
171 const_loc = find_constant_term_loc (&x);
172 }
173 *const_loc = plus_constant (mode, *const_loc, c, true);
986b0677 174 c = 0;
175 }
941522d6 176 break;
b244d4c7 177
941522d6 178 default:
179 break;
7fa774cd 180 }
181
182 if (c != 0)
c338f2e3 183 x = gen_rtx_PLUS (mode, x, gen_int_mode (c, mode));
7fa774cd 184
185 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
186 return x;
187 else if (all_constant)
941522d6 188 return gen_rtx_CONST (mode, x);
7fa774cd 189 else
190 return x;
191}
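/* Usage sketch (annotation; ADDR is a hypothetical caller-held address):

     rtx addr2 = plus_constant (Pmode, addr, 8);

   This folds the offset into an existing CONST_INT, CONST or PLUS where
   possible instead of always wrapping ADDR in a fresh PLUS, and with
   INPLACE defaulted to false it never modifies ADDR itself.  */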
7fa774cd 192\f
193/* If X is a sum, return a new sum like X but lacking any constant terms.
194 Add all the removed constant terms into *CONSTPTR.
195 X itself is not altered. The result != X if and only if
196 it is not isomorphic to X. */
197
198rtx
35cb5232 199eliminate_constant_term (rtx x, rtx *constptr)
7fa774cd 200{
19cb6b50 201 rtx x0, x1;
7fa774cd 202 rtx tem;
203
204 if (GET_CODE (x) != PLUS)
205 return x;
206
207 /* First handle constants appearing at this level explicitly. */
971ba038 208 if (CONST_INT_P (XEXP (x, 1))
7fa774cd 209 && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
210 XEXP (x, 1)))
971ba038 211 && CONST_INT_P (tem))
7fa774cd 212 {
213 *constptr = tem;
214 return eliminate_constant_term (XEXP (x, 0), constptr);
215 }
216
217 tem = const0_rtx;
218 x0 = eliminate_constant_term (XEXP (x, 0), &tem);
219 x1 = eliminate_constant_term (XEXP (x, 1), &tem);
220 if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
221 && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
222 *constptr, tem))
971ba038 223 && CONST_INT_P (tem))
7fa774cd 224 {
225 *constptr = tem;
941522d6 226 return gen_rtx_PLUS (GET_MODE (x), x0, x1);
7fa774cd 227 }
228
229 return x;
230}
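/* Example (annotation): for X = (plus (plus (reg A) (const_int 4))
   (const_int 8)) and *CONSTPTR = const0_rtx, the function returns
   (reg A) and leaves *CONSTPTR = (const_int 12); X itself may be shared
   and is therefore never modified.  */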
231
7fa774cd 232\f
233/* Return a copy of X in which all memory references
234 and all constants that involve symbol refs
235 have been replaced with new temporary registers.
236 Also emit code to load the memory locations and constants
237 into those registers.
238
239 If X contains no such constants or memory references,
240 X itself (not a copy) is returned.
241
242 If a constant is found in the address that is not a legitimate constant
243 in an insn, it is left alone in the hope that it might be valid in the
244 address.
245
246 X may contain no arithmetic except addition, subtraction and multiplication.
247 Values returned by expand_expr with 1 for sum_ok fit this constraint. */
248
249static rtx
35cb5232 250break_out_memory_refs (rtx x)
7fa774cd 251{
e16ceb8e 252 if (MEM_P (x)
e6f7d557 253 || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
7fa774cd 254 && GET_MODE (x) != VOIDmode))
46c86782 255 x = force_reg (GET_MODE (x), x);
7fa774cd 256 else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
257 || GET_CODE (x) == MULT)
258 {
19cb6b50 259 rtx op0 = break_out_memory_refs (XEXP (x, 0));
260 rtx op1 = break_out_memory_refs (XEXP (x, 1));
46c86782 261
7fa774cd 262 if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
98155838 263 x = simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
7fa774cd 264 }
46c86782 265
7fa774cd 266 return x;
267}
268
98155838 269/* Given X, a memory address in address space AS' pointer mode, convert it to
270 an address in the address space's address mode, or vice versa (TO_MODE says
271 which way). We take advantage of the fact that pointers are not allowed to
272 overflow by commuting arithmetic operations over conversions so that address
98396fac 273 arithmetic insns can be used. IN_CONST is true if this conversion is inside
274 a CONST. */
184aad03 275
98396fac 276static rtx
3754d046 277convert_memory_address_addr_space_1 (machine_mode to_mode ATTRIBUTE_UNUSED,
98396fac 278 rtx x, addr_space_t as ATTRIBUTE_UNUSED,
b8f31768 279 bool in_const ATTRIBUTE_UNUSED)
184aad03 280{
85d654dd 281#ifndef POINTERS_EXTEND_UNSIGNED
42f5572e 282 gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
85d654dd 283 return x;
284#else /* defined(POINTERS_EXTEND_UNSIGNED) */
3754d046 285 machine_mode pointer_mode, address_mode, from_mode;
4a8e9301 286 rtx temp;
e5716f7e 287 enum rtx_code code;
4a8e9301 288
85d654dd 289 /* If X already has the right mode, just return it. */
290 if (GET_MODE (x) == to_mode)
291 return x;
292
98155838 293 pointer_mode = targetm.addr_space.pointer_mode (as);
294 address_mode = targetm.addr_space.address_mode (as);
295 from_mode = to_mode == pointer_mode ? address_mode : pointer_mode;
85d654dd 296
52df3724 297 /* Here we handle some special cases. If none of them apply, fall through
298 to the default case. */
184aad03 299 switch (GET_CODE (x))
300 {
0349edce 301 CASE_CONST_SCALAR_INT:
e5716f7e 302 if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
303 code = TRUNCATE;
304 else if (POINTERS_EXTEND_UNSIGNED < 0)
305 break;
306 else if (POINTERS_EXTEND_UNSIGNED > 0)
307 code = ZERO_EXTEND;
308 else
309 code = SIGN_EXTEND;
310 temp = simplify_unary_operation (code, to_mode, x, from_mode);
311 if (temp)
312 return temp;
313 break;
4a8e9301 314
bc17f7a4 315 case SUBREG:
9fd73f31 316 if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
3cc092f7 317 && GET_MODE (SUBREG_REG (x)) == to_mode)
bc17f7a4 318 return SUBREG_REG (x);
319 break;
320
184aad03 321 case LABEL_REF:
b49f2e4b 322 temp = gen_rtx_LABEL_REF (to_mode, LABEL_REF_LABEL (x));
9fd73f31 323 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
324 return temp;
3cc092f7 325 break;
4a8e9301 326
184aad03 327 case SYMBOL_REF:
ae5242d3 328 temp = shallow_copy_rtx (x);
329 PUT_MODE (temp, to_mode);
9fd73f31 330 return temp;
3cc092f7 331 break;
184aad03 332
4a8e9301 333 case CONST:
9fd73f31 334 return gen_rtx_CONST (to_mode,
98396fac 335 convert_memory_address_addr_space_1
336 (to_mode, XEXP (x, 0), as, true));
3cc092f7 337 break;
184aad03 338
52df3724 339 case PLUS:
340 case MULT:
aeb17b4b 341 /* For addition we can safely permute the conversion and addition
342 operation if one operand is a constant and converting the constant
343 does not change it or if one operand is a constant and we are
344 using a ptr_extend instruction (POINTERS_EXTEND_UNSIGNED < 0).
2800ac1a 345 We can always safely permute them if we are making the address
98396fac 346 narrower. Inside a CONST RTL, this is safe whether pointers are
347 zero- or sign-extended, as pointers cannot wrap. */
e5716f7e 348 if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
349 || (GET_CODE (x) == PLUS
971ba038 350 && CONST_INT_P (XEXP (x, 1))
98396fac 351 && ((in_const && POINTERS_EXTEND_UNSIGNED != 0)
352 || XEXP (x, 1) == convert_memory_address_addr_space_1
353 (to_mode, XEXP (x, 1), as, in_const)
354 || POINTERS_EXTEND_UNSIGNED < 0)))
6be48139 355 return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
98396fac 356 convert_memory_address_addr_space_1
357 (to_mode, XEXP (x, 0), as, in_const),
e5716f7e 358 XEXP (x, 1));
941522d6 359 break;
6be48139 360
941522d6 361 default:
362 break;
184aad03 363 }
52df3724 364
365 return convert_modes (to_mode, from_mode,
366 x, POINTERS_EXTEND_UNSIGNED);
85d654dd 367#endif /* defined(POINTERS_EXTEND_UNSIGNED) */
184aad03 368}
98396fac 369
370/* Given X, a memory address in address space AS' pointer mode, convert it to
371 an address in the address space's address mode, or vice versa (TO_MODE says
372 which way). We take advantage of the fact that pointers are not allowed to
373 overflow by commuting arithmetic operations over conversions so that address
374 arithmetic insns can be used. */
375
376rtx
3754d046 377convert_memory_address_addr_space (machine_mode to_mode, rtx x, addr_space_t as)
98396fac 378{
379 return convert_memory_address_addr_space_1 (to_mode, x, as, false);
380}
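/* Example (annotation): on a target whose pointers are narrower than its
   addresses (say SImode pointers, DImode addresses), converting
   (symbol_ref:SI "x") to DImode just re-creates the SYMBOL_REF in DImode,
   while a non-constant address falls through to convert_modes and is
   zero- or sign-extended according to POINTERS_EXTEND_UNSIGNED.  */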
7fa774cd 381\f
d53441c8 382
bd1a81f7 383/* Return something equivalent to X but valid as a memory address for something
384 of mode MODE in the named address space AS. When X is not itself valid,
385 this works by copying X or subexpressions of it into registers. */
7fa774cd 386
387rtx
3754d046 388memory_address_addr_space (machine_mode mode, rtx x, addr_space_t as)
7fa774cd 389{
19cb6b50 390 rtx oldx = x;
3754d046 391 machine_mode address_mode = targetm.addr_space.address_mode (as);
7fa774cd 392
98155838 393 x = convert_memory_address_addr_space (address_mode, x, as);
184aad03 394
c7bf1374 395 /* By passing constant addresses through registers
7fa774cd 396 we get a chance to cse them. */
e6f7d557 397 if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
98155838 398 x = force_reg (address_mode, x);
7fa774cd 399
7fa774cd 400 /* We get better cse by rejecting indirect addressing at this stage.
401 Let the combiner create indirect addresses where appropriate.
402 For now, generate the code so that the subexpressions useful to share
403 are visible. But not if cse won't be done! */
3a6d729e 404 else
7fa774cd 405 {
8ad4c111 406 if (! cse_not_expected && !REG_P (x))
3a6d729e 407 x = break_out_memory_refs (x);
408
409 /* At this point, any valid address is accepted. */
bd1a81f7 410 if (memory_address_addr_space_p (mode, x, as))
4d25f9eb 411 goto done;
3a6d729e 412
413 /* If it was valid before but breaking out memory refs invalidated it,
414 use it the old way. */
bd1a81f7 415 if (memory_address_addr_space_p (mode, oldx, as))
4d25f9eb 416 {
417 x = oldx;
418 goto done;
419 }
3a6d729e 420
421 /* Perform machine-dependent transformations on X
422 in certain cases. This is not necessary since the code
423 below can handle all possible cases, but machine-dependent
424 transformations can make better code. */
41e3a0c7 425 {
bd1a81f7 426 rtx orig_x = x;
427 x = targetm.addr_space.legitimize_address (x, oldx, mode, as);
428 if (orig_x != x && memory_address_addr_space_p (mode, x, as))
41e3a0c7 429 goto done;
430 }
3a6d729e 431
432 /* PLUS and MULT can appear in special ways
433 as the result of attempts to make an address usable for indexing.
434 Usually they are dealt with by calling force_operand, below.
435 But a sum containing constant terms is special
436 if removing them makes the sum a valid address:
437 then we generate that address in a register
438 and index off of it. We do this because it often makes
439 shorter code, and because the addresses thus generated
440 in registers often become common subexpressions. */
441 if (GET_CODE (x) == PLUS)
442 {
443 rtx constant_term = const0_rtx;
444 rtx y = eliminate_constant_term (x, &constant_term);
445 if (constant_term == const0_rtx
bd1a81f7 446 || ! memory_address_addr_space_p (mode, y, as))
3a6d729e 447 x = force_operand (x, NULL_RTX);
448 else
449 {
941522d6 450 y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
bd1a81f7 451 if (! memory_address_addr_space_p (mode, y, as))
3a6d729e 452 x = force_operand (x, NULL_RTX);
453 else
454 x = y;
455 }
456 }
7fa774cd 457
492820ac 458 else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
3a6d729e 459 x = force_operand (x, NULL_RTX);
7fa774cd 460
3a6d729e 461 /* If we have a register that's an invalid address,
462 it must be a hard reg of the wrong class. Copy it to a pseudo. */
8ad4c111 463 else if (REG_P (x))
3a6d729e 464 x = copy_to_reg (x);
465
466 /* Last resort: copy the value to a register, since
467 the register is a valid address. */
468 else
98155838 469 x = force_reg (address_mode, x);
7fa774cd 470 }
3a6d729e 471
472 done:
473
bd1a81f7 474 gcc_assert (memory_address_addr_space_p (mode, x, as));
46c86782 475 /* If we didn't change the address, we are done. Otherwise, mark
476 a reg as a pointer if we have REG or REG + CONST_INT. */
477 if (oldx == x)
478 return x;
8ad4c111 479 else if (REG_P (x))
80909c64 480 mark_reg_pointer (x, BITS_PER_UNIT);
46c86782 481 else if (GET_CODE (x) == PLUS
8ad4c111 482 && REG_P (XEXP (x, 0))
971ba038 483 && CONST_INT_P (XEXP (x, 1)))
80909c64 484 mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);
46c86782 485
3a6d729e 486 /* OLDX may have been the address on a temporary. Update the address
487 to indicate that X is now used. */
488 update_temp_slot_address (oldx, x);
489
7fa774cd 490 return x;
491}
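/* Usage sketch (annotation): most callers go through the memory_address
   macro, which supplies the generic address space, e.g.

     rtx mem = gen_rtx_MEM (SImode, memory_address (SImode, addr));

   so that an arbitrary address computation ADDR (hypothetical here) is
   legitimized before being wrapped in a MEM.  */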
492
d2b9158b 493/* If REF is a MEM with an invalid address, change it into a valid address.
494 Pass through anything else unchanged. REF must be an unshared rtx and
495 the function may modify it in-place. */
7fa774cd 496
497rtx
35cb5232 498validize_mem (rtx ref)
7fa774cd 499{
e16ceb8e 500 if (!MEM_P (ref))
7fa774cd 501 return ref;
f2d0e9f1 502 ref = use_anchored_address (ref);
bd1a81f7 503 if (memory_address_addr_space_p (GET_MODE (ref), XEXP (ref, 0),
504 MEM_ADDR_SPACE (ref)))
7fa774cd 505 return ref;
537ffcfc 506
d2b9158b 507 return replace_equiv_address (ref, XEXP (ref, 0), true);
7fa774cd 508}
f2d0e9f1 509
510/* If X is a memory reference to a member of an object block, try rewriting
511 it to use an anchor instead. Return the new memory reference on success
512 and the old one on failure. */
513
514rtx
515use_anchored_address (rtx x)
516{
517 rtx base;
518 HOST_WIDE_INT offset;
3754d046 519 machine_mode mode;
f2d0e9f1 520
521 if (!flag_section_anchors)
522 return x;
523
524 if (!MEM_P (x))
525 return x;
526
527 /* Split the address into a base and offset. */
528 base = XEXP (x, 0);
529 offset = 0;
530 if (GET_CODE (base) == CONST
531 && GET_CODE (XEXP (base, 0)) == PLUS
971ba038 532 && CONST_INT_P (XEXP (XEXP (base, 0), 1)))
f2d0e9f1 533 {
534 offset += INTVAL (XEXP (XEXP (base, 0), 1));
535 base = XEXP (XEXP (base, 0), 0);
536 }
537
538 /* Check whether BASE is suitable for anchors. */
539 if (GET_CODE (base) != SYMBOL_REF
6617cbc1 540 || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
f2d0e9f1 541 || SYMBOL_REF_ANCHOR_P (base)
102e3995 542 || SYMBOL_REF_BLOCK (base) == NULL
f2d0e9f1 543 || !targetm.use_anchors_for_symbol_p (base))
544 return x;
545
546 /* Decide where BASE is going to be. */
547 place_block_symbol (base);
548
549 /* Get the anchor we need to use. */
550 offset += SYMBOL_REF_BLOCK_OFFSET (base);
551 base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
552 SYMBOL_REF_TLS_MODEL (base));
553
554 /* Work out the offset from the anchor. */
555 offset -= SYMBOL_REF_BLOCK_OFFSET (base);
556
557 /* If we're going to run a CSE pass, force the anchor into a register.
558 We will then be able to reuse registers for several accesses, if the
559 target costs say that that's worthwhile. */
29c05e22 560 mode = GET_MODE (base);
f2d0e9f1 561 if (!cse_not_expected)
29c05e22 562 base = force_reg (mode, base);
f2d0e9f1 563
29c05e22 564 return replace_equiv_address (x, plus_constant (mode, base, offset));
f2d0e9f1 565}
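/* Worked offsets (annotation, hypothetical numbers): if BASE sits 40
   bytes into its object block and the original address was BASE + 4,
   OFFSET becomes 4 + 40 = 44 relative to the block; if the chosen
   anchor is at block offset 32, the returned address is
   ANCHOR + (44 - 32) = ANCHOR + 12.  */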
7fa774cd 566\f
7fa774cd 567/* Copy the value or contents of X to a new temp reg and return that reg. */
568
569rtx
35cb5232 570copy_to_reg (rtx x)
7fa774cd 571{
19cb6b50 572 rtx temp = gen_reg_rtx (GET_MODE (x));
6be48139 573
7fa774cd 574 /* If not an operand, must be an address with PLUS and MULT so
6be48139 575 do the computation. */
7fa774cd 576 if (! general_operand (x, VOIDmode))
577 x = force_operand (x, temp);
6be48139 578
7fa774cd 579 if (x != temp)
580 emit_move_insn (temp, x);
581
582 return temp;
583}
584
585/* Like copy_to_reg but always give the new register mode Pmode
586 in case X is a constant. */
587
588rtx
35cb5232 589copy_addr_to_reg (rtx x)
7fa774cd 590{
591 return copy_to_mode_reg (Pmode, x);
592}
593
594/* Like copy_to_reg but always give the new register mode MODE
595 in case X is a constant. */
596
597rtx
3754d046 598copy_to_mode_reg (machine_mode mode, rtx x)
7fa774cd 599{
19cb6b50 600 rtx temp = gen_reg_rtx (mode);
6be48139 601
7fa774cd 602 /* If not an operand, must be an address with PLUS and MULT so
6be48139 603 do the computation. */
7fa774cd 604 if (! general_operand (x, VOIDmode))
605 x = force_operand (x, temp);
606
611234b4 607 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
7fa774cd 608 if (x != temp)
609 emit_move_insn (temp, x);
610 return temp;
611}
612
613/* Load X into a register if it is not already one.
614 Use mode MODE for the register.
615 X should be valid for mode MODE, but it may be a constant which
616 is valid for all integer modes; that's why caller must specify MODE.
617
618 The caller must not alter the value in the register we return,
619 since we mark it as a "constant" register. */
620
621rtx
3754d046 622force_reg (machine_mode mode, rtx x)
7fa774cd 623{
a903c451 624 rtx temp, set;
625 rtx_insn *insn;
7fa774cd 626
8ad4c111 627 if (REG_P (x))
7fa774cd 628 return x;
6be48139 629
fac6aae6 630 if (general_operand (x, mode))
631 {
632 temp = gen_reg_rtx (mode);
633 insn = emit_move_insn (temp, x);
634 }
635 else
636 {
637 temp = force_operand (x, NULL_RTX);
8ad4c111 638 if (REG_P (temp))
fac6aae6 639 insn = get_last_insn ();
640 else
641 {
642 rtx temp2 = gen_reg_rtx (mode);
643 insn = emit_move_insn (temp2, temp);
644 temp = temp2;
645 }
646 }
b3c85201 647
7fa774cd 648 /* Let optimizers know that TEMP's value never changes
b3c85201 649 and that X can be substituted for it. Don't get confused
650 if INSN set something else (such as a SUBREG of TEMP). */
651 if (CONSTANT_P (x)
652 && (set = single_set (insn)) != 0
63160ce9 653 && SET_DEST (set) == temp
654 && ! rtx_equal_p (x, SET_SRC (set)))
c080d8f0 655 set_unique_reg_note (insn, REG_EQUAL, x);
fac6aae6 656
62350d6c 657 /* Let optimizers know that TEMP is a pointer, and if so, the
658 known alignment of that pointer. */
659 {
660 unsigned align = 0;
661 if (GET_CODE (x) == SYMBOL_REF)
662 {
663 align = BITS_PER_UNIT;
664 if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
665 align = DECL_ALIGN (SYMBOL_REF_DECL (x));
666 }
667 else if (GET_CODE (x) == LABEL_REF)
668 align = BITS_PER_UNIT;
669 else if (GET_CODE (x) == CONST
670 && GET_CODE (XEXP (x, 0)) == PLUS
671 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
971ba038 672 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
62350d6c 673 {
674 rtx s = XEXP (XEXP (x, 0), 0);
675 rtx c = XEXP (XEXP (x, 0), 1);
676 unsigned sa, ca;
677
678 sa = BITS_PER_UNIT;
679 if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
680 sa = DECL_ALIGN (SYMBOL_REF_DECL (s));
681
7e8d812e 682 if (INTVAL (c) == 0)
683 align = sa;
684 else
685 {
686 ca = ctz_hwi (INTVAL (c)) * BITS_PER_UNIT;
687 align = MIN (sa, ca);
688 }
62350d6c 689 }
690
40b93dba 691 if (align || (MEM_P (x) && MEM_POINTER (x)))
62350d6c 692 mark_reg_pointer (temp, align);
693 }
694
7fa774cd 695 return temp;
696}
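/* Example (annotation): for X = (const (plus (symbol_ref "s")
   (const_int 16))) where "s" has DECL_ALIGN of 256 bits, SA = 256 and
   CA = ctz_hwi (16) * BITS_PER_UNIT = 4 * 8 = 32, so TEMP is marked as
   a pointer with alignment MIN (256, 32) = 32 bits.  */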
697
698/* If X is a memory ref, copy its contents to a new temp reg and return
699 that reg. Otherwise, return X. */
700
701rtx
35cb5232 702force_not_mem (rtx x)
7fa774cd 703{
19cb6b50 704 rtx temp;
bf6a742f 705
e16ceb8e 706 if (!MEM_P (x) || GET_MODE (x) == BLKmode)
7fa774cd 707 return x;
bf6a742f 708
7fa774cd 709 temp = gen_reg_rtx (GET_MODE (x));
8d350e69 710
711 if (MEM_POINTER (x))
712 REG_POINTER (temp) = 1;
713
7fa774cd 714 emit_move_insn (temp, x);
715 return temp;
716}
717
718/* Copy X to TARGET (if it's nonzero and a reg)
719 or to a new temp reg and return that reg.
720 MODE is the mode to use for X in case it is a constant. */
721
722rtx
3754d046 723copy_to_suggested_reg (rtx x, rtx target, machine_mode mode)
7fa774cd 724{
19cb6b50 725 rtx temp;
7fa774cd 726
8ad4c111 727 if (target && REG_P (target))
7fa774cd 728 temp = target;
729 else
730 temp = gen_reg_rtx (mode);
731
732 emit_move_insn (temp, x);
733 return temp;
734}
735\f
3b2411a8 736/* Return the mode to use to pass or return a scalar of TYPE and MODE.
f9aab3b6 737 PUNSIGNEDP points to the signedness of the type and may be adjusted
738 to show what signedness to use on extension operations.
739
3b2411a8 740 FOR_RETURN is nonzero if the caller is promoting the return value
741 of the function whose type is FUNTYPE, else it is for promoting args. */
f9aab3b6 742
3754d046 743machine_mode
744promote_function_mode (const_tree type, machine_mode mode, int *punsignedp,
3b2411a8 745 const_tree funtype, int for_return)
746{
adaf4ef0 747 /* Called without a type node for a libcall. */
748 if (type == NULL_TREE)
749 {
750 if (INTEGRAL_MODE_P (mode))
751 return targetm.calls.promote_function_mode (NULL_TREE, mode,
752 punsignedp, funtype,
753 for_return);
754 else
755 return mode;
756 }
757
3b2411a8 758 switch (TREE_CODE (type))
759 {
760 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
761 case REAL_TYPE: case OFFSET_TYPE: case FIXED_POINT_TYPE:
762 case POINTER_TYPE: case REFERENCE_TYPE:
763 return targetm.calls.promote_function_mode (type, mode, punsignedp, funtype,
764 for_return);
765
766 default:
767 return mode;
768 }
769}
770/* Return the mode to use to store a scalar of TYPE and MODE.
771 PUNSIGNEDP points to the signedness of the type and may be adjusted
772 to show what signedness to use on extension operations. */
19347327 773
3754d046 774machine_mode
775promote_mode (const_tree type ATTRIBUTE_UNUSED, machine_mode mode,
2888b920 776 int *punsignedp ATTRIBUTE_UNUSED)
f9aab3b6 777{
49130e9a 778#ifdef PROMOTE_MODE
779 enum tree_code code;
780 int unsignedp;
781#endif
782
adaf4ef0 783 /* For libcalls this is invoked without TYPE from the backends
784 TARGET_PROMOTE_FUNCTION_MODE hooks. Don't do anything in that
785 case. */
786 if (type == NULL_TREE)
787 return mode;
788
3b2411a8 789 /* FIXME: this is the same logic that was there until GCC 4.4, but we
790 probably want to test POINTERS_EXTEND_UNSIGNED even if PROMOTE_MODE
791 is not defined. The affected targets are M32C, S390, SPARC. */
792#ifdef PROMOTE_MODE
49130e9a 793 code = TREE_CODE (type);
794 unsignedp = *punsignedp;
f9aab3b6 795
f9aab3b6 796 switch (code)
797 {
f9aab3b6 798 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
06f0b99c 799 case REAL_TYPE: case OFFSET_TYPE: case FIXED_POINT_TYPE:
3b2411a8 800 PROMOTE_MODE (mode, unsignedp, type);
801 *punsignedp = unsignedp;
802 return mode;
f9aab3b6 803 break;
f9aab3b6 804
184aad03 805#ifdef POINTERS_EXTEND_UNSIGNED
bc244e4c 806 case REFERENCE_TYPE:
f9aab3b6 807 case POINTER_TYPE:
3b2411a8 808 *punsignedp = POINTERS_EXTEND_UNSIGNED;
98155838 809 return targetm.addr_space.address_mode
810 (TYPE_ADDR_SPACE (TREE_TYPE (type)));
f9aab3b6 811 break;
184aad03 812#endif
6be48139 813
941522d6 814 default:
3b2411a8 815 return mode;
f9aab3b6 816 }
3b2411a8 817#else
f9aab3b6 818 return mode;
3b2411a8 819#endif
f9aab3b6 820}
3b2411a8 821
822
823/* Use one of promote_mode or promote_function_mode to find the promoted
824 mode of DECL. If PUNSIGNEDP is not NULL, store there the unsignedness
825 of DECL after promotion. */
826
3754d046 827machine_mode
3b2411a8 828promote_decl_mode (const_tree decl, int *punsignedp)
829{
830 tree type = TREE_TYPE (decl);
831 int unsignedp = TYPE_UNSIGNED (type);
3754d046 832 machine_mode mode = DECL_MODE (decl);
833 machine_mode pmode;
3b2411a8 834
c879dbcf 835 if (TREE_CODE (decl) == RESULT_DECL
836 || TREE_CODE (decl) == PARM_DECL)
3b2411a8 837 pmode = promote_function_mode (type, mode, &unsignedp,
c879dbcf 838 TREE_TYPE (current_function_decl), 2);
3b2411a8 839 else
840 pmode = promote_mode (type, mode, &unsignedp);
841
842 if (punsignedp)
843 *punsignedp = unsignedp;
844 return pmode;
845}
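/* Example (annotation): on a target whose PROMOTE_MODE widens QImode and
   HImode integers to SImode, a 'short int' local gets pmode SImode from
   promote_mode, while a 'short int' PARM_DECL or RESULT_DECL goes through
   the promote_function_mode hook instead; in both cases *PUNSIGNEDP
   reports whether zero or sign extension is to be used.  */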
846
f9aab3b6 847\f
dfe00a8f 848/* Controls the behaviour of {anti_,}adjust_stack. */
849static bool suppress_reg_args_size;
850
851/* A helper for adjust_stack and anti_adjust_stack. */
852
853static void
854adjust_stack_1 (rtx adjust, bool anti_p)
855{
a903c451 856 rtx temp;
857 rtx_insn *insn;
dfe00a8f 858
dfe00a8f 859 /* Hereafter anti_p means subtract_p. */
3764c94e 860 if (!STACK_GROWS_DOWNWARD)
861 anti_p = !anti_p;
dfe00a8f 862
863 temp = expand_binop (Pmode,
864 anti_p ? sub_optab : add_optab,
865 stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
866 OPTAB_LIB_WIDEN);
867
868 if (temp != stack_pointer_rtx)
869 insn = emit_move_insn (stack_pointer_rtx, temp);
870 else
871 {
872 insn = get_last_insn ();
873 temp = single_set (insn);
874 gcc_assert (temp != NULL && SET_DEST (temp) == stack_pointer_rtx);
875 }
876
877 if (!suppress_reg_args_size)
878 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
879}
880
7fa774cd 881/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
882 This pops when ADJUST is positive. ADJUST need not be constant. */
883
884void
35cb5232 885adjust_stack (rtx adjust)
7fa774cd 886{
7fa774cd 887 if (adjust == const0_rtx)
888 return;
889
91b70175 890 /* We expect all variable-sized adjustments to be a multiple of
891 PREFERRED_STACK_BOUNDARY. */
971ba038 892 if (CONST_INT_P (adjust))
91b70175 893 stack_pointer_delta -= INTVAL (adjust);
894
dfe00a8f 895 adjust_stack_1 (adjust, false);
7fa774cd 896}
897
898/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
899 This pushes when ADJUST is positive. ADJUST need not be constant. */
900
901void
35cb5232 902anti_adjust_stack (rtx adjust)
7fa774cd 903{
7fa774cd 904 if (adjust == const0_rtx)
905 return;
906
91b70175 907 /* We expect all variable-sized adjustments to be a multiple of
908 PREFERRED_STACK_BOUNDARY. */
971ba038 909 if (CONST_INT_P (adjust))
91b70175 910 stack_pointer_delta += INTVAL (adjust);
911
dfe00a8f 912 adjust_stack_1 (adjust, true);
7fa774cd 913}
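/* Usage sketch (annotation): pushing a 16-byte block and popping it again
   on a STACK_GROWS_DOWNWARD target:

     anti_adjust_stack (GEN_INT (16));    ... sp moves down by 16 ...
     adjust_stack (GEN_INT (16));         ... sp moves back up ...

   Both keep stack_pointer_delta in sync for constant adjustments.  */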
914
915/* Round the size of a block to be pushed up to the boundary required
916 by this machine. SIZE is the desired size, which need not be constant. */
917
69d39d70 918static rtx
35cb5232 919round_push (rtx size)
7fa774cd 920{
60778e62 921 rtx align_rtx, alignm1_rtx;
3737c3eb 922
60778e62 923 if (!SUPPORTS_STACK_ALIGNMENT
924 || crtl->preferred_stack_boundary == MAX_SUPPORTED_STACK_ALIGNMENT)
7fa774cd 925 {
60778e62 926 int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
927
928 if (align == 1)
929 return size;
930
931 if (CONST_INT_P (size))
932 {
933 HOST_WIDE_INT new_size = (INTVAL (size) + align - 1) / align * align;
3737c3eb 934
60778e62 935 if (INTVAL (size) != new_size)
936 size = GEN_INT (new_size);
937 return size;
938 }
939
940 align_rtx = GEN_INT (align);
941 alignm1_rtx = GEN_INT (align - 1);
7fa774cd 942 }
943 else
944 {
60778e62 945 /* If crtl->preferred_stack_boundary might still grow, use
946 virtual_preferred_stack_boundary_rtx instead. This will be
947 substituted by the right value in vregs pass and optimized
948 during combine. */
949 align_rtx = virtual_preferred_stack_boundary_rtx;
29c05e22 950 alignm1_rtx = force_operand (plus_constant (Pmode, align_rtx, -1),
951 NULL_RTX);
7fa774cd 952 }
3737c3eb 953
60778e62 954 /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
955 but we know it can't. So add ourselves and then do
956 TRUNC_DIV_EXPR. */
957 size = expand_binop (Pmode, add_optab, size, alignm1_rtx,
958 NULL_RTX, 1, OPTAB_LIB_WIDEN);
959 size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, align_rtx,
960 NULL_RTX, 1);
961 size = expand_mult (Pmode, size, align_rtx, NULL_RTX, 1);
962
7fa774cd 963 return size;
964}
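/* Example (annotation): with a preferred stack boundary of 128 bits,
   ALIGN = 16, so a constant SIZE of 37 rounds to (37 + 15) / 16 * 16
   = 48.  The non-constant path computes the same value with the
   add/divide/multiply sequence above.  */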
965\f
dbd6697a 966/* Save the stack pointer for the purpose in SAVE_LEVEL. PSAVE is a pointer
967 to a previously-created save area. If no save area has been allocated,
968 this function will allocate one. If a save area is specified, it
e9c97615 969 must be of the proper mode. */
dbd6697a 970
971void
e9c97615 972emit_stack_save (enum save_level save_level, rtx *psave)
dbd6697a 973{
974 rtx sa = *psave;
975 /* The default is that we use a move insn and save in a Pmode object. */
f9a00e9e 976 rtx (*fcn) (rtx, rtx) = gen_move_insn_uncast;
3754d046 977 machine_mode mode = STACK_SAVEAREA_MODE (save_level);
dbd6697a 978
979 /* See if this machine has anything special to do for this kind of save. */
980 switch (save_level)
981 {
982#ifdef HAVE_save_stack_block
983 case SAVE_BLOCK:
984 if (HAVE_save_stack_block)
8cda90b9 985 fcn = gen_save_stack_block;
dbd6697a 986 break;
987#endif
988#ifdef HAVE_save_stack_function
989 case SAVE_FUNCTION:
990 if (HAVE_save_stack_function)
8cda90b9 991 fcn = gen_save_stack_function;
dbd6697a 992 break;
993#endif
994#ifdef HAVE_save_stack_nonlocal
995 case SAVE_NONLOCAL:
996 if (HAVE_save_stack_nonlocal)
8cda90b9 997 fcn = gen_save_stack_nonlocal;
dbd6697a 998 break;
999#endif
941522d6 1000 default:
1001 break;
dbd6697a 1002 }
1003
1004 /* If there is no save area and we have to allocate one, do so. Otherwise
1005 verify the save area is the proper mode. */
1006
1007 if (sa == 0)
1008 {
1009 if (mode != VOIDmode)
1010 {
1011 if (save_level == SAVE_NONLOCAL)
1012 *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
1013 else
1014 *psave = sa = gen_reg_rtx (mode);
1015 }
1016 }
dbd6697a 1017
e9c97615 1018 do_pending_stack_adjust ();
1019 if (sa != 0)
1020 sa = validize_mem (sa);
1021 emit_insn (fcn (sa, stack_pointer_rtx));
dbd6697a 1022}
1023
1024/* Restore the stack pointer for the purpose in SAVE_LEVEL. SA is the save
e9c97615 1025 area made by emit_stack_save. If it is zero, we have nothing to do. */
dbd6697a 1026
1027void
e9c97615 1028emit_stack_restore (enum save_level save_level, rtx sa)
dbd6697a 1029{
1030 /* The default is that we use a move insn. */
f9a00e9e 1031 rtx (*fcn) (rtx, rtx) = gen_move_insn_uncast;
dbd6697a 1032
1ea0f42a 1033 /* If stack_realign_drap, the x86 backend emits a prologue that aligns both
1034 STACK_POINTER and HARD_FRAME_POINTER.
1035 If stack_realign_fp, the x86 backend emits a prologue that aligns only
1036 STACK_POINTER. This renders the HARD_FRAME_POINTER unusable for accessing
1037 aligned variables, which is reflected in ix86_can_eliminate.
1038 We normally still have the realigned STACK_POINTER that we can use.
1039 But if there is a stack restore still present at reload, it can trigger
1040 mark_not_eliminable for the STACK_POINTER, leaving no way to eliminate
1041 FRAME_POINTER into a hard reg.
1042 To prevent this situation, we force need_drap if we emit a stack
1043 restore. */
1044 if (SUPPORTS_STACK_ALIGNMENT)
1045 crtl->need_drap = true;
1046
dbd6697a 1047 /* See if this machine has anything special to do for this kind of save. */
1048 switch (save_level)
1049 {
1050#ifdef HAVE_restore_stack_block
1051 case SAVE_BLOCK:
1052 if (HAVE_restore_stack_block)
1053 fcn = gen_restore_stack_block;
1054 break;
1055#endif
1056#ifdef HAVE_restore_stack_function
1057 case SAVE_FUNCTION:
1058 if (HAVE_restore_stack_function)
1059 fcn = gen_restore_stack_function;
1060 break;
1061#endif
1062#ifdef HAVE_restore_stack_nonlocal
dbd6697a 1063 case SAVE_NONLOCAL:
1064 if (HAVE_restore_stack_nonlocal)
1065 fcn = gen_restore_stack_nonlocal;
1066 break;
1067#endif
941522d6 1068 default:
1069 break;
dbd6697a 1070 }
1071
bbe57f89 1072 if (sa != 0)
62ede483 1073 {
1074 sa = validize_mem (sa);
1075 /* These clobbers prevent the scheduler from moving
1076 references to variable arrays below the code
7299020b 1077 that deletes (pops) the arrays. */
18b42941 1078 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1079 emit_clobber (gen_rtx_MEM (BLKmode, stack_pointer_rtx));
62ede483 1080 }
bbe57f89 1081
05ae776c 1082 discard_pending_stack_adjust ();
1083
e9c97615 1084 emit_insn (fcn (stack_pointer_rtx, sa));
dbd6697a 1085}
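/* Usage sketch (annotation; SAVE_AREA is a hypothetical local): a typical
   save/restore pair around code that moves the stack pointer looks like

     rtx save_area = NULL_RTX;
     emit_stack_save (SAVE_BLOCK, &save_area);
     ...code that allocates dynamic stack space...
     emit_stack_restore (SAVE_BLOCK, save_area);

   with emit_stack_save choosing the mode and allocating the area.  */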
4ee9c684 1086
1087/* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
97354ae4 1088 function. This should be called whenever we allocate or deallocate
1089 dynamic stack space. */
4ee9c684 1090
1091void
1092update_nonlocal_goto_save_area (void)
1093{
1094 tree t_save;
1095 rtx r_save;
1096
1097 /* The nonlocal_goto_save_area object is an array of N pointers. The
1098 first one is used for the frame pointer save; the rest are sized by
1099 STACK_SAVEAREA_MODE. Create a reference to array index 1, the first
1100 of the stack save area slots. */
21dc8b2b 1101 t_save = build4 (ARRAY_REF,
1102 TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
1103 cfun->nonlocal_goto_save_area,
b55f9493 1104 integer_one_node, NULL_TREE, NULL_TREE);
4ee9c684 1105 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
1106
e9c97615 1107 emit_stack_save (SAVE_NONLOCAL, &r_save);
4ee9c684 1108}
97354ae4 1109
1110/* Record a new stack level for the current function. This should be called
1111 whenever we allocate or deallocate dynamic stack space. */
1112
1113void
1114record_new_stack_level (void)
1115{
1116 /* Record the new stack level for nonlocal gotos. */
1117 if (cfun->nonlocal_goto_save_area)
1118 update_nonlocal_goto_save_area ();
1119
1120 /* Record the new stack level for SJLJ exceptions. */
1121 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
1122 update_sjlj_context ();
1123}
dbd6697a 1124\f
7fa774cd 1125/* Return an rtx representing the address of an area of memory dynamically
5be42b39 1126 pushed on the stack.
7fa774cd 1127
1128 Any required stack pointer alignment is preserved.
1129
1130 SIZE is an rtx representing the size of the area.
83ad791a 1131
5be42b39 1132 SIZE_ALIGN is the alignment (in bits) that we know SIZE has. This
1133 parameter may be zero. If so, a proper value will be extracted
1134 from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.
1135
1136 REQUIRED_ALIGN is the alignment (in bits) required for the region
1137 of memory.
990495a7 1138
1139 If CANNOT_ACCUMULATE is set to TRUE, the caller guarantees that the
1140 stack space allocated by the generated code cannot be added with itself
1141 in the course of the execution of the function. It is always safe to
1142 pass FALSE here and the following criterion is sufficient in order to
1143 pass TRUE: every path in the CFG that starts at the allocation point and
1144 loops to it executes the associated deallocation code. */
7fa774cd 1145
1146rtx
5be42b39 1147allocate_dynamic_stack_space (rtx size, unsigned size_align,
1148 unsigned required_align, bool cannot_accumulate)
7fa774cd 1149{
990495a7 1150 HOST_WIDE_INT stack_usage_size = -1;
a903c451 1151 rtx_code_label *final_label;
1152 rtx final_target, target;
8b51e3aa 1153 unsigned extra_align = 0;
5be42b39 1154 bool must_align;
990495a7 1155
c0a9c3cd 1156 /* If we're asking for zero bytes, it doesn't matter what we point
c3418f42 1157 to since we can't dereference it. But return a reasonable
c0a9c3cd 1158 address anyway. */
1159 if (size == const0_rtx)
1160 return virtual_stack_dynamic_rtx;
1161
1162 /* Otherwise, show we're calling alloca or equivalent. */
18d50ae6 1163 cfun->calls_alloca = 1;
c0a9c3cd 1164
990495a7 1165 /* If stack usage info is requested, look into the size we are passed.
1166 We need to do so this early to avoid the obfuscation that may be
1167 introduced later by the various alignment operations. */
8c0dd614 1168 if (flag_stack_usage_info)
990495a7 1169 {
60778e62 1170 if (CONST_INT_P (size))
990495a7 1171 stack_usage_size = INTVAL (size);
60778e62 1172 else if (REG_P (size))
990495a7 1173 {
1174 /* Look into the last emitted insn and see if we can deduce
1175 something for the register. */
a903c451 1176 rtx_insn *insn;
1177 rtx set, note;
990495a7 1178 insn = get_last_insn ();
1179 if ((set = single_set (insn)) && rtx_equal_p (SET_DEST (set), size))
1180 {
60778e62 1181 if (CONST_INT_P (SET_SRC (set)))
990495a7 1182 stack_usage_size = INTVAL (SET_SRC (set));
1183 else if ((note = find_reg_equal_equiv_note (insn))
60778e62 1184 && CONST_INT_P (XEXP (note, 0)))
990495a7 1185 stack_usage_size = INTVAL (XEXP (note, 0));
1186 }
1187 }
1188
1189 /* If the size is not constant, we can't say anything. */
1190 if (stack_usage_size == -1)
1191 {
1192 current_function_has_unbounded_dynamic_stack_size = 1;
1193 stack_usage_size = 0;
1194 }
1195 }
1196
7fa774cd 1197 /* Ensure the size is in the proper mode. */
1198 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
1199 size = convert_to_mode (Pmode, size, 1);
1200
5be42b39 1201 /* Adjust SIZE_ALIGN, if needed. */
1202 if (CONST_INT_P (size))
1203 {
1204 unsigned HOST_WIDE_INT lsb;
1205
1206 lsb = INTVAL (size);
1207 lsb &= -lsb;
1208
1209 /* Watch out for overflow truncating to "unsigned". */
1210 if (lsb > UINT_MAX / BITS_PER_UNIT)
1211 size_align = 1u << (HOST_BITS_PER_INT - 1);
1212 else
1213 size_align = (unsigned)lsb * BITS_PER_UNIT;
1214 }
1215 else if (size_align < BITS_PER_UNIT)
1216 size_align = BITS_PER_UNIT;
1217
8b51e3aa 1218 /* We can't attempt to minimize the necessary alignment, because we don't
1219 know the final value of preferred_stack_boundary yet while executing
1220 this code. */
1221 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
1222 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
1223
7fa774cd 1224 /* We will need to ensure that the address we return is aligned to
8b51e3aa 1225 REQUIRED_ALIGN. If STACK_DYNAMIC_OFFSET is defined, we don't
1226 always know its final value at this point in the compilation (it
1227 might depend on the size of the outgoing parameter lists, for
1228 example), so we must align the value to be returned in that case.
1229 (Note that STACK_DYNAMIC_OFFSET will have a default nonzero value if
1230 STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).
1231 We must also do an alignment operation on the returned value if
1232 the stack pointer alignment is less strict than REQUIRED_ALIGN.
1233
1234 If we have to align, we must leave space in SIZE for the hole
1235 that might result from the alignment operation. */
1236
1237 must_align = (crtl->preferred_stack_boundary < required_align);
1238 if (must_align)
990495a7 1239 {
8b51e3aa 1240 if (required_align > PREFERRED_STACK_BOUNDARY)
1241 extra_align = PREFERRED_STACK_BOUNDARY;
1242 else if (required_align > STACK_BOUNDARY)
1243 extra_align = STACK_BOUNDARY;
1244 else
1245 extra_align = BITS_PER_UNIT;
156512c1 1246 }
1247
8b51e3aa 1248 /* ??? STACK_POINTER_OFFSET is always defined now. */
1249#if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET)
1250 must_align = true;
1251 extra_align = BITS_PER_UNIT;
1252#endif
156512c1 1253
8b51e3aa 1254 if (must_align)
1255 {
1256 unsigned extra = (required_align - extra_align) / BITS_PER_UNIT;
5be42b39 1257
29c05e22 1258 size = plus_constant (Pmode, size, extra);
5be42b39 1259 size = force_operand (size, NULL_RTX);
990495a7 1260
8c0dd614 1261 if (flag_stack_usage_info)
5be42b39 1262 stack_usage_size += extra;
8b51e3aa 1263
5be42b39 1264 if (extra && size_align > extra_align)
1265 size_align = extra_align;
990495a7 1266 }
35be3c55 1267
7fa774cd 1268 /* Round the size to a multiple of the required stack alignment.
8b51e3aa 1269 Since the stack is presumed to be rounded before this allocation,
7fa774cd 1270 this will maintain the required alignment.
1271
1272 If the stack grows downward, we could save an insn by subtracting
1273 SIZE from the stack pointer and then aligning the stack pointer.
1274 The problem with this is that the stack pointer may be unaligned
1275 between the execution of the subtraction and alignment insns and
1276 some machines do not allow this. Even on those that do, some
1277 signal handlers malfunction if a signal should occur between those
1278 insns. Since this is an extremely rare event, we have no reliable
1279 way of knowing which systems have this problem. So we avoid even
1280 momentarily mis-aligning the stack. */
5be42b39 1281 if (size_align % MAX_SUPPORTED_STACK_ALIGNMENT != 0)
990495a7 1282 {
1283 size = round_push (size);
7fa774cd 1284
8c0dd614 1285 if (flag_stack_usage_info)
990495a7 1286 {
60778e62 1287 int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
990495a7 1288 stack_usage_size = (stack_usage_size + align - 1) / align * align;
1289 }
1290 }
1291
5be42b39 1292 target = gen_reg_rtx (Pmode);
48b14f50 1293
990495a7 1294 /* The size is supposed to be fully adjusted at this point so record it
1295 if stack usage info is requested. */
8c0dd614 1296 if (flag_stack_usage_info)
990495a7 1297 {
1298 current_function_dynamic_stack_size += stack_usage_size;
1299
1300 /* ??? This is gross but the only safe stance in the absence
1301 of stack usage oriented flow analysis. */
1302 if (!cannot_accumulate)
1303 current_function_has_unbounded_dynamic_stack_size = 1;
1304 }
7fa774cd 1305
a903c451 1306 final_label = NULL;
48b14f50 1307 final_target = NULL_RTX;
1308
1309 /* If we are splitting the stack, we need to ask the backend whether
1310 there is enough room on the current stack. If there isn't, or if
1311 the backend doesn't know how to tell us, then we need to call a
1312 function to allocate memory in some other way. This memory will
1313 be released when we release the current stack segment. The
1314 effect is that stack allocation becomes less efficient, but at
1315 least it doesn't cause a stack overflow. */
1316 if (flag_split_stack)
1317 {
a903c451 1318 rtx_code_label *available_label;
1319 rtx ask, space, func;
48b14f50 1320
a903c451 1321 available_label = NULL;
48b14f50 1322
1323#ifdef HAVE_split_stack_space_check
1324 if (HAVE_split_stack_space_check)
1325 {
1326 available_label = gen_label_rtx ();
1327
1328 /* This instruction will branch to AVAILABLE_LABEL if there
1329 are SIZE bytes available on the stack. */
1330 emit_insn (gen_split_stack_space_check (size, available_label));
1331 }
1332#endif
1333
70c912cf 1334 /* The __morestack_allocate_stack_space function will allocate
d2461405 1335 memory using malloc. If the alignment of the memory returned
1336 by malloc does not meet REQUIRED_ALIGN, we increase SIZE to
1337 make sure we allocate enough space. */
1338 if (MALLOC_ABI_ALIGNMENT >= required_align)
1339 ask = size;
1340 else
1341 {
1342 ask = expand_binop (Pmode, add_optab, size,
0359f9f5 1343 gen_int_mode (required_align / BITS_PER_UNIT - 1,
1344 Pmode),
d2461405 1345 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1346 must_align = true;
1347 }
70c912cf 1348
48b14f50 1349 func = init_one_libfunc ("__morestack_allocate_stack_space");
1350
1351 space = emit_library_call_value (func, target, LCT_NORMAL, Pmode,
70c912cf 1352 1, ask, Pmode);
48b14f50 1353
1354 if (available_label == NULL_RTX)
1355 return space;
1356
1357 final_target = gen_reg_rtx (Pmode);
48b14f50 1358
1359 emit_move_insn (final_target, space);
1360
1361 final_label = gen_label_rtx ();
1362 emit_jump (final_label);
1363
1364 emit_label (available_label);
1365 }
1366
7fa774cd 1367 do_pending_stack_adjust ();
1368
91b70175 1369 /* We ought always to be called at top level, and the stack ought to be
3fb1e43b 1370 aligned properly. */
611234b4 1371 gcc_assert (!(stack_pointer_delta
1372 % (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)));
91b70175 1373
42982f3e 1374 /* If needed, check that we have the required amount of stack. Take into
1375 account what has already been checked. */
1376 if (STACK_CHECK_MOVING_SP)
1377 ;
1378 else if (flag_stack_check == GENERIC_STACK_CHECK)
4852b829 1379 probe_stack_range (STACK_OLD_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE,
1380 size);
1381 else if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK)
1382 probe_stack_range (STACK_CHECK_PROTECT, size);
382ff7aa 1383
4bb7c660 1384 /* Don't let anti_adjust_stack emit notes. */
1385 suppress_reg_args_size = true;
1386
7fa774cd 1387 /* Perform the required allocation from the stack. Some systems do
1388 this differently than simply incrementing/decrementing from the
941522d6 1389 stack pointer, such as acquiring the space by calling malloc(). */
7fa774cd 1390#ifdef HAVE_allocate_stack
1391 if (HAVE_allocate_stack)
1392 {
8786db1e 1393 struct expand_operand ops[2];
479e4d5e 1394 /* We don't have to check against the predicate for operand 0 since
1395 TARGET is known to be a pseudo of the proper mode, which must
8786db1e 1396 be valid for the operand. */
1397 create_fixed_operand (&ops[0], target);
1398 create_convert_operand_to (&ops[1], size, STACK_SIZE_MODE, true);
1399 expand_insn (CODE_FOR_allocate_stack, 2, ops);
7fa774cd 1400 }
1401 else
1402#endif
184aad03 1403 {
60778e62 1404 int saved_stack_pointer_delta;
1405
3764c94e 1406 if (!STACK_GROWS_DOWNWARD)
1407 emit_move_insn (target, virtual_stack_dynamic_rtx);
8f8ac140 1408
1409 /* Check stack bounds if necessary. */
18d50ae6 1410 if (crtl->limit_stack)
8f8ac140 1411 {
1412 rtx available;
a903c451 1413 rtx_code_label *space_available = gen_label_rtx ();
3764c94e 1414 if (STACK_GROWS_DOWNWARD)
1415 available = expand_binop (Pmode, sub_optab,
1416 stack_pointer_rtx, stack_limit_rtx,
1417 NULL_RTX, 1, OPTAB_WIDEN);
1418 else
1419 available = expand_binop (Pmode, sub_optab,
1420 stack_limit_rtx, stack_pointer_rtx,
1421 NULL_RTX, 1, OPTAB_WIDEN);
1422
8f8ac140 1423 emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
2b96c5f6 1424 space_available);
8f8ac140 1425#ifdef HAVE_trap
1426 if (HAVE_trap)
1427 emit_insn (gen_trap ());
1428 else
1429#endif
1430 error ("stack limits not supported on this target");
1431 emit_barrier ();
1432 emit_label (space_available);
1433 }
1434
60778e62 1435 saved_stack_pointer_delta = stack_pointer_delta;
dfe00a8f 1436
42982f3e 1437 if (flag_stack_check && STACK_CHECK_MOVING_SP)
d1b92264 1438 anti_adjust_stack_and_probe (size, false);
42982f3e 1439 else
1440 anti_adjust_stack (size);
dfe00a8f 1441
60778e62 1442 /* Even if size is constant, don't modify stack_pointer_delta.
1443 The constant size alloca should preserve
1444 crtl->preferred_stack_boundary alignment. */
1445 stack_pointer_delta = saved_stack_pointer_delta;
15c6cf6b 1446
3764c94e 1447 if (STACK_GROWS_DOWNWARD)
1448 emit_move_insn (target, virtual_stack_dynamic_rtx);
941522d6 1449 }
7fa774cd 1450
4bb7c660 1451 suppress_reg_args_size = false;
1452
5be42b39 1453 /* Finish up the split stack handling. */
1454 if (final_label != NULL_RTX)
1455 {
1456 gcc_assert (flag_split_stack);
1457 emit_move_insn (final_target, target);
1458 emit_label (final_label);
1459 target = final_target;
1460 }
1461
1462 if (must_align)
83ad791a 1463 {
8c08cc16 1464 /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
a92771b8 1465 but we know it can't. So add ourselves and then do
1466 TRUNC_DIV_EXPR. */
6e8c172a 1467 target = expand_binop (Pmode, add_optab, target,
0359f9f5 1468 gen_int_mode (required_align / BITS_PER_UNIT - 1,
1469 Pmode),
8c08cc16 1470 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1471 target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
0359f9f5 1472 gen_int_mode (required_align / BITS_PER_UNIT,
1473 Pmode),
50b0c9ee 1474 NULL_RTX, 1);
83ad791a 1475 target = expand_mult (Pmode, target,
0359f9f5 1476 gen_int_mode (required_align / BITS_PER_UNIT,
1477 Pmode),
50b0c9ee 1478 NULL_RTX, 1);
83ad791a 1479 }
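/* Illustrative numbers (annotation): for REQUIRED_ALIGN of 256 bits
   (32 bytes) the sequence above computes ((target + 31) / 32) * 32,
   so e.g. 0x1004 is rounded up to 0x1020.  */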
6be48139 1480
5be42b39 1481 /* Now that we've committed to a return value, mark its alignment. */
1482 mark_reg_pointer (target, required_align);
1483
97354ae4 1484 /* Record the new stack level. */
1485 record_new_stack_level ();
c0a9c3cd 1486
7fa774cd 1487 return target;
1488}
1489\f
6be48139 1490/* A front end may want to override GCC's stack checking by providing a
155b05dc 1491 run-time routine to call to check the stack, so provide a mechanism for
1492 calling that routine. */
1493
1f3233d1 1494static GTY(()) rtx stack_check_libfunc;
155b05dc 1495
1496void
e3805e9e 1497set_stack_check_libfunc (const char *libfunc_name)
155b05dc 1498{
e3805e9e 1499 gcc_assert (stack_check_libfunc == NULL_RTX);
1500 stack_check_libfunc = gen_rtx_SYMBOL_REF (Pmode, libfunc_name);
155b05dc 1501}
1502\f
382ff7aa 1503/* Emit one stack probe at ADDRESS, an address within the stack. */
1504
f164c08a 1505void
35cb5232 1506emit_stack_probe (rtx address)
382ff7aa 1507{
28d5c3d9 1508#ifdef HAVE_probe_stack_address
1509 if (HAVE_probe_stack_address)
1510 emit_insn (gen_probe_stack_address (address));
1511 else
1512#endif
1513 {
1514 rtx memref = gen_rtx_MEM (word_mode, address);
382ff7aa 1515
28d5c3d9 1516 MEM_VOLATILE_P (memref) = 1;
382ff7aa 1517
28d5c3d9 1518 /* See if we have an insn to probe the stack. */
42982f3e 1519#ifdef HAVE_probe_stack
28d5c3d9 1520 if (HAVE_probe_stack)
1521 emit_insn (gen_probe_stack (memref));
1522 else
42982f3e 1523#endif
28d5c3d9 1524 emit_move_insn (memref, const0_rtx);
1525 }
382ff7aa 1526}
1527
6be48139 1528/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
42982f3e 1529 FIRST is a constant and size is a Pmode RTX. These are offsets from
1530 the current stack pointer. STACK_GROWS_DOWNWARD says whether to add
1531 or subtract them from the stack pointer. */
1532
1533#define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP)
382ff7aa 1534
2b785411 1535#if STACK_GROWS_DOWNWARD
382ff7aa 1536#define STACK_GROW_OP MINUS
42982f3e 1537#define STACK_GROW_OPTAB sub_optab
1538#define STACK_GROW_OFF(off) -(off)
382ff7aa 1539#else
1540#define STACK_GROW_OP PLUS
42982f3e 1541#define STACK_GROW_OPTAB add_optab
1542#define STACK_GROW_OFF(off) (off)
382ff7aa 1543#endif
1544
1545void
35cb5232 1546probe_stack_range (HOST_WIDE_INT first, rtx size)
382ff7aa 1547{
479e4d5e 1548 /* First ensure SIZE is Pmode. */
1549 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
1550 size = convert_to_mode (Pmode, size, 1);
1551
42982f3e 1552 /* Next see if we have a function to check the stack. */
1553 if (stack_check_libfunc)
024e3108 1554 {
42982f3e 1555 rtx addr = memory_address (Pmode,
8da1563c 1556 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1557 stack_pointer_rtx,
29c05e22 1558 plus_constant (Pmode,
1559 size, first)));
53226a3c 1560 emit_library_call (stack_check_libfunc, LCT_NORMAL, VOIDmode, 1, addr,
1561 Pmode);
024e3108 1562 }
155b05dc 1563
42982f3e 1564 /* Next see if we have an insn to check the stack. */
382ff7aa 1565#ifdef HAVE_check_stack
15b37e31 1566 else if (HAVE_check_stack)
382ff7aa 1567 {
8786db1e 1568 struct expand_operand ops[1];
42982f3e 1569 rtx addr = memory_address (Pmode,
1570 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1571 stack_pointer_rtx,
29c05e22 1572 plus_constant (Pmode,
1573 size, first)));
15b37e31 1574 bool success;
8786db1e 1575 create_input_operand (&ops[0], addr, Pmode);
15b37e31 1576 success = maybe_expand_insn (CODE_FOR_check_stack, 1, ops);
1577 gcc_assert (success);
382ff7aa 1578 }
1579#endif
1580
42982f3e 1581 /* Otherwise we have to generate explicit probes. If we have a constant
1582 small number of them to generate, that's the easy case. */
1583 else if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
382ff7aa 1584 {
42982f3e 1585 HOST_WIDE_INT isize = INTVAL (size), i;
1586 rtx addr;
1587
1588 /* Probe at FIRST + N * PROBE_INTERVAL for values of N from 1 until
1589 it exceeds SIZE. If only one probe is needed, this will not
1590 generate any code. Then probe at FIRST + SIZE. */
1591 for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
1592 {
1593 addr = memory_address (Pmode,
29c05e22 1594 plus_constant (Pmode, stack_pointer_rtx,
42982f3e 1595 STACK_GROW_OFF (first + i)));
1596 emit_stack_probe (addr);
1597 }
1598
1599 addr = memory_address (Pmode,
29c05e22 1600 plus_constant (Pmode, stack_pointer_rtx,
42982f3e 1601 STACK_GROW_OFF (first + isize)));
1602 emit_stack_probe (addr);
382ff7aa 1603 }
1604
42982f3e 1605 /* In the variable case, do the same as above, but in a loop. Note that we
1606 must be extra careful with variables wrapping around because we might be
1607 at the very top (or the very bottom) of the address space and we have to
1608 be able to handle this case properly; in particular, we use an equality
1609 test for the loop condition. */
382ff7aa 1610 else
1611 {
42982f3e 1612 rtx rounded_size, rounded_size_op, test_addr, last_addr, temp;
a903c451 1613 rtx_code_label *loop_lab = gen_label_rtx ();
1614 rtx_code_label *end_lab = gen_label_rtx ();
382ff7aa 1615
42982f3e 1616 /* Step 1: round SIZE to the previous multiple of the interval. */
1617
1618 /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL */
1619 rounded_size
5d5ee71f 1620 = simplify_gen_binary (AND, Pmode, size,
1621 gen_int_mode (-PROBE_INTERVAL, Pmode));
42982f3e 1622 rounded_size_op = force_operand (rounded_size, NULL_RTX);
1623
1624
1625 /* Step 2: compute initial and final value of the loop counter. */
1626
1627 /* TEST_ADDR = SP + FIRST. */
1628 test_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1629 stack_pointer_rtx,
c338f2e3 1630 gen_int_mode (first, Pmode)),
1631 NULL_RTX);
42982f3e 1632
1633 /* LAST_ADDR = SP + FIRST + ROUNDED_SIZE. */
1634 last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1635 test_addr,
1636 rounded_size_op), NULL_RTX);
1637
1638
1639 /* Step 3: the loop
1640
1641 while (TEST_ADDR != LAST_ADDR)
1642 {
1643 TEST_ADDR = TEST_ADDR + PROBE_INTERVAL
1644 probe at TEST_ADDR
1645 }
1646
1647	 probes at FIRST + N * PROBE_INTERVAL for values of N from 1
1648	 until N * PROBE_INTERVAL is equal to ROUNDED_SIZE. */
382ff7aa 1649
1650 emit_label (loop_lab);
382ff7aa 1651
42982f3e 1652 /* Jump to END_LAB if TEST_ADDR == LAST_ADDR. */
1653 emit_cmp_and_jump_insns (test_addr, last_addr, EQ, NULL_RTX, Pmode, 1,
1654 end_lab);
1655
1656 /* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL. */
1657 temp = expand_binop (Pmode, STACK_GROW_OPTAB, test_addr,
0359f9f5 1658 gen_int_mode (PROBE_INTERVAL, Pmode), test_addr,
382ff7aa 1659 1, OPTAB_WIDEN);
382ff7aa 1660
611234b4 1661 gcc_assert (temp == test_addr);
382ff7aa 1662
42982f3e 1663 /* Probe at TEST_ADDR. */
1664 emit_stack_probe (test_addr);
1665
1666 emit_jump (loop_lab);
1667
382ff7aa 1668 emit_label (end_lab);
1669
42982f3e 1670
1671 /* Step 4: probe at FIRST + SIZE if we cannot assert at compile-time
1672 that SIZE is equal to ROUNDED_SIZE. */
1673
1674 /* TEMP = SIZE - ROUNDED_SIZE. */
1675 temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
1676 if (temp != const0_rtx)
1677 {
1678 rtx addr;
1679
60778e62 1680 if (CONST_INT_P (temp))
42982f3e 1681 {
1682		  /* Use [base + disp] addressing mode if supported. */
1683 HOST_WIDE_INT offset = INTVAL (temp);
1684 addr = memory_address (Pmode,
29c05e22 1685 plus_constant (Pmode, last_addr,
42982f3e 1686 STACK_GROW_OFF (offset)));
1687 }
1688 else
1689 {
1690 /* Manual CSE if the difference is not known at compile-time. */
1691 temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
1692 addr = memory_address (Pmode,
1693 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1694 last_addr, temp));
1695 }
1696
1697 emit_stack_probe (addr);
1698 }
382ff7aa 1699 }
d9d7686b 1700
1701 /* Make sure nothing is scheduled before we are done. */
1702 emit_insn (gen_blockage ());
382ff7aa 1703}
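
/* Illustration of the constant case above -- a sketch for exposition
   only (hence guarded out), not part of the compiler.  It assumes
   PROBE_INTERVAL == 4096 and a downward-growing stack, and prints the
   offsets below SP at which probe_stack_range would emit probes.  */
#if 0
#include <stdio.h>

static void
show_constant_probes (long first, long size)
{
  long i;

  /* Probes at FIRST + N * PROBE_INTERVAL while that offset is
     below SIZE.  */
  for (i = 4096; i < size; i += 4096)
    printf ("probe at SP - %ld\n", first + i);

  /* Final probe at FIRST + SIZE.  */
  printf ("probe at SP - %ld\n", first + size);
}

/* show_constant_probes (0, 10000) prints probes at SP - 4096,
   SP - 8192 and SP - 10000.  */
#endif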
42982f3e 1704
d1b92264 1705/* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
1706 while probing it. This pushes when SIZE is positive. SIZE need not
1707 be constant. If ADJUST_BACK is true, adjust back the stack pointer
1708 by plus SIZE at the end. */
42982f3e 1709
d1b92264 1710void
1711anti_adjust_stack_and_probe (rtx size, bool adjust_back)
42982f3e 1712{
d1b92264 1713 /* We skip the probe for the first interval + a small dope of 4 words and
1714 probe that many bytes past the specified size to maintain a protection
1715	 area at the bottom of the stack. */
42982f3e 1716 const int dope = 4 * UNITS_PER_WORD;
1717
1718 /* First ensure SIZE is Pmode. */
1719 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
1720 size = convert_to_mode (Pmode, size, 1);
1721
1722 /* If we have a constant small number of probes to generate, that's the
1723 easy case. */
60778e62 1724 if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
42982f3e 1725 {
1726 HOST_WIDE_INT isize = INTVAL (size), i;
1727 bool first_probe = true;
1728
f164c08a 1729	 /* Adjust SP and probe at PROBE_INTERVAL + N * PROBE_INTERVAL for
42982f3e 1730	    values of N from 1 until N * PROBE_INTERVAL reaches or exceeds
1731	    SIZE.  If only one probe is needed, the loop will not generate
1732	    any code.  Then adjust and probe to PROBE_INTERVAL + SIZE. */
1733 for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
1734 {
1735 if (first_probe)
1736 {
1737 anti_adjust_stack (GEN_INT (2 * PROBE_INTERVAL + dope));
1738 first_probe = false;
1739 }
1740 else
1741 anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
1742 emit_stack_probe (stack_pointer_rtx);
1743 }
1744
1745 if (first_probe)
29c05e22 1746 anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
42982f3e 1747 else
29c05e22 1748 anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL - i));
42982f3e 1749 emit_stack_probe (stack_pointer_rtx);
1750 }
1751
1752 /* In the variable case, do the same as above, but in a loop. Note that we
1753 must be extra careful with variables wrapping around because we might be
1754 at the very top (or the very bottom) of the address space and we have to
1755 be able to handle this case properly; in particular, we use an equality
1756 test for the loop condition. */
1757 else
1758 {
1759 rtx rounded_size, rounded_size_op, last_addr, temp;
a903c451 1760 rtx_code_label *loop_lab = gen_label_rtx ();
1761 rtx_code_label *end_lab = gen_label_rtx ();
42982f3e 1762
1763
1764 /* Step 1: round SIZE to the previous multiple of the interval. */
1765
1766 /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL */
1767 rounded_size
5d5ee71f 1768 = simplify_gen_binary (AND, Pmode, size,
1769 gen_int_mode (-PROBE_INTERVAL, Pmode));
42982f3e 1770 rounded_size_op = force_operand (rounded_size, NULL_RTX);
1771
1772
1773 /* Step 2: compute initial and final value of the loop counter. */
1774
1775 /* SP = SP_0 + PROBE_INTERVAL. */
1776 anti_adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
1777
1778 /* LAST_ADDR = SP_0 + PROBE_INTERVAL + ROUNDED_SIZE. */
1779 last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1780 stack_pointer_rtx,
1781 rounded_size_op), NULL_RTX);
1782
1783
1784 /* Step 3: the loop
1785
f164c08a 1786 while (SP != LAST_ADDR)
1787 {
1788 SP = SP + PROBE_INTERVAL
1789 probe at SP
1790 }
42982f3e 1791
f164c08a 1792	 adjusts SP and probes at PROBE_INTERVAL + N * PROBE_INTERVAL for
42982f3e 1793	 values of N from 1 until N * PROBE_INTERVAL is equal to ROUNDED_SIZE. */
1794
1795 emit_label (loop_lab);
1796
1797 /* Jump to END_LAB if SP == LAST_ADDR. */
1798 emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
1799 Pmode, 1, end_lab);
1800
1801 /* SP = SP + PROBE_INTERVAL and probe at SP. */
1802 anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
1803 emit_stack_probe (stack_pointer_rtx);
1804
1805 emit_jump (loop_lab);
1806
1807 emit_label (end_lab);
1808
1809
f164c08a 1810 /* Step 4: adjust SP and probe at PROBE_INTERVAL + SIZE if we cannot
42982f3e 1811 assert at compile-time that SIZE is equal to ROUNDED_SIZE. */
1812
1813 /* TEMP = SIZE - ROUNDED_SIZE. */
1814 temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
1815 if (temp != const0_rtx)
1816 {
1817 /* Manual CSE if the difference is not known at compile-time. */
1818	  if (!CONST_INT_P (temp))
1819 temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
1820 anti_adjust_stack (temp);
1821 emit_stack_probe (stack_pointer_rtx);
1822 }
1823 }
1824
d1b92264 1825 /* Adjust back and account for the additional first interval. */
1826 if (adjust_back)
29c05e22 1827 adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
d1b92264 1828 else
1829 adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
42982f3e 1830}
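
/* Worked trace of the constant case above (values assumed for
   illustration): with PROBE_INTERVAL == 4096, UNITS_PER_WORD == 8
   (so dope == 32) and SIZE == 10000, the emitted sequence is

     SP -= 8224;  probe SP;    2 * PROBE_INTERVAL + dope
     SP -= 4096;  probe SP;    one more interval
     SP -= 1808;  probe SP;    SIZE + PROBE_INTERVAL - i, with i == 12288
     SP += 4128;               PROBE_INTERVAL + dope, ADJUST_BACK false

   for a net adjustment of exactly -SIZE == -10000 bytes.  */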
1831
7fa774cd 1832/* Return an rtx representing the register or memory location
1833 in which a scalar value of data type VALTYPE
1834 was returned by a function call to function FUNC.
46b3ff29 1835 FUNC is a FUNCTION_DECL, FNTYPE a FUNCTION_TYPE node if the precise
1836 function is known, otherwise 0.
16beb099 1837 OUTGOING is 1 if on a machine with register windows this function
1838 should return the register in which the function will put its result
6312a35e 1839 and 0 otherwise. */
7fa774cd 1840
1841rtx
fb80456a 1842hard_function_value (const_tree valtype, const_tree func, const_tree fntype,
35cb5232 1843 int outgoing ATTRIBUTE_UNUSED)
7fa774cd 1844{
16beb099 1845 rtx val;
02e7a332 1846
46b3ff29 1847 val = targetm.calls.function_value (valtype, func ? func : fntype, outgoing);
02e7a332 1848
8ad4c111 1849 if (REG_P (val)
883e35f4 1850 && GET_MODE (val) == BLKmode)
1851 {
02e7a332 1852 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
3754d046 1853 machine_mode tmpmode;
02e7a332 1854
6be48139 1855 /* int_size_in_bytes can return -1. We don't need a check here
89f18f73 1856 since the value of bytes will then be large enough that no
1857 mode will match anyway. */
6be48139 1858
883e35f4 1859 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
ff385626 1860 tmpmode != VOIDmode;
1861 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
1862 {
1863 /* Have we found a large enough mode? */
1864 if (GET_MODE_SIZE (tmpmode) >= bytes)
1865 break;
1866 }
883e35f4 1867
1868 /* No suitable mode found. */
611234b4 1869 gcc_assert (tmpmode != VOIDmode);
883e35f4 1870
1871 PUT_MODE (val, tmpmode);
6be48139 1872 }
883e35f4 1873 return val;
7fa774cd 1874}
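
/* For example, a 6-byte BLKmode return value in a register makes the
   loop above select DImode, the narrowest integer mode of at least
   6 bytes, QImode through SImode all being too small.  */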
1875
1876/* Return an rtx representing the register or memory location
1877 in which a scalar value of mode MODE was returned by a library call. */
1878
1879rtx
3754d046 1880hard_libcall_value (machine_mode mode, rtx fun)
7fa774cd 1881{
578d1295 1882 return targetm.calls.libcall_value (mode, fun);
7fa774cd 1883}
0ff6d058 1884
1885/* Look up the tree code for a given rtx code
1886 to provide the arithmetic operation for REAL_ARITHMETIC.
1887 The function returns an int because the caller may not know
1888 what `enum tree_code' means. */
1889
1890int
35cb5232 1891rtx_to_tree_code (enum rtx_code code)
0ff6d058 1892{
1893 enum tree_code tcode;
1894
1895 switch (code)
1896 {
1897 case PLUS:
1898 tcode = PLUS_EXPR;
1899 break;
1900 case MINUS:
1901 tcode = MINUS_EXPR;
1902 break;
1903 case MULT:
1904 tcode = MULT_EXPR;
1905 break;
1906 case DIV:
1907 tcode = RDIV_EXPR;
1908 break;
1909 case SMIN:
1910 tcode = MIN_EXPR;
1911 break;
1912 case SMAX:
1913 tcode = MAX_EXPR;
1914 break;
1915 default:
1916 tcode = LAST_AND_UNUSED_TREE_CODE;
1917 break;
1918 }
1919 return ((int) tcode);
1920}
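
/* For example, rtx_to_tree_code (PLUS) returns (int) PLUS_EXPR, while a
   code with no counterpart here, such as NEG, yields
   LAST_AND_UNUSED_TREE_CODE.  */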
1f3233d1 1921
1922#include "gt-explow.h"