7fa774cd 1/* Subroutines for manipulating rtx's in semantically interesting ways.
d353bf18 2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
7fa774cd 3
f12b58b3 4This file is part of GCC.
7fa774cd 5
f12b58b3 6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
8c4c00c1 8Software Foundation; either version 3, or (at your option) any later
f12b58b3 9version.
7fa774cd 10
f12b58b3 11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
7fa774cd 15
16You should have received a copy of the GNU General Public License
8c4c00c1 17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
7fa774cd 19
20
21#include "config.h"
405711de 22#include "system.h"
805e22b2 23#include "coretypes.h"
24#include "tm.h"
0b205f4c 25#include "diagnostic-core.h"
7fa774cd 26#include "rtl.h"
b20a8bb4 27#include "alias.h"
7fa774cd 28#include "tree.h"
9ed99284 29#include "stor-layout.h"
7953c610 30#include "tm_p.h"
7fa774cd 31#include "flags.h"
4852b829 32#include "except.h"
0a893c29 33#include "function.h"
d53441c8 34#include "insn-config.h"
35#include "expmed.h"
36#include "dojump.h"
37#include "explow.h"
38#include "calls.h"
39#include "emit-rtl.h"
40#include "varasm.h"
41#include "stmt.h"
7fa774cd 42#include "expr.h"
34517c64 43#include "insn-codes.h"
d8fc4d0b 44#include "optabs.h"
e3805e9e 45#include "libfuncs.h"
7fa774cd 46#include "recog.h"
c3f16ae3 47#include "langhooks.h"
46b3ff29 48#include "target.h"
218e3e4e 49#include "common/common-target.h"
f2d0e9f1 50#include "output.h"
7fa774cd 51
35cb5232 52static rtx break_out_memory_refs (rtx);
b2345915 53
54
55/* Truncate and perhaps sign-extend C as appropriate for MODE. */
56
57HOST_WIDE_INT
3754d046 58trunc_int_for_mode (HOST_WIDE_INT c, machine_mode mode)
b2345915 59{
ded805e6 60 int width = GET_MODE_PRECISION (mode);
b2345915 61
dd067362 62 /* You want to truncate to a _what_? */
058a1b7a 63 gcc_assert (SCALAR_INT_MODE_P (mode)
64 || POINTER_BOUNDS_MODE_P (mode));
dd067362 65
dea049dc 66 /* Canonicalize BImode to 0 and STORE_FLAG_VALUE. */
67 if (mode == BImode)
68 return c & 1 ? STORE_FLAG_VALUE : 0;
69
679dcb76 70 /* Sign-extend for the requested mode. */
71
72 if (width < HOST_BITS_PER_WIDE_INT)
73 {
74 HOST_WIDE_INT sign = 1;
75 sign <<= width - 1;
76 c &= (sign << 1) - 1;
77 c ^= sign;
78 c -= sign;
79 }
b2345915 80
81 return c;
82}
83
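/* Illustrative sketch, not part of explow.c: the mask/xor/subtract
   trick above, written with int64_t standing in for HOST_WIDE_INT and
   assuming 0 < WIDTH < 63 so that no shift overflows.  For WIDTH == 8,
   SIGN is 0x80: 0x1ff is first masked to 0xff, and (0xff ^ 0x80) - 0x80
   is -1, i.e. the value sign-extended from its low 8 bits.  */

#include <stdint.h>

static int64_t
example_trunc_int_for_width (int64_t c, int width)
{
  int64_t sign = (int64_t) 1 << (width - 1);
  c &= (sign << 1) - 1;		/* Keep only the low WIDTH bits.  */
  c ^= sign;			/* Flip the sign bit...  */
  c -= sign;			/* ...and subtract it back out, which
				   sign-extends the result.  */
  return c;
}
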
db20fb47 84/* Return an rtx for the sum of X and the integer C, given that X has
5cc04e45 85 mode MODE. INPLACE is true if X can be modified in place, or false
86 if it must be treated as immutable. */
7fa774cd 87
88rtx
3754d046 89plus_constant (machine_mode mode, rtx x, HOST_WIDE_INT c,
5cc04e45 90 bool inplace)
7fa774cd 91{
19cb6b50 92 RTX_CODE code;
a42e6220 93 rtx y;
19cb6b50 94 rtx tem;
7fa774cd 95 int all_constant = 0;
96
29c05e22 97 gcc_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
98
7fa774cd 99 if (c == 0)
100 return x;
101
102 restart:
103
104 code = GET_CODE (x);
a42e6220 105 y = x;
106
7fa774cd 107 switch (code)
108 {
e913b5cd 109 CASE_CONST_SCALAR_INT:
796b6678 110 return immed_wide_int_const (wi::add (std::make_pair (x, mode), c),
111 mode);
7fa774cd 112 case MEM:
113 /* If this is a reference to the constant pool, try replacing it with
114 a reference to a new constant. If the resulting address isn't
115 valid, don't return it because we have no way to validize it. */
116 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
117 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
118 {
29c05e22 119 tem = plus_constant (mode, get_pool_constant (XEXP (x, 0)), c);
db20fb47 120 tem = force_const_mem (GET_MODE (x), tem);
2effb064 121 /* Targets may disallow some constants in the constant pool, thus
122 force_const_mem may return NULL_RTX. */
123 if (tem && memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
7fa774cd 124 return tem;
125 }
126 break;
127
128 case CONST:
129 /* If adding to something entirely constant, set a flag
130 so that we can add a CONST around the result. */
5cc04e45 131 if (inplace && shared_const_p (x))
132 inplace = false;
7fa774cd 133 x = XEXP (x, 0);
134 all_constant = 1;
135 goto restart;
136
137 case SYMBOL_REF:
138 case LABEL_REF:
139 all_constant = 1;
140 break;
141
142 case PLUS:
db20fb47 143 /* The interesting case is adding the integer to a sum. Look
144 for a constant term in the sum and combine it with C. For an
145 integer constant term or a constant term that is not an
146 explicit integer, we combine or group them together anyway.
986b0677 147
148 We may not immediately return from the recursive call here, lest
149 all_constant gets lost. */
530f560b 150
db20fb47 151 if (CONSTANT_P (XEXP (x, 1)))
986b0677 152 {
5cc04e45 153 rtx term = plus_constant (mode, XEXP (x, 1), c, inplace);
154 if (term == const0_rtx)
155 x = XEXP (x, 0);
156 else if (inplace)
157 XEXP (x, 1) = term;
158 else
159 x = gen_rtx_PLUS (mode, XEXP (x, 0), term);
986b0677 160 c = 0;
161 }
5cc04e45 162 else if (rtx *const_loc = find_constant_term_loc (&y))
986b0677 163 {
5cc04e45 164 if (!inplace)
165 {
166 /* We need to be careful since X may be shared and we can't
167 modify it in place. */
168 x = copy_rtx (x);
169 const_loc = find_constant_term_loc (&x);
170 }
171 *const_loc = plus_constant (mode, *const_loc, c, true);
986b0677 172 c = 0;
173 }
941522d6 174 break;
b244d4c7 175
941522d6 176 default:
177 break;
7fa774cd 178 }
179
180 if (c != 0)
c338f2e3 181 x = gen_rtx_PLUS (mode, x, gen_int_mode (c, mode));
7fa774cd 182
183 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
184 return x;
185 else if (all_constant)
941522d6 186 return gen_rtx_CONST (mode, x);
7fa774cd 187 else
188 return x;
189}
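
/* Worked examples of the cases above (DImode and the register numbers
   are illustrative only; INPLACE is taken as false):

     plus_constant (DImode, (const_int 4), 3)
       => (const_int 7)                          [CASE_CONST_SCALAR_INT]
     plus_constant (DImode, (plus (reg 10) (const_int 4)), 3)
       => (plus (reg 10) (const_int 7))          [PLUS, constant term]
     plus_constant (DImode, (symbol_ref "x"), 8)
       => (const (plus (symbol_ref "x") (const_int 8)))
                                                 [all_constant forces a CONST]  */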
7fa774cd 190\f
191/* If X is a sum, return a new sum like X but lacking any constant terms.
192 Add all the removed constant terms into *CONSTPTR.
193 X itself is not altered. The result != X if and only if
194 it is not isomorphic to X. */
195
196rtx
35cb5232 197eliminate_constant_term (rtx x, rtx *constptr)
7fa774cd 198{
19cb6b50 199 rtx x0, x1;
7fa774cd 200 rtx tem;
201
202 if (GET_CODE (x) != PLUS)
203 return x;
204
205 /* First handle constants appearing at this level explicitly. */
971ba038 206 if (CONST_INT_P (XEXP (x, 1))
7fa774cd 207 && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
208 XEXP (x, 1)))
971ba038 209 && CONST_INT_P (tem))
7fa774cd 210 {
211 *constptr = tem;
212 return eliminate_constant_term (XEXP (x, 0), constptr);
213 }
214
215 tem = const0_rtx;
216 x0 = eliminate_constant_term (XEXP (x, 0), &tem);
217 x1 = eliminate_constant_term (XEXP (x, 1), &tem);
218 if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
219 && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
220 *constptr, tem))
971ba038 221 && CONST_INT_P (tem))
7fa774cd 222 {
223 *constptr = tem;
941522d6 224 return gen_rtx_PLUS (GET_MODE (x), x0, x1);
7fa774cd 225 }
226
227 return x;
228}
229
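/* Example of the recursion above (register numbers illustrative):
   starting from *CONSTPTR == const0_rtx,

     eliminate_constant_term ((plus (plus (reg 1) (const_int 8))
                               (const_int 4)), constptr)

   folds the 4 into *CONSTPTR, recurses into the inner PLUS to fold the
   8 as well, and returns (reg 1) with *CONSTPTR == (const_int 12).  */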
7fa774cd 230\f
231/* Return a copy of X in which all memory references
232 and all constants that involve symbol refs
233 have been replaced with new temporary registers.
234 Also emit code to load the memory locations and constants
235 into those registers.
236
237 If X contains no such constants or memory references,
238 X itself (not a copy) is returned.
239
240 If a constant is found in the address that is not a legitimate constant
241 in an insn, it is left alone in the hope that it might be valid in the
242 address.
243
244 X may contain no arithmetic except addition, subtraction and multiplication.
245 Values returned by expand_expr with 1 for sum_ok fit this constraint. */
246
247static rtx
35cb5232 248break_out_memory_refs (rtx x)
7fa774cd 249{
e16ceb8e 250 if (MEM_P (x)
e6f7d557 251 || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
7fa774cd 252 && GET_MODE (x) != VOIDmode))
46c86782 253 x = force_reg (GET_MODE (x), x);
7fa774cd 254 else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
255 || GET_CODE (x) == MULT)
256 {
19cb6b50 257 rtx op0 = break_out_memory_refs (XEXP (x, 0));
258 rtx op1 = break_out_memory_refs (XEXP (x, 1));
46c86782 259
7fa774cd 260 if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
98155838 261 x = simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
7fa774cd 262 }
46c86782 263
7fa774cd 264 return x;
265}
266
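/* For instance (pseudo-register numbers illustrative), given
   (plus (mem (reg 60)) (const_int 4)) this emits a load of the MEM into
   a fresh pseudo, say (reg 100), and returns
   (plus (reg 100) (const_int 4)); the CONST_INT, having VOIDmode, is
   left untouched.  */
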
98155838 267/* Given X, a memory address in address space AS' pointer mode, convert it to
268 an address in the address space's address mode, or vice versa (TO_MODE says
269 which way). We take advantage of the fact that pointers are not allowed to
270 overflow by commuting arithmetic operations over conversions so that address
98396fac 271 arithmetic insns can be used. IN_CONST is true if this conversion is inside
272 a CONST. */
184aad03 273
98396fac 274static rtx
3754d046 275convert_memory_address_addr_space_1 (machine_mode to_mode ATTRIBUTE_UNUSED,
98396fac 276 rtx x, addr_space_t as ATTRIBUTE_UNUSED,
b8f31768 277 bool in_const ATTRIBUTE_UNUSED)
184aad03 278{
85d654dd 279#ifndef POINTERS_EXTEND_UNSIGNED
42f5572e 280 gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
85d654dd 281 return x;
282#else /* defined(POINTERS_EXTEND_UNSIGNED) */
3754d046 283 machine_mode pointer_mode, address_mode, from_mode;
4a8e9301 284 rtx temp;
e5716f7e 285 enum rtx_code code;
4a8e9301 286
85d654dd 287 /* If X already has the right mode, just return it. */
288 if (GET_MODE (x) == to_mode)
289 return x;
290
98155838 291 pointer_mode = targetm.addr_space.pointer_mode (as);
292 address_mode = targetm.addr_space.address_mode (as);
293 from_mode = to_mode == pointer_mode ? address_mode : pointer_mode;
85d654dd 294
52df3724 295 /* Here we handle some special cases. If none of them apply, fall through
296 to the default case. */
184aad03 297 switch (GET_CODE (x))
298 {
0349edce 299 CASE_CONST_SCALAR_INT:
e5716f7e 300 if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
301 code = TRUNCATE;
302 else if (POINTERS_EXTEND_UNSIGNED < 0)
303 break;
304 else if (POINTERS_EXTEND_UNSIGNED > 0)
305 code = ZERO_EXTEND;
306 else
307 code = SIGN_EXTEND;
308 temp = simplify_unary_operation (code, to_mode, x, from_mode);
309 if (temp)
310 return temp;
311 break;
4a8e9301 312
bc17f7a4 313 case SUBREG:
9fd73f31 314 if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
3cc092f7 315 && GET_MODE (SUBREG_REG (x)) == to_mode)
bc17f7a4 316 return SUBREG_REG (x);
317 break;
318
184aad03 319 case LABEL_REF:
b49f2e4b 320 temp = gen_rtx_LABEL_REF (to_mode, LABEL_REF_LABEL (x));
9fd73f31 321 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
322 return temp;
3cc092f7 323 break;
4a8e9301 324
184aad03 325 case SYMBOL_REF:
ae5242d3 326 temp = shallow_copy_rtx (x);
327 PUT_MODE (temp, to_mode);
9fd73f31 328 return temp;
3cc092f7 329 break;
184aad03 330
4a8e9301 331 case CONST:
9fd73f31 332 return gen_rtx_CONST (to_mode,
98396fac 333 convert_memory_address_addr_space_1
334 (to_mode, XEXP (x, 0), as, true));
3cc092f7 335 break;
184aad03 336
52df3724 337 case PLUS:
338 case MULT:
aeb17b4b 339 /* For addition we can safely permute the conversion and addition
340 operation if one operand is a constant and converting the constant
341 does not change it, or if one operand is a constant and we are
342 using a ptr_extend instruction (POINTERS_EXTEND_UNSIGNED < 0).
2800ac1a 343 We can always safely permute them if we are making the address
98396fac 344 narrower. Inside a CONST RTL, this is safe for both pointers
345 zero or sign extended as pointers cannot wrap. */
e5716f7e 346 if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
347 || (GET_CODE (x) == PLUS
971ba038 348 && CONST_INT_P (XEXP (x, 1))
98396fac 349 && ((in_const && POINTERS_EXTEND_UNSIGNED != 0)
350 || XEXP (x, 1) == convert_memory_address_addr_space_1
351 (to_mode, XEXP (x, 1), as, in_const)
352 || POINTERS_EXTEND_UNSIGNED < 0)))
6be48139 353 return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
98396fac 354 convert_memory_address_addr_space_1
355 (to_mode, XEXP (x, 0), as, in_const),
e5716f7e 356 XEXP (x, 1));
941522d6 357 break;
6be48139 358
941522d6 359 default:
360 break;
184aad03 361 }
52df3724 362
363 return convert_modes (to_mode, from_mode,
364 x, POINTERS_EXTEND_UNSIGNED);
85d654dd 365#endif /* defined(POINTERS_EXTEND_UNSIGNED) */
184aad03 366}
98396fac 367
368/* Given X, a memory address in address space AS' pointer mode, convert it to
369 an address in the address space's address mode, or vice versa (TO_MODE says
370 which way). We take advantage of the fact that pointers are not allowed to
371 overflow by commuting arithmetic operations over conversions so that address
372 arithmetic insns can be used. */
373
374rtx
3754d046 375convert_memory_address_addr_space (machine_mode to_mode, rtx x, addr_space_t as)
98396fac 376{
377 return convert_memory_address_addr_space_1 (to_mode, x, as, false);
378}
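
/* A concrete reading of the PLUS case above, assuming a hypothetical
   x32-like target where ptr_mode is SImode, Pmode is DImode and
   POINTERS_EXTEND_UNSIGNED is 1: converting
   (plus:SI (reg:SI p) (const_int 4)) up to DImode keeps the addition
   outside the extension, giving (plus:DI (reg:DI p') (const_int 4))
   where p' holds the zero-extended pointer, because extending the
   constant 4 does not change it.  Narrowing in the other direction is
   always safe and simply truncates.  */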
7fa774cd 379\f
d53441c8 380
bd1a81f7 381/* Return something equivalent to X but valid as a memory address for something
382 of mode MODE in the named address space AS. When X is not itself valid,
383 this works by copying X or subexpressions of it into registers. */
7fa774cd 384
385rtx
3754d046 386memory_address_addr_space (machine_mode mode, rtx x, addr_space_t as)
7fa774cd 387{
19cb6b50 388 rtx oldx = x;
3754d046 389 machine_mode address_mode = targetm.addr_space.address_mode (as);
7fa774cd 390
98155838 391 x = convert_memory_address_addr_space (address_mode, x, as);
184aad03 392
c7bf1374 393 /* By passing constant addresses through registers
7fa774cd 394 we get a chance to cse them. */
e6f7d557 395 if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
98155838 396 x = force_reg (address_mode, x);
7fa774cd 397
7fa774cd 398 /* We get better cse by rejecting indirect addressing at this stage.
399 Let the combiner create indirect addresses where appropriate.
400 For now, generate the code so that the subexpressions useful to share
401 are visible. But not if cse won't be done! */
3a6d729e 402 else
7fa774cd 403 {
8ad4c111 404 if (! cse_not_expected && !REG_P (x))
3a6d729e 405 x = break_out_memory_refs (x);
406
407 /* At this point, any valid address is accepted. */
bd1a81f7 408 if (memory_address_addr_space_p (mode, x, as))
4d25f9eb 409 goto done;
3a6d729e 410
411 /* If it was valid before but breaking out memory refs invalidated it,
412 use it the old way. */
bd1a81f7 413 if (memory_address_addr_space_p (mode, oldx, as))
4d25f9eb 414 {
415 x = oldx;
416 goto done;
417 }
3a6d729e 418
419 /* Perform machine-dependent transformations on X
420 in certain cases. This is not necessary since the code
421 below can handle all possible cases, but machine-dependent
422 transformations can make better code. */
41e3a0c7 423 {
bd1a81f7 424 rtx orig_x = x;
425 x = targetm.addr_space.legitimize_address (x, oldx, mode, as);
426 if (orig_x != x && memory_address_addr_space_p (mode, x, as))
41e3a0c7 427 goto done;
428 }
3a6d729e 429
430 /* PLUS and MULT can appear in special ways
431 as the result of attempts to make an address usable for indexing.
432 Usually they are dealt with by calling force_operand, below.
433 But a sum containing constant terms is special
434 if removing them makes the sum a valid address:
435 then we generate that address in a register
436 and index off of it. We do this because it often makes
437 shorter code, and because the addresses thus generated
438 in registers often become common subexpressions. */
439 if (GET_CODE (x) == PLUS)
440 {
441 rtx constant_term = const0_rtx;
442 rtx y = eliminate_constant_term (x, &constant_term);
443 if (constant_term == const0_rtx
bd1a81f7 444 || ! memory_address_addr_space_p (mode, y, as))
3a6d729e 445 x = force_operand (x, NULL_RTX);
446 else
447 {
941522d6 448 y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
bd1a81f7 449 if (! memory_address_addr_space_p (mode, y, as))
3a6d729e 450 x = force_operand (x, NULL_RTX);
451 else
452 x = y;
453 }
454 }
7fa774cd 455
492820ac 456 else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
3a6d729e 457 x = force_operand (x, NULL_RTX);
7fa774cd 458
3a6d729e 459 /* If we have a register that's an invalid address,
460 it must be a hard reg of the wrong class. Copy it to a pseudo. */
8ad4c111 461 else if (REG_P (x))
3a6d729e 462 x = copy_to_reg (x);
463
464 /* Last resort: copy the value to a register, since
465 the register is a valid address. */
466 else
98155838 467 x = force_reg (address_mode, x);
7fa774cd 468 }
3a6d729e 469
470 done:
471
bd1a81f7 472 gcc_assert (memory_address_addr_space_p (mode, x, as));
46c86782 473 /* If we didn't change the address, we are done. Otherwise, mark
474 a reg as a pointer if we have REG or REG + CONST_INT. */
475 if (oldx == x)
476 return x;
8ad4c111 477 else if (REG_P (x))
80909c64 478 mark_reg_pointer (x, BITS_PER_UNIT);
46c86782 479 else if (GET_CODE (x) == PLUS
8ad4c111 480 && REG_P (XEXP (x, 0))
971ba038 481 && CONST_INT_P (XEXP (x, 1)))
80909c64 482 mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);
46c86782 483
3a6d729e 484 /* OLDX may have been the address on a temporary. Update the address
485 to indicate that X is now used. */
486 update_temp_slot_address (oldx, x);
487
7fa774cd 488 return x;
489}
490
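/* Sketch of the PLUS special case above, on a hypothetical target that
   has reg+reg and reg+disp addressing but no reg+reg+disp: for
   X == (plus (plus (reg 1) (reg 2)) (const_int 20)), the constant 20 is
   split off, (plus (reg 1) (reg 2)) is copied into a fresh pseudo, say
   (reg 3), and the returned address is (plus (reg 3) (const_int 20)),
   whose base register can then be shared by neighboring accesses.  */
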
d2b9158b 491/* If REF is a MEM with an invalid address, change it into a valid address.
492 Pass through anything else unchanged. REF must be an unshared rtx and
493 the function may modify it in-place. */
7fa774cd 494
495rtx
35cb5232 496validize_mem (rtx ref)
7fa774cd 497{
e16ceb8e 498 if (!MEM_P (ref))
7fa774cd 499 return ref;
f2d0e9f1 500 ref = use_anchored_address (ref);
bd1a81f7 501 if (memory_address_addr_space_p (GET_MODE (ref), XEXP (ref, 0),
502 MEM_ADDR_SPACE (ref)))
7fa774cd 503 return ref;
537ffcfc 504
d2b9158b 505 return replace_equiv_address (ref, XEXP (ref, 0), true);
7fa774cd 506}
f2d0e9f1 507
508/* If X is a memory reference to a member of an object block, try rewriting
509 it to use an anchor instead. Return the new memory reference on success
510 and the old one on failure. */
511
512rtx
513use_anchored_address (rtx x)
514{
515 rtx base;
516 HOST_WIDE_INT offset;
3754d046 517 machine_mode mode;
f2d0e9f1 518
519 if (!flag_section_anchors)
520 return x;
521
522 if (!MEM_P (x))
523 return x;
524
525 /* Split the address into a base and offset. */
526 base = XEXP (x, 0);
527 offset = 0;
528 if (GET_CODE (base) == CONST
529 && GET_CODE (XEXP (base, 0)) == PLUS
971ba038 530 && CONST_INT_P (XEXP (XEXP (base, 0), 1)))
f2d0e9f1 531 {
532 offset += INTVAL (XEXP (XEXP (base, 0), 1));
533 base = XEXP (XEXP (base, 0), 0);
534 }
535
536 /* Check whether BASE is suitable for anchors. */
537 if (GET_CODE (base) != SYMBOL_REF
6617cbc1 538 || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
f2d0e9f1 539 || SYMBOL_REF_ANCHOR_P (base)
102e3995 540 || SYMBOL_REF_BLOCK (base) == NULL
f2d0e9f1 541 || !targetm.use_anchors_for_symbol_p (base))
542 return x;
543
544 /* Decide where BASE is going to be. */
545 place_block_symbol (base);
546
547 /* Get the anchor we need to use. */
548 offset += SYMBOL_REF_BLOCK_OFFSET (base);
549 base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
550 SYMBOL_REF_TLS_MODEL (base));
551
552 /* Work out the offset from the anchor. */
553 offset -= SYMBOL_REF_BLOCK_OFFSET (base);
554
555 /* If we're going to run a CSE pass, force the anchor into a register.
556 We will then be able to reuse registers for several accesses, if the
557 target costs say that that's worthwhile. */
29c05e22 558 mode = GET_MODE (base);
f2d0e9f1 559 if (!cse_not_expected)
29c05e22 560 base = force_reg (mode, base);
f2d0e9f1 561
29c05e22 562 return replace_equiv_address (x, plus_constant (mode, base, offset));
f2d0e9f1 563}
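
/* For example, if object B sits 12 bytes into its object block and the
   chosen anchor covers block offset 0, (mem (symbol_ref "B")) can be
   rewritten as (mem (plus (reg A) (const_int 12))), where (reg A) holds
   the anchor address; the anchor SYMBOL_REF is used directly when no
   CSE pass is expected to run.  */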
7fa774cd 564\f
7fa774cd 565/* Copy the value or contents of X to a new temp reg and return that reg. */
566
567rtx
35cb5232 568copy_to_reg (rtx x)
7fa774cd 569{
19cb6b50 570 rtx temp = gen_reg_rtx (GET_MODE (x));
6be48139 571
7fa774cd 572 /* If not an operand, it must be an address with PLUS and MULT, so
6be48139 573 do the computation. */
7fa774cd 574 if (! general_operand (x, VOIDmode))
575 x = force_operand (x, temp);
6be48139 576
7fa774cd 577 if (x != temp)
578 emit_move_insn (temp, x);
579
580 return temp;
581}
582
583/* Like copy_to_reg but always give the new register mode Pmode
584 in case X is a constant. */
585
586rtx
35cb5232 587copy_addr_to_reg (rtx x)
7fa774cd 588{
589 return copy_to_mode_reg (Pmode, x);
590}
591
592/* Like copy_to_reg but always give the new register mode MODE
593 in case X is a constant. */
594
595rtx
3754d046 596copy_to_mode_reg (machine_mode mode, rtx x)
7fa774cd 597{
19cb6b50 598 rtx temp = gen_reg_rtx (mode);
6be48139 599
7fa774cd 600 /* If not an operand, it must be an address with PLUS and MULT, so
6be48139 601 do the computation. */
7fa774cd 602 if (! general_operand (x, VOIDmode))
603 x = force_operand (x, temp);
604
611234b4 605 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
7fa774cd 606 if (x != temp)
607 emit_move_insn (temp, x);
608 return temp;
609}
610
611/* Load X into a register if it is not already one.
612 Use mode MODE for the register.
613 X should be valid for mode MODE, but it may be a constant which
614 is valid for all integer modes; that's why caller must specify MODE.
615
616 The caller must not alter the value in the register we return,
617 since we mark it as a "constant" register. */
618
619rtx
3754d046 620force_reg (machine_mode mode, rtx x)
7fa774cd 621{
a903c451 622 rtx temp, set;
623 rtx_insn *insn;
7fa774cd 624
8ad4c111 625 if (REG_P (x))
7fa774cd 626 return x;
6be48139 627
fac6aae6 628 if (general_operand (x, mode))
629 {
630 temp = gen_reg_rtx (mode);
631 insn = emit_move_insn (temp, x);
632 }
633 else
634 {
635 temp = force_operand (x, NULL_RTX);
8ad4c111 636 if (REG_P (temp))
fac6aae6 637 insn = get_last_insn ();
638 else
639 {
640 rtx temp2 = gen_reg_rtx (mode);
641 insn = emit_move_insn (temp2, temp);
642 temp = temp2;
643 }
644 }
b3c85201 645
7fa774cd 646 /* Let optimizers know that TEMP's value never changes
b3c85201 647 and that X can be substituted for it. Don't get confused
648 if INSN set something else (such as a SUBREG of TEMP). */
649 if (CONSTANT_P (x)
650 && (set = single_set (insn)) != 0
63160ce9 651 && SET_DEST (set) == temp
652 && ! rtx_equal_p (x, SET_SRC (set)))
c080d8f0 653 set_unique_reg_note (insn, REG_EQUAL, x);
fac6aae6 654
62350d6c 655 /* Let optimizers know that TEMP is a pointer, and if so, the
656 known alignment of that pointer. */
657 {
658 unsigned align = 0;
659 if (GET_CODE (x) == SYMBOL_REF)
660 {
661 align = BITS_PER_UNIT;
662 if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
663 align = DECL_ALIGN (SYMBOL_REF_DECL (x));
664 }
665 else if (GET_CODE (x) == LABEL_REF)
666 align = BITS_PER_UNIT;
667 else if (GET_CODE (x) == CONST
668 && GET_CODE (XEXP (x, 0)) == PLUS
669 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
971ba038 670 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
62350d6c 671 {
672 rtx s = XEXP (XEXP (x, 0), 0);
673 rtx c = XEXP (XEXP (x, 0), 1);
674 unsigned sa, ca;
675
676 sa = BITS_PER_UNIT;
677 if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
678 sa = DECL_ALIGN (SYMBOL_REF_DECL (s));
679
7e8d812e 680 if (INTVAL (c) == 0)
681 align = sa;
682 else
683 {
684 ca = ctz_hwi (INTVAL (c)) * BITS_PER_UNIT;
685 align = MIN (sa, ca);
686 }
62350d6c 687 }
688
40b93dba 689 if (align || (MEM_P (x) && MEM_POINTER (x)))
62350d6c 690 mark_reg_pointer (temp, align);
691 }
692
7fa774cd 693 return temp;
694}
695
696/* If X is a memory ref, copy its contents to a new temp reg and return
697 that reg. Otherwise, return X. */
698
699rtx
35cb5232 700force_not_mem (rtx x)
7fa774cd 701{
19cb6b50 702 rtx temp;
bf6a742f 703
e16ceb8e 704 if (!MEM_P (x) || GET_MODE (x) == BLKmode)
7fa774cd 705 return x;
bf6a742f 706
7fa774cd 707 temp = gen_reg_rtx (GET_MODE (x));
8d350e69 708
709 if (MEM_POINTER (x))
710 REG_POINTER (temp) = 1;
711
7fa774cd 712 emit_move_insn (temp, x);
713 return temp;
714}
715
716/* Copy X to TARGET (if it's nonzero and a reg)
717 or to a new temp reg and return that reg.
718 MODE is the mode to use for X in case it is a constant. */
719
720rtx
3754d046 721copy_to_suggested_reg (rtx x, rtx target, machine_mode mode)
7fa774cd 722{
19cb6b50 723 rtx temp;
7fa774cd 724
8ad4c111 725 if (target && REG_P (target))
7fa774cd 726 temp = target;
727 else
728 temp = gen_reg_rtx (mode);
729
730 emit_move_insn (temp, x);
731 return temp;
732}
733\f
3b2411a8 734/* Return the mode to use to pass or return a scalar of TYPE and MODE.
f9aab3b6 735 PUNSIGNEDP points to the signedness of the type and may be adjusted
736 to show what signedness to use on extension operations.
737
3b2411a8 738 FOR_RETURN is nonzero if the caller is promoting the return value
739 of FNDECL, else it is for promoting args. */
f9aab3b6 740
3754d046 741machine_mode
742promote_function_mode (const_tree type, machine_mode mode, int *punsignedp,
3b2411a8 743 const_tree funtype, int for_return)
744{
adaf4ef0 745 /* Called without a type node for a libcall. */
746 if (type == NULL_TREE)
747 {
748 if (INTEGRAL_MODE_P (mode))
749 return targetm.calls.promote_function_mode (NULL_TREE, mode,
750 punsignedp, funtype,
751 for_return);
752 else
753 return mode;
754 }
755
3b2411a8 756 switch (TREE_CODE (type))
757 {
758 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
759 case REAL_TYPE: case OFFSET_TYPE: case FIXED_POINT_TYPE:
760 case POINTER_TYPE: case REFERENCE_TYPE:
761 return targetm.calls.promote_function_mode (type, mode, punsignedp, funtype,
762 for_return);
763
764 default:
765 return mode;
766 }
767}
768/* Return the mode to use to store a scalar of TYPE and MODE.
769 PUNSIGNEDP points to the signedness of the type and may be adjusted
770 to show what signedness to use on extension operations. */
19347327 771
3754d046 772machine_mode
773promote_mode (const_tree type ATTRIBUTE_UNUSED, machine_mode mode,
2888b920 774 int *punsignedp ATTRIBUTE_UNUSED)
f9aab3b6 775{
49130e9a 776#ifdef PROMOTE_MODE
777 enum tree_code code;
778 int unsignedp;
779#endif
780
adaf4ef0 781 /* For libcalls this is invoked without TYPE from the backends
782 TARGET_PROMOTE_FUNCTION_MODE hooks. Don't do anything in that
783 case. */
784 if (type == NULL_TREE)
785 return mode;
786
3b2411a8 787 /* FIXME: this is the same logic that was there until GCC 4.4, but we
788 probably want to test POINTERS_EXTEND_UNSIGNED even if PROMOTE_MODE
789 is not defined. The affected targets are M32C, S390, SPARC. */
790#ifdef PROMOTE_MODE
49130e9a 791 code = TREE_CODE (type);
792 unsignedp = *punsignedp;
f9aab3b6 793
f9aab3b6 794 switch (code)
795 {
f9aab3b6 796 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
06f0b99c 797 case REAL_TYPE: case OFFSET_TYPE: case FIXED_POINT_TYPE:
3b2411a8 798 PROMOTE_MODE (mode, unsignedp, type);
799 *punsignedp = unsignedp;
800 return mode;
f9aab3b6 801 break;
f9aab3b6 802
184aad03 803#ifdef POINTERS_EXTEND_UNSIGNED
bc244e4c 804 case REFERENCE_TYPE:
f9aab3b6 805 case POINTER_TYPE:
3b2411a8 806 *punsignedp = POINTERS_EXTEND_UNSIGNED;
98155838 807 return targetm.addr_space.address_mode
808 (TYPE_ADDR_SPACE (TREE_TYPE (type)));
f9aab3b6 809 break;
184aad03 810#endif
6be48139 811
941522d6 812 default:
3b2411a8 813 return mode;
f9aab3b6 814 }
3b2411a8 815#else
f9aab3b6 816 return mode;
3b2411a8 817#endif
f9aab3b6 818}
3b2411a8 819
820
821/* Use one of promote_mode or promote_function_mode to find the promoted
822 mode of DECL. If PUNSIGNEDP is not NULL, store there the unsignedness
823 of DECL after promotion. */
824
3754d046 825machine_mode
3b2411a8 826promote_decl_mode (const_tree decl, int *punsignedp)
827{
828 tree type = TREE_TYPE (decl);
829 int unsignedp = TYPE_UNSIGNED (type);
3754d046 830 machine_mode mode = DECL_MODE (decl);
831 machine_mode pmode;
3b2411a8 832
c879dbcf 833 if (TREE_CODE (decl) == RESULT_DECL
834 || TREE_CODE (decl) == PARM_DECL)
3b2411a8 835 pmode = promote_function_mode (type, mode, &unsignedp,
c879dbcf 836 TREE_TYPE (current_function_decl), 2);
3b2411a8 837 else
838 pmode = promote_mode (type, mode, &unsignedp);
839
840 if (punsignedp)
841 *punsignedp = unsignedp;
842 return pmode;
843}
844
f9aab3b6 845\f
dfe00a8f 846/* Controls the behaviour of {anti_,}adjust_stack. */
847static bool suppress_reg_args_size;
848
849/* A helper for adjust_stack and anti_adjust_stack. */
850
851static void
852adjust_stack_1 (rtx adjust, bool anti_p)
853{
a903c451 854 rtx temp;
855 rtx_insn *insn;
dfe00a8f 856
dfe00a8f 857 /* Hereafter anti_p means subtract_p. */
3764c94e 858 if (!STACK_GROWS_DOWNWARD)
859 anti_p = !anti_p;
dfe00a8f 860
861 temp = expand_binop (Pmode,
862 anti_p ? sub_optab : add_optab,
863 stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
864 OPTAB_LIB_WIDEN);
865
866 if (temp != stack_pointer_rtx)
867 insn = emit_move_insn (stack_pointer_rtx, temp);
868 else
869 {
870 insn = get_last_insn ();
871 temp = single_set (insn);
872 gcc_assert (temp != NULL && SET_DEST (temp) == stack_pointer_rtx);
873 }
874
875 if (!suppress_reg_args_size)
876 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
877}
878
7fa774cd 879/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
880 This pops when ADJUST is positive. ADJUST need not be constant. */
881
882void
35cb5232 883adjust_stack (rtx adjust)
7fa774cd 884{
7fa774cd 885 if (adjust == const0_rtx)
886 return;
887
91b70175 888 /* We expect all variable-sized adjustments to be a multiple of
889 PREFERRED_STACK_BOUNDARY. */
971ba038 890 if (CONST_INT_P (adjust))
91b70175 891 stack_pointer_delta -= INTVAL (adjust);
892
dfe00a8f 893 adjust_stack_1 (adjust, false);
7fa774cd 894}
895
896/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
897 This pushes when ADJUST is positive. ADJUST need not be constant. */
898
899void
35cb5232 900anti_adjust_stack (rtx adjust)
7fa774cd 901{
7fa774cd 902 if (adjust == const0_rtx)
903 return;
904
91b70175 905 /* We expect all variable-sized adjustments to be a multiple of
906 PREFERRED_STACK_BOUNDARY. */
971ba038 907 if (CONST_INT_P (adjust))
91b70175 908 stack_pointer_delta += INTVAL (adjust);
909
dfe00a8f 910 adjust_stack_1 (adjust, true);
7fa774cd 911}
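
/* So, on a downward-growing stack, adjust_stack (GEN_INT (16)) emits
   SP := SP + 16 (popping 16 bytes), while anti_adjust_stack
   (GEN_INT (16)) emits SP := SP - 16 (pushing them); on an
   upward-growing stack adjust_stack_1 swaps the two operations.  */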
912
913/* Round the size of a block to be pushed up to the boundary required
914 by this machine. SIZE is the desired size, which need not be constant. */
915
69d39d70 916static rtx
35cb5232 917round_push (rtx size)
7fa774cd 918{
60778e62 919 rtx align_rtx, alignm1_rtx;
3737c3eb 920
60778e62 921 if (!SUPPORTS_STACK_ALIGNMENT
922 || crtl->preferred_stack_boundary == MAX_SUPPORTED_STACK_ALIGNMENT)
7fa774cd 923 {
60778e62 924 int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
925
926 if (align == 1)
927 return size;
928
929 if (CONST_INT_P (size))
930 {
931 HOST_WIDE_INT new_size = (INTVAL (size) + align - 1) / align * align;
3737c3eb 932
60778e62 933 if (INTVAL (size) != new_size)
934 size = GEN_INT (new_size);
935 return size;
936 }
937
938 align_rtx = GEN_INT (align);
939 alignm1_rtx = GEN_INT (align - 1);
7fa774cd 940 }
941 else
942 {
60778e62 943 /* If crtl->preferred_stack_boundary might still grow, use
944 virtual_preferred_stack_boundary_rtx instead. This will be
945 substituted by the right value in vregs pass and optimized
946 during combine. */
947 align_rtx = virtual_preferred_stack_boundary_rtx;
29c05e22 948 alignm1_rtx = force_operand (plus_constant (Pmode, align_rtx, -1),
949 NULL_RTX);
7fa774cd 950 }
3737c3eb 951
60778e62 952 /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
953 but we know it can't. So do the addition ourselves and then use
954 TRUNC_DIV_EXPR. */
955 size = expand_binop (Pmode, add_optab, size, alignm1_rtx,
956 NULL_RTX, 1, OPTAB_LIB_WIDEN);
957 size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, align_rtx,
958 NULL_RTX, 1);
959 size = expand_mult (Pmode, size, align_rtx, NULL_RTX, 1);
960
7fa774cd 961 return size;
962}
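
/* Worked example of the rounding above: with a 128-bit preferred stack
   boundary, ALIGN is 16 bytes, so a 37-byte request becomes
   (37 + 15) / 16 * 16 == 48; the add/divide/multiply sequence below the
   special cases computes the same thing when the size or the boundary
   is not yet known.  */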
963\f
dbd6697a 964/* Save the stack pointer for the purpose in SAVE_LEVEL. PSAVE is a pointer
965 to a previously-created save area. If no save area has been allocated,
966 this function will allocate one. If a save area is specified, it
e9c97615 967 must be of the proper mode. */
dbd6697a 968
969void
e9c97615 970emit_stack_save (enum save_level save_level, rtx *psave)
dbd6697a 971{
972 rtx sa = *psave;
973 /* The default is that we use a move insn and save in a Pmode object. */
71512c05 974 rtx_insn *(*fcn) (rtx, rtx) = gen_move_insn;
3754d046 975 machine_mode mode = STACK_SAVEAREA_MODE (save_level);
dbd6697a 976
977 /* See if this machine has anything special to do for this kind of save. */
978 switch (save_level)
979 {
dbd6697a 980 case SAVE_BLOCK:
71512c05 981 if (targetm.have_save_stack_block ())
982 fcn = targetm.gen_save_stack_block;
dbd6697a 983 break;
dbd6697a 984 case SAVE_FUNCTION:
71512c05 985 if (targetm.have_save_stack_function ())
986 fcn = targetm.gen_save_stack_function;
dbd6697a 987 break;
dbd6697a 988 case SAVE_NONLOCAL:
71512c05 989 if (targetm.have_save_stack_nonlocal ())
990 fcn = targetm.gen_save_stack_nonlocal;
dbd6697a 991 break;
941522d6 992 default:
993 break;
dbd6697a 994 }
995
996 /* If there is no save area and we have to allocate one, do so. Otherwise
997 verify the save area is the proper mode. */
998
999 if (sa == 0)
1000 {
1001 if (mode != VOIDmode)
1002 {
1003 if (save_level == SAVE_NONLOCAL)
1004 *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
1005 else
1006 *psave = sa = gen_reg_rtx (mode);
1007 }
1008 }
dbd6697a 1009
e9c97615 1010 do_pending_stack_adjust ();
1011 if (sa != 0)
1012 sa = validize_mem (sa);
1013 emit_insn (fcn (sa, stack_pointer_rtx));
dbd6697a 1014}
1015
1016/* Restore the stack pointer for the purpose in SAVE_LEVEL. SA is the save
e9c97615 1017 area made by emit_stack_save. If it is zero, we have nothing to do. */
dbd6697a 1018
1019void
e9c97615 1020emit_stack_restore (enum save_level save_level, rtx sa)
dbd6697a 1021{
1022 /* The default is that we use a move insn. */
71512c05 1023 rtx_insn *(*fcn) (rtx, rtx) = gen_move_insn;
dbd6697a 1024
1ea0f42a 1025 /* If stack_realign_drap, the x86 backend emits a prologue that aligns both
1026 STACK_POINTER and HARD_FRAME_POINTER.
1027 If stack_realign_fp, the x86 backend emits a prologue that aligns only
1028 STACK_POINTER. This renders the HARD_FRAME_POINTER unusable for accessing
1029 aligned variables, which is reflected in ix86_can_eliminate.
1030 We normally still have the realigned STACK_POINTER that we can use.
1031 But if there is a stack restore still present at reload, it can trigger
1032 mark_not_eliminable for the STACK_POINTER, leaving no way to eliminate
1033 FRAME_POINTER into a hard reg.
1034 To prevent this situation, we force need_drap if we emit a stack
1035 restore. */
1036 if (SUPPORTS_STACK_ALIGNMENT)
1037 crtl->need_drap = true;
1038
dbd6697a 1039 /* See if this machine has anything special to do for this kind of save. */
1040 switch (save_level)
1041 {
dbd6697a 1042 case SAVE_BLOCK:
71512c05 1043 if (targetm.have_restore_stack_block ())
1044 fcn = targetm.gen_restore_stack_block;
dbd6697a 1045 break;
dbd6697a 1046 case SAVE_FUNCTION:
71512c05 1047 if (targetm.have_restore_stack_function ())
1048 fcn = targetm.gen_restore_stack_function;
dbd6697a 1049 break;
dbd6697a 1050 case SAVE_NONLOCAL:
71512c05 1051 if (targetm.have_restore_stack_nonlocal ())
1052 fcn = targetm.gen_restore_stack_nonlocal;
dbd6697a 1053 break;
941522d6 1054 default:
1055 break;
dbd6697a 1056 }
1057
bbe57f89 1058 if (sa != 0)
62ede483 1059 {
1060 sa = validize_mem (sa);
1061 /* These clobbers prevent the scheduler from moving
1062 references to variable arrays below the code
7299020b 1063 that deletes (pops) the arrays. */
18b42941 1064 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1065 emit_clobber (gen_rtx_MEM (BLKmode, stack_pointer_rtx));
62ede483 1066 }
bbe57f89 1067
05ae776c 1068 discard_pending_stack_adjust ();
1069
e9c97615 1070 emit_insn (fcn (stack_pointer_rtx, sa));
dbd6697a 1071}
4ee9c684 1072
1073/* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
97354ae4 1074 function. This should be called whenever we allocate or deallocate
1075 dynamic stack space. */
4ee9c684 1076
1077void
1078update_nonlocal_goto_save_area (void)
1079{
1080 tree t_save;
1081 rtx r_save;
1082
1083 /* The nonlocal_goto_save_area object is an array of N pointers. The
1084 first one is used for the frame pointer save; the rest are sized by
1085 STACK_SAVEAREA_MODE. Create a reference to array index 1, the first
1086 of the stack save area slots. */
21dc8b2b 1087 t_save = build4 (ARRAY_REF,
1088 TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
1089 cfun->nonlocal_goto_save_area,
b55f9493 1090 integer_one_node, NULL_TREE, NULL_TREE);
4ee9c684 1091 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
1092
e9c97615 1093 emit_stack_save (SAVE_NONLOCAL, &r_save);
4ee9c684 1094}
97354ae4 1095
1096/* Record a new stack level for the current function. This should be called
1097 whenever we allocate or deallocate dynamic stack space. */
1098
1099void
1100record_new_stack_level (void)
1101{
1102 /* Record the new stack level for nonlocal gotos. */
1103 if (cfun->nonlocal_goto_save_area)
1104 update_nonlocal_goto_save_area ();
1105
1106 /* Record the new stack level for SJLJ exceptions. */
1107 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
1108 update_sjlj_context ();
1109}
dbd6697a 1110\f
7fa774cd 1111/* Return an rtx representing the address of an area of memory dynamically
5be42b39 1112 pushed on the stack.
7fa774cd 1113
1114 Any required stack pointer alignment is preserved.
1115
1116 SIZE is an rtx representing the size of the area.
83ad791a 1117
5be42b39 1118 SIZE_ALIGN is the alignment (in bits) that we know SIZE has. This
1119 parameter may be zero. If so, a proper value will be extracted
1120 from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.
1121
1122 REQUIRED_ALIGN is the alignment (in bits) required for the region
1123 of memory.
990495a7 1124
1125 If CANNOT_ACCUMULATE is set to TRUE, the caller guarantees that the
1126 stack space allocated by the generated code cannot be added with itself
1127 in the course of the execution of the function. It is always safe to
1128 pass FALSE here and the following criterion is sufficient in order to
1129 pass TRUE: every path in the CFG that starts at the allocation point and
1130 loops to it executes the associated deallocation code. */
7fa774cd 1131
1132rtx
5be42b39 1133allocate_dynamic_stack_space (rtx size, unsigned size_align,
1134 unsigned required_align, bool cannot_accumulate)
7fa774cd 1135{
990495a7 1136 HOST_WIDE_INT stack_usage_size = -1;
a903c451 1137 rtx_code_label *final_label;
1138 rtx final_target, target;
8b51e3aa 1139 unsigned extra_align = 0;
5be42b39 1140 bool must_align;
990495a7 1141
c0a9c3cd 1142 /* If we're asking for zero bytes, it doesn't matter what we point
c3418f42 1143 to since we can't dereference it. But return a reasonable
c0a9c3cd 1144 address anyway. */
1145 if (size == const0_rtx)
1146 return virtual_stack_dynamic_rtx;
1147
1148 /* Otherwise, show we're calling alloca or equivalent. */
18d50ae6 1149 cfun->calls_alloca = 1;
c0a9c3cd 1150
990495a7 1151 /* If stack usage info is requested, look into the size we are passed.
1152 We need to do so this early to avoid the obfuscation that may be
1153 introduced later by the various alignment operations. */
8c0dd614 1154 if (flag_stack_usage_info)
990495a7 1155 {
60778e62 1156 if (CONST_INT_P (size))
990495a7 1157 stack_usage_size = INTVAL (size);
60778e62 1158 else if (REG_P (size))
990495a7 1159 {
1160 /* Look into the last emitted insn and see if we can deduce
1161 something for the register. */
a903c451 1162 rtx_insn *insn;
1163 rtx set, note;
990495a7 1164 insn = get_last_insn ();
1165 if ((set = single_set (insn)) && rtx_equal_p (SET_DEST (set), size))
1166 {
60778e62 1167 if (CONST_INT_P (SET_SRC (set)))
990495a7 1168 stack_usage_size = INTVAL (SET_SRC (set));
1169 else if ((note = find_reg_equal_equiv_note (insn))
60778e62 1170 && CONST_INT_P (XEXP (note, 0)))
990495a7 1171 stack_usage_size = INTVAL (XEXP (note, 0));
1172 }
1173 }
1174
1175 /* If the size is not constant, we can't say anything. */
1176 if (stack_usage_size == -1)
1177 {
1178 current_function_has_unbounded_dynamic_stack_size = 1;
1179 stack_usage_size = 0;
1180 }
1181 }
1182
7fa774cd 1183 /* Ensure the size is in the proper mode. */
1184 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
1185 size = convert_to_mode (Pmode, size, 1);
1186
5be42b39 1187 /* Adjust SIZE_ALIGN, if needed. */
1188 if (CONST_INT_P (size))
1189 {
1190 unsigned HOST_WIDE_INT lsb;
1191
1192 lsb = INTVAL (size);
1193 lsb &= -lsb;
1194
1195 /* Watch out for overflow truncating to "unsigned". */
1196 if (lsb > UINT_MAX / BITS_PER_UNIT)
1197 size_align = 1u << (HOST_BITS_PER_INT - 1);
1198 else
1199 size_align = (unsigned)lsb * BITS_PER_UNIT;
1200 }
1201 else if (size_align < BITS_PER_UNIT)
1202 size_align = BITS_PER_UNIT;
1203
8b51e3aa 1204 /* We can't attempt to minimize the necessary alignment, because we don't
1205 know the final value of preferred_stack_boundary yet while executing
1206 this code. */
1207 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
1208 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
1209
7fa774cd 1210 /* We will need to ensure that the address we return is aligned to
8b51e3aa 1211 REQUIRED_ALIGN. If STACK_DYNAMIC_OFFSET is defined, we don't
1212 always know its final value at this point in the compilation (it
1213 might depend on the size of the outgoing parameter lists, for
1214 example), so we must align the value to be returned in that case.
1215 (Note that STACK_DYNAMIC_OFFSET will have a default nonzero value if
1216 STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).
1217 We must also do an alignment operation on the returned value if
1218 the stack pointer alignment is less strict than REQUIRED_ALIGN.
1219
1220 If we have to align, we must leave space in SIZE for the hole
1221 that might result from the alignment operation. */
1222
1223 must_align = (crtl->preferred_stack_boundary < required_align);
1224 if (must_align)
990495a7 1225 {
8b51e3aa 1226 if (required_align > PREFERRED_STACK_BOUNDARY)
1227 extra_align = PREFERRED_STACK_BOUNDARY;
1228 else if (required_align > STACK_BOUNDARY)
1229 extra_align = STACK_BOUNDARY;
1230 else
1231 extra_align = BITS_PER_UNIT;
156512c1 1232 }
1233
8b51e3aa 1234 /* ??? STACK_POINTER_OFFSET is always defined now. */
1235#if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET)
1236 must_align = true;
1237 extra_align = BITS_PER_UNIT;
1238#endif
156512c1 1239
8b51e3aa 1240 if (must_align)
1241 {
1242 unsigned extra = (required_align - extra_align) / BITS_PER_UNIT;
5be42b39 1243
29c05e22 1244 size = plus_constant (Pmode, size, extra);
5be42b39 1245 size = force_operand (size, NULL_RTX);
990495a7 1246
8c0dd614 1247 if (flag_stack_usage_info)
5be42b39 1248 stack_usage_size += extra;
8b51e3aa 1249
5be42b39 1250 if (extra && size_align > extra_align)
1251 size_align = extra_align;
990495a7 1252 }
35be3c55 1253
7fa774cd 1254 /* Round the size to a multiple of the required stack alignment.
8b51e3aa 1255 Since the stack is presumed to be rounded before this allocation,
7fa774cd 1256 this will maintain the required alignment.
1257
1258 If the stack grows downward, we could save an insn by subtracting
1259 SIZE from the stack pointer and then aligning the stack pointer.
1260 The problem with this is that the stack pointer may be unaligned
1261 between the execution of the subtraction and alignment insns and
1262 some machines do not allow this. Even on those that do, some
1263 signal handlers malfunction if a signal should occur between those
1264 insns. Since this is an extremely rare event, we have no reliable
1265 way of knowing which systems have this problem. So we avoid even
1266 momentarily mis-aligning the stack. */
5be42b39 1267 if (size_align % MAX_SUPPORTED_STACK_ALIGNMENT != 0)
990495a7 1268 {
1269 size = round_push (size);
7fa774cd 1270
8c0dd614 1271 if (flag_stack_usage_info)
990495a7 1272 {
60778e62 1273 int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
990495a7 1274 stack_usage_size = (stack_usage_size + align - 1) / align * align;
1275 }
1276 }
1277
5be42b39 1278 target = gen_reg_rtx (Pmode);
48b14f50 1279
990495a7 1280 /* The size is supposed to be fully adjusted at this point so record it
1281 if stack usage info is requested. */
8c0dd614 1282 if (flag_stack_usage_info)
990495a7 1283 {
1284 current_function_dynamic_stack_size += stack_usage_size;
1285
1286 /* ??? This is gross but the only safe stance in the absence
1287 of stack usage oriented flow analysis. */
1288 if (!cannot_accumulate)
1289 current_function_has_unbounded_dynamic_stack_size = 1;
1290 }
7fa774cd 1291
a903c451 1292 final_label = NULL;
48b14f50 1293 final_target = NULL_RTX;
1294
1295 /* If we are splitting the stack, we need to ask the backend whether
1296 there is enough room on the current stack. If there isn't, or if
1297 the backend doesn't know how to tell us, then we need to call a
1298 function to allocate memory in some other way. This memory will
1299 be released when we release the current stack segment. The
1300 effect is that stack allocation becomes less efficient, but at
1301 least it doesn't cause a stack overflow. */
1302 if (flag_split_stack)
1303 {
a903c451 1304 rtx_code_label *available_label;
1305 rtx ask, space, func;
48b14f50 1306
a903c451 1307 available_label = NULL;
48b14f50 1308
a558802e 1309 if (targetm.have_split_stack_space_check ())
48b14f50 1310 {
1311 available_label = gen_label_rtx ();
1312
1313 /* This instruction will branch to AVAILABLE_LABEL if there
1314 are SIZE bytes available on the stack. */
a558802e 1315 emit_insn (targetm.gen_split_stack_space_check
1316 (size, available_label));
48b14f50 1317 }
48b14f50 1318
70c912cf 1319 /* The __morestack_allocate_stack_space function will allocate
d2461405 1320 memory using malloc. If the alignment of the memory returned
1321 by malloc does not meet REQUIRED_ALIGN, we increase SIZE to
1322 make sure we allocate enough space. */
1323 if (MALLOC_ABI_ALIGNMENT >= required_align)
1324 ask = size;
1325 else
1326 {
1327 ask = expand_binop (Pmode, add_optab, size,
0359f9f5 1328 gen_int_mode (required_align / BITS_PER_UNIT - 1,
1329 Pmode),
d2461405 1330 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1331 must_align = true;
1332 }
70c912cf 1333
48b14f50 1334 func = init_one_libfunc ("__morestack_allocate_stack_space");
1335
1336 space = emit_library_call_value (func, target, LCT_NORMAL, Pmode,
70c912cf 1337 1, ask, Pmode);
48b14f50 1338
1339 if (available_label == NULL_RTX)
1340 return space;
1341
1342 final_target = gen_reg_rtx (Pmode);
48b14f50 1343
1344 emit_move_insn (final_target, space);
1345
1346 final_label = gen_label_rtx ();
1347 emit_jump (final_label);
1348
1349 emit_label (available_label);
1350 }
1351
7fa774cd 1352 do_pending_stack_adjust ();
1353
91b70175 1354 /* We ought always to be called at the top level, and the stack ought to be
3fb1e43b 1355 aligned properly. */
611234b4 1356 gcc_assert (!(stack_pointer_delta
1357 % (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)));
91b70175 1358
42982f3e 1359 /* If needed, check that we have the required amount of stack. Take into
1360 account what has already been checked. */
1361 if (STACK_CHECK_MOVING_SP)
1362 ;
1363 else if (flag_stack_check == GENERIC_STACK_CHECK)
4852b829 1364 probe_stack_range (STACK_OLD_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE,
1365 size);
1366 else if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK)
1367 probe_stack_range (STACK_CHECK_PROTECT, size);
382ff7aa 1368
4bb7c660 1369 /* Don't let anti_adjust_stack emit notes. */
1370 suppress_reg_args_size = true;
1371
7fa774cd 1372 /* Perform the required allocation from the stack. Some systems do
1373 this differently than simply incrementing/decrementing from the
941522d6 1374 stack pointer, such as acquiring the space by calling malloc(). */
a558802e 1375 if (targetm.have_allocate_stack ())
7fa774cd 1376 {
8786db1e 1377 struct expand_operand ops[2];
479e4d5e 1378 /* We don't have to check against the predicate for operand 0 since
1379 TARGET is known to be a pseudo of the proper mode, which must
8786db1e 1380 be valid for the operand. */
1381 create_fixed_operand (&ops[0], target);
1382 create_convert_operand_to (&ops[1], size, STACK_SIZE_MODE, true);
a558802e 1383 expand_insn (targetm.code_for_allocate_stack, 2, ops);
7fa774cd 1384 }
1385 else
184aad03 1386 {
60778e62 1387 int saved_stack_pointer_delta;
1388
3764c94e 1389 if (!STACK_GROWS_DOWNWARD)
1390 emit_move_insn (target, virtual_stack_dynamic_rtx);
8f8ac140 1391
1392 /* Check stack bounds if necessary. */
18d50ae6 1393 if (crtl->limit_stack)
8f8ac140 1394 {
1395 rtx available;
a903c451 1396 rtx_code_label *space_available = gen_label_rtx ();
3764c94e 1397 if (STACK_GROWS_DOWNWARD)
1398 available = expand_binop (Pmode, sub_optab,
1399 stack_pointer_rtx, stack_limit_rtx,
1400 NULL_RTX, 1, OPTAB_WIDEN);
1401 else
1402 available = expand_binop (Pmode, sub_optab,
1403 stack_limit_rtx, stack_pointer_rtx,
1404 NULL_RTX, 1, OPTAB_WIDEN);
1405
8f8ac140 1406 emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
2b96c5f6 1407 space_available);
4db8dd0c 1408 if (targetm.have_trap ())
1409 emit_insn (targetm.gen_trap ());
8f8ac140 1410 else
8f8ac140 1411 error ("stack limits not supported on this target");
1412 emit_barrier ();
1413 emit_label (space_available);
1414 }
1415
60778e62 1416 saved_stack_pointer_delta = stack_pointer_delta;
dfe00a8f 1417
42982f3e 1418 if (flag_stack_check && STACK_CHECK_MOVING_SP)
d1b92264 1419 anti_adjust_stack_and_probe (size, false);
42982f3e 1420 else
1421 anti_adjust_stack (size);
dfe00a8f 1422
60778e62 1423 /* Even if size is constant, don't modify stack_pointer_delta.
1424 The constant size alloca should preserve
1425 crtl->preferred_stack_boundary alignment. */
1426 stack_pointer_delta = saved_stack_pointer_delta;
15c6cf6b 1427
3764c94e 1428 if (STACK_GROWS_DOWNWARD)
1429 emit_move_insn (target, virtual_stack_dynamic_rtx);
941522d6 1430 }
7fa774cd 1431
4bb7c660 1432 suppress_reg_args_size = false;
1433
5be42b39 1434 /* Finish up the split stack handling. */
1435 if (final_label != NULL_RTX)
1436 {
1437 gcc_assert (flag_split_stack);
1438 emit_move_insn (final_target, target);
1439 emit_label (final_label);
1440 target = final_target;
1441 }
1442
1443 if (must_align)
83ad791a 1444 {
8c08cc16 1445 /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
a92771b8 1446 but we know it can't. So do the addition ourselves and then use
1447 TRUNC_DIV_EXPR. */
6e8c172a 1448 target = expand_binop (Pmode, add_optab, target,
0359f9f5 1449 gen_int_mode (required_align / BITS_PER_UNIT - 1,
1450 Pmode),
8c08cc16 1451 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1452 target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
0359f9f5 1453 gen_int_mode (required_align / BITS_PER_UNIT,
1454 Pmode),
50b0c9ee 1455 NULL_RTX, 1);
83ad791a 1456 target = expand_mult (Pmode, target,
0359f9f5 1457 gen_int_mode (required_align / BITS_PER_UNIT,
1458 Pmode),
50b0c9ee 1459 NULL_RTX, 1);
83ad791a 1460 }
6be48139 1461
5be42b39 1462 /* Now that we've committed to a return value, mark its alignment. */
1463 mark_reg_pointer (target, required_align);
1464
97354ae4 1465 /* Record the new stack level. */
1466 record_new_stack_level ();
c0a9c3cd 1467
7fa774cd 1468 return target;
1469}
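
/* Alignment example for the code above: to return a 32-byte-aligned
   (256-bit) block on a target whose PREFERRED_STACK_BOUNDARY is
   128 bits, (256 - 128) / 8 == 16 extra bytes are folded into SIZE up
   front, and the returned address is then rounded with the
   add/divide/multiply sequence, i.e. target = (target + 31) / 32 * 32.  */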
1470\f
6be48139 1471/* A front end may want to override GCC's stack checking by providing a
155b05dc 1472 run-time routine to call to check the stack, so provide a mechanism for
1473 calling that routine. */
1474
1f3233d1 1475static GTY(()) rtx stack_check_libfunc;
155b05dc 1476
1477void
e3805e9e 1478set_stack_check_libfunc (const char *libfunc_name)
155b05dc 1479{
e3805e9e 1480 gcc_assert (stack_check_libfunc == NULL_RTX);
1481 stack_check_libfunc = gen_rtx_SYMBOL_REF (Pmode, libfunc_name);
155b05dc 1482}
1483\f
382ff7aa 1484/* Emit one stack probe at ADDRESS, an address within the stack. */
1485
f164c08a 1486void
35cb5232 1487emit_stack_probe (rtx address)
382ff7aa 1488{
a558802e 1489 if (targetm.have_probe_stack_address ())
1490 emit_insn (targetm.gen_probe_stack_address (address));
28d5c3d9 1491 else
28d5c3d9 1492 {
1493 rtx memref = gen_rtx_MEM (word_mode, address);
382ff7aa 1494
28d5c3d9 1495 MEM_VOLATILE_P (memref) = 1;
382ff7aa 1496
28d5c3d9 1497 /* See if we have an insn to probe the stack. */
a558802e 1498 if (targetm.have_probe_stack ())
1499 emit_insn (targetm.gen_probe_stack (memref));
28d5c3d9 1500 else
28d5c3d9 1501 emit_move_insn (memref, const0_rtx);
1502 }
382ff7aa 1503}
1504
6be48139 1505/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
42982f3e 1506 FIRST is a constant and SIZE is a Pmode RTX. These are offsets from
1507 the current stack pointer. STACK_GROWS_DOWNWARD says whether to add
1508 or subtract them from the stack pointer. */
1509
1510#define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP)
382ff7aa 1511
2b785411 1512#if STACK_GROWS_DOWNWARD
382ff7aa 1513#define STACK_GROW_OP MINUS
42982f3e 1514#define STACK_GROW_OPTAB sub_optab
1515#define STACK_GROW_OFF(off) -(off)
382ff7aa 1516#else
1517#define STACK_GROW_OP PLUS
42982f3e 1518#define STACK_GROW_OPTAB add_optab
1519#define STACK_GROW_OFF(off) (off)
382ff7aa 1520#endif
1521
1522void
35cb5232 1523probe_stack_range (HOST_WIDE_INT first, rtx size)
382ff7aa 1524{
479e4d5e 1525 /* First ensure SIZE is Pmode. */
1526 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
1527 size = convert_to_mode (Pmode, size, 1);
1528
42982f3e 1529 /* Next see if we have a function to check the stack. */
1530 if (stack_check_libfunc)
024e3108 1531 {
42982f3e 1532 rtx addr = memory_address (Pmode,
8da1563c 1533 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1534 stack_pointer_rtx,
29c05e22 1535 plus_constant (Pmode,
1536 size, first)));
53226a3c 1537 emit_library_call (stack_check_libfunc, LCT_NORMAL, VOIDmode, 1, addr,
1538 Pmode);
024e3108 1539 }
155b05dc 1540
42982f3e 1541 /* Next see if we have an insn to check the stack. */
a558802e 1542 else if (targetm.have_check_stack ())
382ff7aa 1543 {
8786db1e 1544 struct expand_operand ops[1];
42982f3e 1545 rtx addr = memory_address (Pmode,
1546 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1547 stack_pointer_rtx,
29c05e22 1548 plus_constant (Pmode,
1549 size, first)));
15b37e31 1550 bool success;
8786db1e 1551 create_input_operand (&ops[0], addr, Pmode);
a558802e 1552 success = maybe_expand_insn (targetm.code_for_check_stack, 1, ops);
15b37e31 1553 gcc_assert (success);
382ff7aa 1554 }
382ff7aa 1555
42982f3e 1556 /* Otherwise we have to generate explicit probes. If we have a constant
1557 small number of them to generate, that's the easy case. */
1558 else if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
382ff7aa 1559 {
42982f3e 1560 HOST_WIDE_INT isize = INTVAL (size), i;
1561 rtx addr;
1562
1563 /* Probe at FIRST + N * PROBE_INTERVAL for values of N from 1 until
1564 it exceeds SIZE. If only one probe is needed, this will not
1565 generate any code. Then probe at FIRST + SIZE. */
1566 for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
1567 {
1568 addr = memory_address (Pmode,
29c05e22 1569 plus_constant (Pmode, stack_pointer_rtx,
42982f3e 1570 STACK_GROW_OFF (first + i)));
1571 emit_stack_probe (addr);
1572 }
1573
1574 addr = memory_address (Pmode,
29c05e22 1575 plus_constant (Pmode, stack_pointer_rtx,
42982f3e 1576 STACK_GROW_OFF (first + isize)));
1577 emit_stack_probe (addr);
382ff7aa 1578 }
1579
42982f3e 1580 /* In the variable case, do the same as above, but in a loop. Note that we
1581 must be extra careful with variables wrapping around because we might be
1582 at the very top (or the very bottom) of the address space and we have to
1583 be able to handle this case properly; in particular, we use an equality
1584 test for the loop condition. */
382ff7aa 1585 else
1586 {
42982f3e 1587 rtx rounded_size, rounded_size_op, test_addr, last_addr, temp;
a903c451 1588 rtx_code_label *loop_lab = gen_label_rtx ();
1589 rtx_code_label *end_lab = gen_label_rtx ();
382ff7aa 1590
42982f3e 1591 /* Step 1: round SIZE to the previous multiple of the interval. */
1592
1593 /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL */
1594 rounded_size
5d5ee71f 1595 = simplify_gen_binary (AND, Pmode, size,
1596 gen_int_mode (-PROBE_INTERVAL, Pmode));
42982f3e 1597 rounded_size_op = force_operand (rounded_size, NULL_RTX);
1598
1599
1600 /* Step 2: compute initial and final value of the loop counter. */
1601
1602 /* TEST_ADDR = SP + FIRST. */
1603 test_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1604 stack_pointer_rtx,
c338f2e3 1605 gen_int_mode (first, Pmode)),
1606 NULL_RTX);
42982f3e 1607
1608 /* LAST_ADDR = SP + FIRST + ROUNDED_SIZE. */
1609 last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1610 test_addr,
1611 rounded_size_op), NULL_RTX);
1612
1613
1614 /* Step 3: the loop
1615
1616 while (TEST_ADDR != LAST_ADDR)
1617 {
1618 TEST_ADDR = TEST_ADDR + PROBE_INTERVAL
1619 probe at TEST_ADDR
1620 }
1621
1622 probes at FIRST + N * PROBE_INTERVAL for values of N from 1
 1623	 until N * PROBE_INTERVAL equals ROUNDED_SIZE.  */
382ff7aa 1624
1625 emit_label (loop_lab);
382ff7aa 1626
42982f3e 1627 /* Jump to END_LAB if TEST_ADDR == LAST_ADDR. */
1628 emit_cmp_and_jump_insns (test_addr, last_addr, EQ, NULL_RTX, Pmode, 1,
1629 end_lab);
1630
1631 /* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL. */
1632 temp = expand_binop (Pmode, STACK_GROW_OPTAB, test_addr,
0359f9f5 1633 gen_int_mode (PROBE_INTERVAL, Pmode), test_addr,
382ff7aa 1634 1, OPTAB_WIDEN);
382ff7aa 1635
611234b4 1636 gcc_assert (temp == test_addr);
382ff7aa 1637
42982f3e 1638 /* Probe at TEST_ADDR. */
1639 emit_stack_probe (test_addr);
1640
1641 emit_jump (loop_lab);
1642
382ff7aa 1643 emit_label (end_lab);
1644
42982f3e 1645
1646 /* Step 4: probe at FIRST + SIZE if we cannot assert at compile-time
1647 that SIZE is equal to ROUNDED_SIZE. */
1648
1649 /* TEMP = SIZE - ROUNDED_SIZE. */
1650 temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
1651 if (temp != const0_rtx)
1652 {
1653 rtx addr;
1654
60778e62 1655 if (CONST_INT_P (temp))
42982f3e 1656 {
 1657		  /* Use [base + disp] addressing mode if supported.  */
1658 HOST_WIDE_INT offset = INTVAL (temp);
1659 addr = memory_address (Pmode,
29c05e22 1660 plus_constant (Pmode, last_addr,
42982f3e 1661 STACK_GROW_OFF (offset)));
1662 }
1663 else
1664 {
1665 /* Manual CSE if the difference is not known at compile-time. */
1666 temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
1667 addr = memory_address (Pmode,
1668 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1669 last_addr, temp));
1670 }
1671
1672 emit_stack_probe (addr);
1673 }
382ff7aa 1674 }
d9d7686b 1675
1676 /* Make sure nothing is scheduled before we are done. */
1677 emit_insn (gen_blockage ());
382ff7aa 1678}
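
/* A sketch, assuming a downward-growing stack, of what the variable case
   of probe_stack_range emits:

     test_addr = sp - FIRST
     last_addr = test_addr - ROUNDED_SIZE
   loop:
     if (test_addr == last_addr) goto end
     test_addr -= PROBE_INTERVAL
     probe *test_addr
     goto loop
   end:
     if (SIZE != ROUNDED_SIZE)
       probe *(last_addr - (SIZE - ROUNDED_SIZE))

   The equality (rather than ordered) loop test is what keeps this correct
   when the addresses wrap around the end of the address space.  */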
42982f3e 1679
d1b92264 1680/* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
1681 while probing it. This pushes when SIZE is positive. SIZE need not
1682 be constant. If ADJUST_BACK is true, adjust back the stack pointer
1683 by plus SIZE at the end. */
42982f3e 1684
d1b92264 1685void
1686anti_adjust_stack_and_probe (rtx size, bool adjust_back)
42982f3e 1687{
d1b92264 1688 /* We skip the probe for the first interval + a small dope of 4 words and
1689 probe that many bytes past the specified size to maintain a protection
 1690	 area at the bottom of the stack.  */
42982f3e 1691 const int dope = 4 * UNITS_PER_WORD;
1692
1693 /* First ensure SIZE is Pmode. */
1694 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
1695 size = convert_to_mode (Pmode, size, 1);
1696
1697 /* If we have a constant small number of probes to generate, that's the
1698 easy case. */
60778e62 1699 if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
42982f3e 1700 {
1701 HOST_WIDE_INT isize = INTVAL (size), i;
1702 bool first_probe = true;
1703
f164c08a 1704 /* Adjust SP and probe at PROBE_INTERVAL + N * PROBE_INTERVAL for
42982f3e 1705	 values of N from 1 until N * PROBE_INTERVAL reaches or exceeds SIZE.
 1706	 If only one probe is needed, this will not generate any code.  Then
 1707	 adjust and probe to PROBE_INTERVAL + SIZE.  */
1708 for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
1709 {
1710 if (first_probe)
1711 {
1712 anti_adjust_stack (GEN_INT (2 * PROBE_INTERVAL + dope));
1713 first_probe = false;
1714 }
1715 else
1716 anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
1717 emit_stack_probe (stack_pointer_rtx);
1718 }
1719
1720 if (first_probe)
29c05e22 1721 anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
42982f3e 1722 else
29c05e22 1723 anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL - i));
42982f3e 1724 emit_stack_probe (stack_pointer_rtx);
1725 }
1726
1727 /* In the variable case, do the same as above, but in a loop. Note that we
1728 must be extra careful with variables wrapping around because we might be
1729 at the very top (or the very bottom) of the address space and we have to
1730 be able to handle this case properly; in particular, we use an equality
1731 test for the loop condition. */
1732 else
1733 {
1734 rtx rounded_size, rounded_size_op, last_addr, temp;
a903c451 1735 rtx_code_label *loop_lab = gen_label_rtx ();
1736 rtx_code_label *end_lab = gen_label_rtx ();
42982f3e 1737
1738
1739 /* Step 1: round SIZE to the previous multiple of the interval. */
1740
1741 /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL */
1742 rounded_size
5d5ee71f 1743 = simplify_gen_binary (AND, Pmode, size,
1744 gen_int_mode (-PROBE_INTERVAL, Pmode));
42982f3e 1745 rounded_size_op = force_operand (rounded_size, NULL_RTX);
1746
1747
1748 /* Step 2: compute initial and final value of the loop counter. */
1749
1750 /* SP = SP_0 + PROBE_INTERVAL. */
1751 anti_adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
1752
1753 /* LAST_ADDR = SP_0 + PROBE_INTERVAL + ROUNDED_SIZE. */
1754 last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1755 stack_pointer_rtx,
1756 rounded_size_op), NULL_RTX);
1757
1758
1759 /* Step 3: the loop
1760
f164c08a 1761 while (SP != LAST_ADDR)
1762 {
1763 SP = SP + PROBE_INTERVAL
1764 probe at SP
1765 }
42982f3e 1766
f164c08a 1767 adjusts SP and probes at PROBE_INTERVAL + N * PROBE_INTERVAL for
42982f3e 1768	 values of N from 1 until N * PROBE_INTERVAL equals ROUNDED_SIZE.  */
1769
1770 emit_label (loop_lab);
1771
1772 /* Jump to END_LAB if SP == LAST_ADDR. */
1773 emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
1774 Pmode, 1, end_lab);
1775
1776 /* SP = SP + PROBE_INTERVAL and probe at SP. */
1777 anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
1778 emit_stack_probe (stack_pointer_rtx);
1779
1780 emit_jump (loop_lab);
1781
1782 emit_label (end_lab);
1783
1784
f164c08a 1785 /* Step 4: adjust SP and probe at PROBE_INTERVAL + SIZE if we cannot
42982f3e 1786 assert at compile-time that SIZE is equal to ROUNDED_SIZE. */
1787
1788 /* TEMP = SIZE - ROUNDED_SIZE. */
1789 temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
1790 if (temp != const0_rtx)
1791 {
1792 /* Manual CSE if the difference is not known at compile-time. */
 1793	  if (!CONST_INT_P (temp))
1794 temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
1795 anti_adjust_stack (temp);
1796 emit_stack_probe (stack_pointer_rtx);
1797 }
1798 }
1799
d1b92264 1800 /* Adjust back and account for the additional first interval. */
1801 if (adjust_back)
29c05e22 1802 adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
d1b92264 1803 else
1804 adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
42982f3e 1805}
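
/* Worked example for the constant case above (illustrative; assumes
   PROBE_INTERVAL == 4096 and UNITS_PER_WORD == 8, i.e. a dope of 32
   bytes, on a downward-growing stack): for SIZE == 8192 the emitted
   sequence is

     sp -= 2*4096 + 32;  probe *sp;   /+ first interval + dope +/
     sp -= 4096;         probe *sp;   /+ SIZE + PROBE_INTERVAL - 8192 +/

   for a total adjustment of SIZE + PROBE_INTERVAL + dope bytes, which is
   exactly what the final adjust_stack call undoes when ADJUST_BACK.  */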
1806
7fa774cd 1807/* Return an rtx representing the register or memory location
1808 in which a scalar value of data type VALTYPE
1809 was returned by a function call to function FUNC.
46b3ff29 1810 FUNC is a FUNCTION_DECL, FNTYPE a FUNCTION_TYPE node if the precise
1811 function is known, otherwise 0.
16beb099 1812 OUTGOING is 1 if on a machine with register windows this function
1813 should return the register in which the function will put its result
6312a35e 1814 and 0 otherwise. */
7fa774cd 1815
1816rtx
fb80456a 1817hard_function_value (const_tree valtype, const_tree func, const_tree fntype,
35cb5232 1818 int outgoing ATTRIBUTE_UNUSED)
7fa774cd 1819{
16beb099 1820 rtx val;
02e7a332 1821
46b3ff29 1822 val = targetm.calls.function_value (valtype, func ? func : fntype, outgoing);
02e7a332 1823
8ad4c111 1824 if (REG_P (val)
883e35f4 1825 && GET_MODE (val) == BLKmode)
1826 {
02e7a332 1827 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
3754d046 1828 machine_mode tmpmode;
02e7a332 1829
6be48139 1830 /* int_size_in_bytes can return -1. We don't need a check here
89f18f73 1831 since the value of bytes will then be large enough that no
1832 mode will match anyway. */
6be48139 1833
883e35f4 1834 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
ff385626 1835 tmpmode != VOIDmode;
1836 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
1837 {
1838 /* Have we found a large enough mode? */
1839 if (GET_MODE_SIZE (tmpmode) >= bytes)
1840 break;
1841 }
883e35f4 1842
1843 /* No suitable mode found. */
611234b4 1844 gcc_assert (tmpmode != VOIDmode);
883e35f4 1845
1846 PUT_MODE (val, tmpmode);
6be48139 1847 }
883e35f4 1848 return val;
7fa774cd 1849}
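
/* For example (illustrative), on a typical 64-bit target a BLKmode return
   value of 12 bytes makes the loop above step through the integer modes
   until it reaches TImode, whose GET_MODE_SIZE of 16 is the first to
   cover the 12 bytes, and the value's mode is rewritten accordingly.  */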
1850
1851/* Return an rtx representing the register or memory location
1852 in which a scalar value of mode MODE was returned by a library call. */
1853
1854rtx
3754d046 1855hard_libcall_value (machine_mode mode, rtx fun)
7fa774cd 1856{
578d1295 1857 return targetm.calls.libcall_value (mode, fun);
7fa774cd 1858}
0ff6d058 1859
1860/* Look up the tree code for a given rtx code
1861 to provide the arithmetic operation for REAL_ARITHMETIC.
1862 The function returns an int because the caller may not know
1863 what `enum tree_code' means. */
1864
1865int
35cb5232 1866rtx_to_tree_code (enum rtx_code code)
0ff6d058 1867{
1868 enum tree_code tcode;
1869
1870 switch (code)
1871 {
1872 case PLUS:
1873 tcode = PLUS_EXPR;
1874 break;
1875 case MINUS:
1876 tcode = MINUS_EXPR;
1877 break;
1878 case MULT:
1879 tcode = MULT_EXPR;
1880 break;
1881 case DIV:
1882 tcode = RDIV_EXPR;
1883 break;
1884 case SMIN:
1885 tcode = MIN_EXPR;
1886 break;
1887 case SMAX:
1888 tcode = MAX_EXPR;
1889 break;
1890 default:
1891 tcode = LAST_AND_UNUSED_TREE_CODE;
1892 break;
1893 }
1894 return ((int) tcode);
1895}
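
/* E.g. rtx_to_tree_code (MULT) returns (int) MULT_EXPR, while a code with
   no counterpart here, such as AND, returns LAST_AND_UNUSED_TREE_CODE.
   DIV deliberately maps to RDIV_EXPR: the callers feed REAL_ARITHMETIC,
   where division is real, not truncating.  */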
1f3233d1 1896
1897#include "gt-explow.h"