/* Subroutines for manipulating rtx's in semantically interesting ways.
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "alias.h"
#include "symtab.h"
#include "tree.h"
#include "stor-layout.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "hard-reg-set.h"
#include "function.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "calls.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "insn-codes.h"
#include "optabs.h"
#include "libfuncs.h"
#include "recog.h"
#include "langhooks.h"
#include "target.h"
#include "common/common-target.h"
#include "output.h"

static rtx break_out_memory_refs (rtx);


/* Truncate and perhaps sign-extend C as appropriate for MODE.  */

HOST_WIDE_INT
trunc_int_for_mode (HOST_WIDE_INT c, machine_mode mode)
{
  int width = GET_MODE_PRECISION (mode);

  /* You want to truncate to a _what_?  */
  gcc_assert (SCALAR_INT_MODE_P (mode)
              || POINTER_BOUNDS_MODE_P (mode));

  /* Canonicalize BImode to 0 and STORE_FLAG_VALUE.  */
  if (mode == BImode)
    return c & 1 ? STORE_FLAG_VALUE : 0;

  /* Sign-extend for the requested mode.  */

  if (width < HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT sign = 1;
      sign <<= width - 1;
      c &= (sign << 1) - 1;
      c ^= sign;
      c -= sign;
    }

  return c;
}
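
/* A worked example of the sign-extension arithmetic above, assuming the
   usual 8-bit QImode (the mode width is an assumed example value):

     trunc_int_for_mode (0xff, QImode) == -1    sign bit 7 propagates
     trunc_int_for_mode (0x7f, QImode) == 0x7f  already representable  */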

/* Return an rtx for the sum of X and the integer C, given that X has
   mode MODE.  INPLACE is true if X can be modified in place or false
   if it must be treated as immutable.  */

rtx
plus_constant (machine_mode mode, rtx x, HOST_WIDE_INT c,
               bool inplace)
{
  RTX_CODE code;
  rtx y;
  rtx tem;
  int all_constant = 0;

  gcc_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);

  if (c == 0)
    return x;

 restart:

  code = GET_CODE (x);
  y = x;

  switch (code)
    {
    CASE_CONST_SCALAR_INT:
      return immed_wide_int_const (wi::add (std::make_pair (x, mode), c),
                                   mode);
    case MEM:
      /* If this is a reference to the constant pool, try replacing it with
         a reference to a new constant.  If the resulting address isn't
         valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
          && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
        {
          tem = plus_constant (mode, get_pool_constant (XEXP (x, 0)), c);
          tem = force_const_mem (GET_MODE (x), tem);
          /* Targets may disallow some constants in the constant pool, thus
             force_const_mem may return NULL_RTX.  */
          if (tem && memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
            return tem;
        }
      break;

    case CONST:
      /* If adding to something entirely constant, set a flag
         so that we can add a CONST around the result.  */
      if (inplace && shared_const_p (x))
        inplace = false;
      x = XEXP (x, 0);
      all_constant = 1;
      goto restart;

    case SYMBOL_REF:
    case LABEL_REF:
      all_constant = 1;
      break;

    case PLUS:
      /* The interesting case is adding the integer to a sum.  Look
         for a constant term in the sum and combine it with C.  For an
         integer constant term or a constant term that is not an
         explicit integer, we combine or group them together anyway.

         We may not immediately return from the recursive call here, lest
         all_constant gets lost.  */

      if (CONSTANT_P (XEXP (x, 1)))
        {
          rtx term = plus_constant (mode, XEXP (x, 1), c, inplace);
          if (term == const0_rtx)
            x = XEXP (x, 0);
          else if (inplace)
            XEXP (x, 1) = term;
          else
            x = gen_rtx_PLUS (mode, XEXP (x, 0), term);
          c = 0;
        }
      else if (rtx *const_loc = find_constant_term_loc (&y))
        {
          if (!inplace)
            {
              /* We need to be careful since X may be shared and we can't
                 modify it in place.  */
              x = copy_rtx (x);
              const_loc = find_constant_term_loc (&x);
            }
          *const_loc = plus_constant (mode, *const_loc, c, true);
          c = 0;
        }
      break;

    default:
      break;
    }

  if (c != 0)
    x = gen_rtx_PLUS (mode, x, gen_int_mode (c, mode));

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    return gen_rtx_CONST (mode, x);
  else
    return x;
}
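
/* Two illustrative uses of plus_constant, with assumed example operands:
   adding 4 to a pseudo REG in Pmode simply wraps it as
   (plus:P (reg) (const_int 4)), while adding -4 to (const_int 4) folds
   through the CASE_CONST_SCALAR_INT arm above to const0_rtx.  */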
\f
/* If X is a sum, return a new sum like X but lacking any constant terms.
   Add all the removed constant terms into *CONSTPTR.
   X itself is not altered.  The result != X if and only if
   it is not isomorphic to X.  */

rtx
eliminate_constant_term (rtx x, rtx *constptr)
{
  rtx x0, x1;
  rtx tem;

  if (GET_CODE (x) != PLUS)
    return x;

  /* First handle constants appearing at this level explicitly.  */
  if (CONST_INT_P (XEXP (x, 1))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
                                                XEXP (x, 1)))
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return eliminate_constant_term (XEXP (x, 0), constptr);
    }

  tem = const0_rtx;
  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
                                                *constptr, tem))
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return gen_rtx_PLUS (GET_MODE (x), x0, x1);
    }

  return x;
}
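
/* For instance (an assumed example), given X = (plus (reg) (const_int 12))
   and *CONSTPTR = const0_rtx, the call returns (reg) and leaves
   (const_int 12) in *CONSTPTR; the constant is folded through
   simplify_binary_operation before the recursion strips it.  */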

\f
/* Return a copy of X in which all memory references
   and all constants that involve symbol refs
   have been replaced with new temporary registers.
   Also emit code to load the memory locations and constants
   into those registers.

   If X contains no such constants or memory references,
   X itself (not a copy) is returned.

   If a constant is found in the address that is not a legitimate constant
   in an insn, it is left alone in the hope that it might be valid in the
   address.

   X may contain no arithmetic except addition, subtraction and multiplication.
   Values returned by expand_expr with 1 for sum_ok fit this constraint.  */

static rtx
break_out_memory_refs (rtx x)
{
  if (MEM_P (x)
      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
          && GET_MODE (x) != VOIDmode))
    x = force_reg (GET_MODE (x), x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
           || GET_CODE (x) == MULT)
    {
      rtx op0 = break_out_memory_refs (XEXP (x, 0));
      rtx op1 = break_out_memory_refs (XEXP (x, 1));

      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        x = simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
    }

  return x;
}

/* Given X, a memory address in address space AS' pointer mode, convert it to
   an address in the address space's address mode, or vice versa (TO_MODE says
   which way).  We take advantage of the fact that pointers are not allowed to
   overflow by commuting arithmetic operations over conversions so that address
   arithmetic insns can be used.  IN_CONST is true if this conversion is inside
   a CONST.  */

static rtx
convert_memory_address_addr_space_1 (machine_mode to_mode ATTRIBUTE_UNUSED,
                                     rtx x, addr_space_t as ATTRIBUTE_UNUSED,
                                     bool in_const ATTRIBUTE_UNUSED)
{
#ifndef POINTERS_EXTEND_UNSIGNED
  gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
  return x;
#else /* defined(POINTERS_EXTEND_UNSIGNED) */
  machine_mode pointer_mode, address_mode, from_mode;
  rtx temp;
  enum rtx_code code;

  /* If X already has the right mode, just return it.  */
  if (GET_MODE (x) == to_mode)
    return x;

  pointer_mode = targetm.addr_space.pointer_mode (as);
  address_mode = targetm.addr_space.address_mode (as);
  from_mode = to_mode == pointer_mode ? address_mode : pointer_mode;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    {
    CASE_CONST_SCALAR_INT:
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
        code = TRUNCATE;
      else if (POINTERS_EXTEND_UNSIGNED < 0)
        break;
      else if (POINTERS_EXTEND_UNSIGNED > 0)
        code = ZERO_EXTEND;
      else
        code = SIGN_EXTEND;
      temp = simplify_unary_operation (code, to_mode, x, from_mode);
      if (temp)
        return temp;
      break;

    case SUBREG:
      if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
          && GET_MODE (SUBREG_REG (x)) == to_mode)
        return SUBREG_REG (x);
      break;

    case LABEL_REF:
      temp = gen_rtx_LABEL_REF (to_mode, LABEL_REF_LABEL (x));
      LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
      return temp;
      break;

    case SYMBOL_REF:
      temp = shallow_copy_rtx (x);
      PUT_MODE (temp, to_mode);
      return temp;
      break;

    case CONST:
      return gen_rtx_CONST (to_mode,
                            convert_memory_address_addr_space_1
                              (to_mode, XEXP (x, 0), as, true));
      break;

    case PLUS:
    case MULT:
      /* For addition we can safely permute the conversion and addition
         operation if one operand is a constant and converting the constant
         does not change it or if one operand is a constant and we are
         using a ptr_extend instruction (POINTERS_EXTEND_UNSIGNED < 0).
         We can always safely permute them if we are making the address
         narrower.  Inside a CONST RTL, this is safe for both pointers
         zero or sign extended as pointers cannot wrap.  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
          || (GET_CODE (x) == PLUS
              && CONST_INT_P (XEXP (x, 1))
              && ((in_const && POINTERS_EXTEND_UNSIGNED != 0)
                  || XEXP (x, 1) == convert_memory_address_addr_space_1
                                      (to_mode, XEXP (x, 1), as, in_const)
                  || POINTERS_EXTEND_UNSIGNED < 0)))
        return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
                               convert_memory_address_addr_space_1
                                 (to_mode, XEXP (x, 0), as, in_const),
                               XEXP (x, 1));
      break;

    default:
      break;
    }

  return convert_modes (to_mode, from_mode,
                        x, POINTERS_EXTEND_UNSIGNED);
#endif /* defined(POINTERS_EXTEND_UNSIGNED) */
}

/* Given X, a memory address in address space AS' pointer mode, convert it to
   an address in the address space's address mode, or vice versa (TO_MODE says
   which way).  We take advantage of the fact that pointers are not allowed to
   overflow by commuting arithmetic operations over conversions so that address
   arithmetic insns can be used.  */

rtx
convert_memory_address_addr_space (machine_mode to_mode, rtx x, addr_space_t as)
{
  return convert_memory_address_addr_space_1 (to_mode, x, as, false);
}
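
/* A sketch of the common case, assuming a target where the address mode
   is wider than the pointer mode (say a 64-bit Pmode over a 32-bit
   ptr_mode): a (symbol_ref:SI "x") is shallow-copied and given the wider
   mode via PUT_MODE by the SYMBOL_REF arm above, with no extension insn
   emitted.  */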
\f

/* Return something equivalent to X but valid as a memory address for something
   of mode MODE in the named address space AS.  When X is not itself valid,
   this works by copying X or subexpressions of it into registers.  */

rtx
memory_address_addr_space (machine_mode mode, rtx x, addr_space_t as)
{
  rtx oldx = x;
  machine_mode address_mode = targetm.addr_space.address_mode (as);

  x = convert_memory_address_addr_space (address_mode, x, as);

  /* By passing constant addresses through registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (address_mode, x);

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && !REG_P (x))
        x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.  */
      if (memory_address_addr_space_p (mode, x, as))
        goto done;

      /* If it was valid before but breaking out memory refs invalidated it,
         use it the old way.  */
      if (memory_address_addr_space_p (mode, oldx, as))
        {
          x = oldx;
          goto done;
        }

      /* Perform machine-dependent transformations on X
         in certain cases.  This is not necessary since the code
         below can handle all possible cases, but machine-dependent
         transformations can make better code.  */
      {
        rtx orig_x = x;
        x = targetm.addr_space.legitimize_address (x, oldx, mode, as);
        if (orig_x != x && memory_address_addr_space_p (mode, x, as))
          goto done;
      }

      /* PLUS and MULT can appear in special ways
         as the result of attempts to make an address usable for indexing.
         Usually they are dealt with by calling force_operand, below.
         But a sum containing constant terms is special
         if removing them makes the sum a valid address:
         then we generate that address in a register
         and index off of it.  We do this because it often makes
         shorter code, and because the addresses thus generated
         in registers often become common subexpressions.  */
      if (GET_CODE (x) == PLUS)
        {
          rtx constant_term = const0_rtx;
          rtx y = eliminate_constant_term (x, &constant_term);
          if (constant_term == const0_rtx
              || ! memory_address_addr_space_p (mode, y, as))
            x = force_operand (x, NULL_RTX);
          else
            {
              y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
              if (! memory_address_addr_space_p (mode, y, as))
                x = force_operand (x, NULL_RTX);
              else
                x = y;
            }
        }

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
        x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
         it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (REG_P (x))
        x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
         the register is a valid address.  */
      else
        x = force_reg (address_mode, x);
    }

 done:

  gcc_assert (memory_address_addr_space_p (mode, x, as));
  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (REG_P (x))
    mark_reg_pointer (x, BITS_PER_UNIT);
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
    mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}

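/* As an assumed example: on a target that rejects
   (plus (plus r1 r2) (const_int 12)) as an address, the PLUS arm above
   calls eliminate_constant_term to split off the 12, copies the inner
   sum to a register with copy_to_reg when the stripped sum is itself a
   valid address, and retries (plus (reg) (const_int 12)).  */
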
/* If REF is a MEM with an invalid address, change it into a valid address.
   Pass through anything else unchanged.  REF must be an unshared rtx and
   the function may modify it in-place.  */

rtx
validize_mem (rtx ref)
{
  if (!MEM_P (ref))
    return ref;
  ref = use_anchored_address (ref);
  if (memory_address_addr_space_p (GET_MODE (ref), XEXP (ref, 0),
                                   MEM_ADDR_SPACE (ref)))
    return ref;

  return replace_equiv_address (ref, XEXP (ref, 0), true);
}

/* If X is a memory reference to a member of an object block, try rewriting
   it to use an anchor instead.  Return the new memory reference on success
   and the old one on failure.  */

rtx
use_anchored_address (rtx x)
{
  rtx base;
  HOST_WIDE_INT offset;
  machine_mode mode;

  if (!flag_section_anchors)
    return x;

  if (!MEM_P (x))
    return x;

  /* Split the address into a base and offset.  */
  base = XEXP (x, 0);
  offset = 0;
  if (GET_CODE (base) == CONST
      && GET_CODE (XEXP (base, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (base, 0), 1)))
    {
      offset += INTVAL (XEXP (XEXP (base, 0), 1));
      base = XEXP (XEXP (base, 0), 0);
    }

  /* Check whether BASE is suitable for anchors.  */
  if (GET_CODE (base) != SYMBOL_REF
      || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
      || SYMBOL_REF_ANCHOR_P (base)
      || SYMBOL_REF_BLOCK (base) == NULL
      || !targetm.use_anchors_for_symbol_p (base))
    return x;

  /* Decide where BASE is going to be.  */
  place_block_symbol (base);

  /* Get the anchor we need to use.  */
  offset += SYMBOL_REF_BLOCK_OFFSET (base);
  base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
                             SYMBOL_REF_TLS_MODEL (base));

  /* Work out the offset from the anchor.  */
  offset -= SYMBOL_REF_BLOCK_OFFSET (base);

  /* If we're going to run a CSE pass, force the anchor into a register.
     We will then be able to reuse registers for several accesses, if the
     target costs say that that's worthwhile.  */
  mode = GET_MODE (base);
  if (!cse_not_expected)
    base = force_reg (mode, base);

  return replace_equiv_address (x, plus_constant (mode, base, offset));
}
\f
/* Copy the value or contents of X to a new temp reg and return that reg.  */

rtx
copy_to_reg (rtx x)
{
  rtx temp = gen_reg_rtx (GET_MODE (x));

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (x != temp)
    emit_move_insn (temp, x);

  return temp;
}

/* Like copy_to_reg but always give the new register mode Pmode
   in case X is a constant.  */

rtx
copy_addr_to_reg (rtx x)
{
  return copy_to_mode_reg (Pmode, x);
}

/* Like copy_to_reg but always give the new register mode MODE
   in case X is a constant.  */

rtx
copy_to_mode_reg (machine_mode mode, rtx x)
{
  rtx temp = gen_reg_rtx (mode);

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
  if (x != temp)
    emit_move_insn (temp, x);
  return temp;
}

/* Load X into a register if it is not already one.
   Use mode MODE for the register.
   X should be valid for mode MODE, but it may be a constant which
   is valid for all integer modes; that's why caller must specify MODE.

   The caller must not alter the value in the register we return,
   since we mark it as a "constant" register.  */

rtx
force_reg (machine_mode mode, rtx x)
{
  rtx temp, set;
  rtx_insn *insn;

  if (REG_P (x))
    return x;

  if (general_operand (x, mode))
    {
      temp = gen_reg_rtx (mode);
      insn = emit_move_insn (temp, x);
    }
  else
    {
      temp = force_operand (x, NULL_RTX);
      if (REG_P (temp))
        insn = get_last_insn ();
      else
        {
          rtx temp2 = gen_reg_rtx (mode);
          insn = emit_move_insn (temp2, temp);
          temp = temp2;
        }
    }

  /* Let optimizers know that TEMP's value never changes
     and that X can be substituted for it.  Don't get confused
     if INSN set something else (such as a SUBREG of TEMP).  */
  if (CONSTANT_P (x)
      && (set = single_set (insn)) != 0
      && SET_DEST (set) == temp
      && ! rtx_equal_p (x, SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, x);

  /* Let optimizers know that TEMP is a pointer, and if so, the
     known alignment of that pointer.  */
  {
    unsigned align = 0;
    if (GET_CODE (x) == SYMBOL_REF)
      {
        align = BITS_PER_UNIT;
        if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
          align = DECL_ALIGN (SYMBOL_REF_DECL (x));
      }
    else if (GET_CODE (x) == LABEL_REF)
      align = BITS_PER_UNIT;
    else if (GET_CODE (x) == CONST
             && GET_CODE (XEXP (x, 0)) == PLUS
             && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
             && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
      {
        rtx s = XEXP (XEXP (x, 0), 0);
        rtx c = XEXP (XEXP (x, 0), 1);
        unsigned sa, ca;

        sa = BITS_PER_UNIT;
        if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
          sa = DECL_ALIGN (SYMBOL_REF_DECL (s));

        if (INTVAL (c) == 0)
          align = sa;
        else
          {
            ca = ctz_hwi (INTVAL (c)) * BITS_PER_UNIT;
            align = MIN (sa, ca);
          }
      }

    if (align || (MEM_P (x) && MEM_POINTER (x)))
      mark_reg_pointer (temp, align);
  }

  return temp;
}
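
/* For example, force_reg (SImode, GEN_INT (42)) emits
   (set (reg:SI <new pseudo>) (const_int 42)) and returns the pseudo; no
   REG_EQUAL note is added in that case because the moved source is
   already X itself.  Passing a REG returns it untouched.  */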

/* If X is a memory ref, copy its contents to a new temp reg and return
   that reg.  Otherwise, return X.  */

rtx
force_not_mem (rtx x)
{
  rtx temp;

  if (!MEM_P (x) || GET_MODE (x) == BLKmode)
    return x;

  temp = gen_reg_rtx (GET_MODE (x));

  if (MEM_POINTER (x))
    REG_POINTER (temp) = 1;

  emit_move_insn (temp, x);
  return temp;
}

/* Copy X to TARGET (if it's nonzero and a reg)
   or to a new temp reg and return that reg.
   MODE is the mode to use for X in case it is a constant.  */

rtx
copy_to_suggested_reg (rtx x, rtx target, machine_mode mode)
{
  rtx temp;

  if (target && REG_P (target))
    temp = target;
  else
    temp = gen_reg_rtx (mode);

  emit_move_insn (temp, x);
  return temp;
}
\f
/* Return the mode to use to pass or return a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_RETURN is nonzero if the caller is promoting the return value
   of FNDECL, else it is for promoting args.  */

machine_mode
promote_function_mode (const_tree type, machine_mode mode, int *punsignedp,
                       const_tree funtype, int for_return)
{
  /* Called without a type node for a libcall.  */
  if (type == NULL_TREE)
    {
      if (INTEGRAL_MODE_P (mode))
        return targetm.calls.promote_function_mode (NULL_TREE, mode,
                                                    punsignedp, funtype,
                                                    for_return);
      else
        return mode;
    }

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case REAL_TYPE: case OFFSET_TYPE: case FIXED_POINT_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
      return targetm.calls.promote_function_mode (type, mode, punsignedp, funtype,
                                                  for_return);

    default:
      return mode;
    }
}
/* Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.  */

machine_mode
promote_mode (const_tree type ATTRIBUTE_UNUSED, machine_mode mode,
              int *punsignedp ATTRIBUTE_UNUSED)
{
#ifdef PROMOTE_MODE
  enum tree_code code;
  int unsignedp;
#endif

  /* For libcalls this is invoked without TYPE from the backends
     TARGET_PROMOTE_FUNCTION_MODE hooks.  Don't do anything in that
     case.  */
  if (type == NULL_TREE)
    return mode;

  /* FIXME: this is the same logic that was there until GCC 4.4, but we
     probably want to test POINTERS_EXTEND_UNSIGNED even if PROMOTE_MODE
     is not defined.  The affected targets are M32C, S390, SPARC.  */
#ifdef PROMOTE_MODE
  code = TREE_CODE (type);
  unsignedp = *punsignedp;

  switch (code)
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case REAL_TYPE: case OFFSET_TYPE: case FIXED_POINT_TYPE:
      PROMOTE_MODE (mode, unsignedp, type);
      *punsignedp = unsignedp;
      return mode;
      break;

#ifdef POINTERS_EXTEND_UNSIGNED
    case REFERENCE_TYPE:
    case POINTER_TYPE:
      *punsignedp = POINTERS_EXTEND_UNSIGNED;
      return targetm.addr_space.address_mode
               (TYPE_ADDR_SPACE (TREE_TYPE (type)));
      break;
#endif

    default:
      return mode;
    }
#else
  return mode;
#endif
}


/* Use one of promote_mode or promote_function_mode to find the promoted
   mode of DECL.  If PUNSIGNEDP is not NULL, store there the unsignedness
   of DECL after promotion.  */

machine_mode
promote_decl_mode (const_tree decl, int *punsignedp)
{
  tree type = TREE_TYPE (decl);
  int unsignedp = TYPE_UNSIGNED (type);
  machine_mode mode = DECL_MODE (decl);
  machine_mode pmode;

  if (TREE_CODE (decl) == RESULT_DECL
      || TREE_CODE (decl) == PARM_DECL)
    pmode = promote_function_mode (type, mode, &unsignedp,
                                   TREE_TYPE (current_function_decl), 2);
  else
    pmode = promote_mode (type, mode, &unsignedp);

  if (punsignedp)
    *punsignedp = unsignedp;
  return pmode;
}

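/* As an assumed example: on a target whose promotion hooks widen
   sub-word integers (many RISC ports do), a QImode PARM_DECL of signed
   char type typically comes back from here as SImode, with *PUNSIGNEDP
   reporting the signedness chosen for the extension.  */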
\f
/* Controls the behaviour of {anti_,}adjust_stack.  */
static bool suppress_reg_args_size;

/* A helper for adjust_stack and anti_adjust_stack.  */

static void
adjust_stack_1 (rtx adjust, bool anti_p)
{
  rtx temp;
  rtx_insn *insn;

  /* Hereafter anti_p means subtract_p.  */
  if (!STACK_GROWS_DOWNWARD)
    anti_p = !anti_p;

  temp = expand_binop (Pmode,
                       anti_p ? sub_optab : add_optab,
                       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
                       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    insn = emit_move_insn (stack_pointer_rtx, temp);
  else
    {
      insn = get_last_insn ();
      temp = single_set (insn);
      gcc_assert (temp != NULL && SET_DEST (temp) == stack_pointer_rtx);
    }

  if (!suppress_reg_args_size)
    add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
}

/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
   This pops when ADJUST is positive.  ADJUST need not be constant.  */

void
adjust_stack (rtx adjust)
{
  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be multiples of
     PREFERRED_STACK_BOUNDARY.  */
  if (CONST_INT_P (adjust))
    stack_pointer_delta -= INTVAL (adjust);

  adjust_stack_1 (adjust, false);
}

/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
   This pushes when ADJUST is positive.  ADJUST need not be constant.  */

void
anti_adjust_stack (rtx adjust)
{
  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be multiples of
     PREFERRED_STACK_BOUNDARY.  */
  if (CONST_INT_P (adjust))
    stack_pointer_delta += INTVAL (adjust);

  adjust_stack_1 (adjust, true);
}
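
/* E.g. anti_adjust_stack (GEN_INT (16)) on a STACK_GROWS_DOWNWARD target
   bumps stack_pointer_delta by 16 and emits sp := sp - 16 through
   adjust_stack_1; adjust_stack (GEN_INT (16)) undoes both.  (Assumed
   example size; any multiple of the preferred boundary behaves alike.)  */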

/* Round the size of a block to be pushed up to the boundary required
   by this machine.  SIZE is the desired size, which need not be constant.  */

static rtx
round_push (rtx size)
{
  rtx align_rtx, alignm1_rtx;

  if (!SUPPORTS_STACK_ALIGNMENT
      || crtl->preferred_stack_boundary == MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;

      if (align == 1)
        return size;

      if (CONST_INT_P (size))
        {
          HOST_WIDE_INT new_size = (INTVAL (size) + align - 1) / align * align;

          if (INTVAL (size) != new_size)
            size = GEN_INT (new_size);
          return size;
        }

      align_rtx = GEN_INT (align);
      alignm1_rtx = GEN_INT (align - 1);
    }
  else
    {
      /* If crtl->preferred_stack_boundary might still grow, use
         virtual_preferred_stack_boundary_rtx instead.  This will be
         substituted by the right value in vregs pass and optimized
         during combine.  */
      align_rtx = virtual_preferred_stack_boundary_rtx;
      alignm1_rtx = force_operand (plus_constant (Pmode, align_rtx, -1),
                                   NULL_RTX);
    }

  /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
     but we know it can't.  So add ourselves and then do
     TRUNC_DIV_EXPR.  */
  size = expand_binop (Pmode, add_optab, size, alignm1_rtx,
                       NULL_RTX, 1, OPTAB_LIB_WIDEN);
  size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, align_rtx,
                        NULL_RTX, 1);
  size = expand_mult (Pmode, size, align_rtx, NULL_RTX, 1);

  return size;
}
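
/* A quick check of the constant path above, with an assumed 16-byte
   preferred boundary: a SIZE of 40 becomes (40 + 15) / 16 * 16 = 48,
   while a SIZE of 48 is returned unchanged.  */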
\f
/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
   to a previously-created save area.  If no save area has been allocated,
   this function will allocate one.  If a save area is specified, it
   must be of the proper mode.  */

void
emit_stack_save (enum save_level save_level, rtx *psave)
{
  rtx sa = *psave;
  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn_uncast;
  machine_mode mode = STACK_SAVEAREA_MODE (save_level);

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_save_stack_block
    case SAVE_BLOCK:
      if (HAVE_save_stack_block)
        fcn = gen_save_stack_block;
      break;
#endif
#ifdef HAVE_save_stack_function
    case SAVE_FUNCTION:
      if (HAVE_save_stack_function)
        fcn = gen_save_stack_function;
      break;
#endif
#ifdef HAVE_save_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_save_stack_nonlocal)
        fcn = gen_save_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  /* If there is no save area and we have to allocate one, do so.  Otherwise
     verify the save area is the proper mode.  */

  if (sa == 0)
    {
      if (mode != VOIDmode)
        {
          if (save_level == SAVE_NONLOCAL)
            *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
          else
            *psave = sa = gen_reg_rtx (mode);
        }
    }

  do_pending_stack_adjust ();
  if (sa != 0)
    sa = validize_mem (sa);
  emit_insn (fcn (sa, stack_pointer_rtx));
}

/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
   area made by emit_stack_save.  If it is zero, we have nothing to do.  */

void
emit_stack_restore (enum save_level save_level, rtx sa)
{
  /* The default is that we use a move insn.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn_uncast;

  /* If stack_realign_drap, the x86 backend emits a prologue that aligns both
     STACK_POINTER and HARD_FRAME_POINTER.
     If stack_realign_fp, the x86 backend emits a prologue that aligns only
     STACK_POINTER.  This renders the HARD_FRAME_POINTER unusable for accessing
     aligned variables, which is reflected in ix86_can_eliminate.
     We normally still have the realigned STACK_POINTER that we can use.
     But if there is a stack restore still present at reload, it can trigger
     mark_not_eliminable for the STACK_POINTER, leaving no way to eliminate
     FRAME_POINTER into a hard reg.
     To prevent this situation, we force need_drap if we emit a stack
     restore.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_restore_stack_block
    case SAVE_BLOCK:
      if (HAVE_restore_stack_block)
        fcn = gen_restore_stack_block;
      break;
#endif
#ifdef HAVE_restore_stack_function
    case SAVE_FUNCTION:
      if (HAVE_restore_stack_function)
        fcn = gen_restore_stack_function;
      break;
#endif
#ifdef HAVE_restore_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_restore_stack_nonlocal)
        fcn = gen_restore_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  if (sa != 0)
    {
      sa = validize_mem (sa);
      /* These clobbers prevent the scheduler from moving
         references to variable arrays below the code
         that deletes (pops) the arrays.  */
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, stack_pointer_rtx));
    }

  discard_pending_stack_adjust ();

  emit_insn (fcn (stack_pointer_rtx, sa));
}
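
/* Typical pairing, sketched with an assumed local:

     rtx save = NULL_RTX;
     emit_stack_save (SAVE_BLOCK, &save);
     ... emit code that moves the stack pointer ...
     emit_stack_restore (SAVE_BLOCK, save);

   letting this pair pick either the target's save_stack_block pattern
   or a plain Pmode move.  */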

/* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
   function.  This should be called whenever we allocate or deallocate
   dynamic stack space.  */

void
update_nonlocal_goto_save_area (void)
{
  tree t_save;
  rtx r_save;

  /* The nonlocal_goto_save_area object is an array of N pointers.  The
     first one is used for the frame pointer save; the rest are sized by
     STACK_SAVEAREA_MODE.  Create a reference to array index 1, the first
     of the stack save area slots.  */
  t_save = build4 (ARRAY_REF,
                   TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
                   cfun->nonlocal_goto_save_area,
                   integer_one_node, NULL_TREE, NULL_TREE);
  r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);

  emit_stack_save (SAVE_NONLOCAL, &r_save);
}

/* Record a new stack level for the current function.  This should be called
   whenever we allocate or deallocate dynamic stack space.  */

void
record_new_stack_level (void)
{
  /* Record the new stack level for nonlocal gotos.  */
  if (cfun->nonlocal_goto_save_area)
    update_nonlocal_goto_save_area ();

  /* Record the new stack level for SJLJ exceptions.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    update_sjlj_context ();
}
\f
/* Return an rtx representing the address of an area of memory dynamically
   pushed on the stack.

   Any required stack pointer alignment is preserved.

   SIZE is an rtx representing the size of the area.

   SIZE_ALIGN is the alignment (in bits) that we know SIZE has.  This
   parameter may be zero.  If so, a proper value will be extracted
   from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.

   REQUIRED_ALIGN is the alignment (in bits) required for the region
   of memory.

   If CANNOT_ACCUMULATE is set to TRUE, the caller guarantees that the
   stack space allocated by the generated code cannot be added with itself
   in the course of the execution of the function.  It is always safe to
   pass FALSE here and the following criterion is sufficient in order to
   pass TRUE: every path in the CFG that starts at the allocation point and
   loops to it executes the associated deallocation code.  */

rtx
allocate_dynamic_stack_space (rtx size, unsigned size_align,
                              unsigned required_align, bool cannot_accumulate)
{
  HOST_WIDE_INT stack_usage_size = -1;
  rtx_code_label *final_label;
  rtx final_target, target;
  unsigned extra_align = 0;
  bool must_align;

  /* If we're asking for zero bytes, it doesn't matter what we point
     to since we can't dereference it.  But return a reasonable
     address anyway.  */
  if (size == const0_rtx)
    return virtual_stack_dynamic_rtx;

  /* Otherwise, show we're calling alloca or equivalent.  */
  cfun->calls_alloca = 1;

  /* If stack usage info is requested, look into the size we are passed.
     We need to do so this early to avoid the obfuscation that may be
     introduced later by the various alignment operations.  */
  if (flag_stack_usage_info)
    {
      if (CONST_INT_P (size))
        stack_usage_size = INTVAL (size);
      else if (REG_P (size))
        {
          /* Look into the last emitted insn and see if we can deduce
             something for the register.  */
          rtx_insn *insn;
          rtx set, note;
          insn = get_last_insn ();
          if ((set = single_set (insn)) && rtx_equal_p (SET_DEST (set), size))
            {
              if (CONST_INT_P (SET_SRC (set)))
                stack_usage_size = INTVAL (SET_SRC (set));
              else if ((note = find_reg_equal_equiv_note (insn))
                       && CONST_INT_P (XEXP (note, 0)))
                stack_usage_size = INTVAL (XEXP (note, 0));
            }
        }

      /* If the size is not constant, we can't say anything.  */
      if (stack_usage_size == -1)
        {
          current_function_has_unbounded_dynamic_stack_size = 1;
          stack_usage_size = 0;
        }
    }

  /* Ensure the size is in the proper mode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* Adjust SIZE_ALIGN, if needed.  */
  if (CONST_INT_P (size))
    {
      unsigned HOST_WIDE_INT lsb;

      lsb = INTVAL (size);
      lsb &= -lsb;

      /* Watch out for overflow truncating to "unsigned".  */
      if (lsb > UINT_MAX / BITS_PER_UNIT)
        size_align = 1u << (HOST_BITS_PER_INT - 1);
      else
        size_align = (unsigned)lsb * BITS_PER_UNIT;
    }
  else if (size_align < BITS_PER_UNIT)
    size_align = BITS_PER_UNIT;

  /* We can't attempt to minimize the alignment necessary, because we
     don't know the final value of preferred_stack_boundary yet while
     executing this code.  */
  if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
    crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;

  /* We will need to ensure that the address we return is aligned to
     REQUIRED_ALIGN.  If STACK_DYNAMIC_OFFSET is defined, we don't
     always know its final value at this point in the compilation (it
     might depend on the size of the outgoing parameter lists, for
     example), so we must align the value to be returned in that case.
     (Note that STACK_DYNAMIC_OFFSET will have a default nonzero value if
     STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).
     We must also do an alignment operation on the returned value if
     the stack pointer alignment is less strict than REQUIRED_ALIGN.

     If we have to align, we must leave space in SIZE for the hole
     that might result from the alignment operation.  */

  must_align = (crtl->preferred_stack_boundary < required_align);
  if (must_align)
    {
      if (required_align > PREFERRED_STACK_BOUNDARY)
        extra_align = PREFERRED_STACK_BOUNDARY;
      else if (required_align > STACK_BOUNDARY)
        extra_align = STACK_BOUNDARY;
      else
        extra_align = BITS_PER_UNIT;
    }

  /* ??? STACK_POINTER_OFFSET is always defined now.  */
#if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET)
  must_align = true;
  extra_align = BITS_PER_UNIT;
#endif

  if (must_align)
    {
      unsigned extra = (required_align - extra_align) / BITS_PER_UNIT;

      size = plus_constant (Pmode, size, extra);
      size = force_operand (size, NULL_RTX);

      if (flag_stack_usage_info)
        stack_usage_size += extra;

      if (extra && size_align > extra_align)
        size_align = extra_align;
    }

  /* Round the size to a multiple of the required stack alignment.
     Since the stack is presumed to be rounded before this allocation,
     this will maintain the required alignment.

     If the stack grows downward, we could save an insn by subtracting
     SIZE from the stack pointer and then aligning the stack pointer.
     The problem with this is that the stack pointer may be unaligned
     between the execution of the subtraction and alignment insns and
     some machines do not allow this.  Even on those that do, some
     signal handlers malfunction if a signal should occur between those
     insns.  Since this is an extremely rare event, we have no reliable
     way of knowing which systems have this problem.  So we avoid even
     momentarily mis-aligning the stack.  */
  if (size_align % MAX_SUPPORTED_STACK_ALIGNMENT != 0)
    {
      size = round_push (size);

      if (flag_stack_usage_info)
        {
          int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
          stack_usage_size = (stack_usage_size + align - 1) / align * align;
        }
    }

  target = gen_reg_rtx (Pmode);

  /* The size is supposed to be fully adjusted at this point so record it
     if stack usage info is requested.  */
  if (flag_stack_usage_info)
    {
      current_function_dynamic_stack_size += stack_usage_size;

      /* ??? This is gross but the only safe stance in the absence
         of stack usage oriented flow analysis.  */
      if (!cannot_accumulate)
        current_function_has_unbounded_dynamic_stack_size = 1;
    }

  final_label = NULL;
  final_target = NULL_RTX;

  /* If we are splitting the stack, we need to ask the backend whether
     there is enough room on the current stack.  If there isn't, or if
     the backend doesn't know how to tell us, then we need to call a
     function to allocate memory in some other way.  This memory will
     be released when we release the current stack segment.  The
     effect is that stack allocation becomes less efficient, but at
     least it doesn't cause a stack overflow.  */
  if (flag_split_stack)
    {
      rtx_code_label *available_label;
      rtx ask, space, func;

      available_label = NULL;

#ifdef HAVE_split_stack_space_check
      if (HAVE_split_stack_space_check)
        {
          available_label = gen_label_rtx ();

          /* This instruction will branch to AVAILABLE_LABEL if there
             are SIZE bytes available on the stack.  */
          emit_insn (gen_split_stack_space_check (size, available_label));
        }
#endif

      /* The __morestack_allocate_stack_space function will allocate
         memory using malloc.  If the alignment of the memory returned
         by malloc does not meet REQUIRED_ALIGN, we increase SIZE to
         make sure we allocate enough space.  */
      if (MALLOC_ABI_ALIGNMENT >= required_align)
        ask = size;
      else
        {
          ask = expand_binop (Pmode, add_optab, size,
                              gen_int_mode (required_align / BITS_PER_UNIT - 1,
                                            Pmode),
                              NULL_RTX, 1, OPTAB_LIB_WIDEN);
          must_align = true;
        }

      func = init_one_libfunc ("__morestack_allocate_stack_space");

      space = emit_library_call_value (func, target, LCT_NORMAL, Pmode,
                                       1, ask, Pmode);

      if (available_label == NULL_RTX)
        return space;

      final_target = gen_reg_rtx (Pmode);

      emit_move_insn (final_target, space);

      final_label = gen_label_rtx ();
      emit_jump (final_label);

      emit_label (available_label);
    }

  do_pending_stack_adjust ();

  /* We ought always to be called at the toplevel, and the stack ought to
     be aligned properly.  */
  gcc_assert (!(stack_pointer_delta
                % (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)));

  /* If needed, check that we have the required amount of stack.  Take into
     account what has already been checked.  */
  if (STACK_CHECK_MOVING_SP)
    ;
  else if (flag_stack_check == GENERIC_STACK_CHECK)
    probe_stack_range (STACK_OLD_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE,
                       size);
  else if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK)
    probe_stack_range (STACK_CHECK_PROTECT, size);

  /* Don't let anti_adjust_stack emit notes.  */
  suppress_reg_args_size = true;

  /* Perform the required allocation from the stack.  Some systems do
     this differently than simply incrementing/decrementing from the
     stack pointer, such as acquiring the space by calling malloc().  */
#ifdef HAVE_allocate_stack
  if (HAVE_allocate_stack)
    {
      struct expand_operand ops[2];
      /* We don't have to check against the predicate for operand 0 since
         TARGET is known to be a pseudo of the proper mode, which must
         be valid for the operand.  */
      create_fixed_operand (&ops[0], target);
      create_convert_operand_to (&ops[1], size, STACK_SIZE_MODE, true);
      expand_insn (CODE_FOR_allocate_stack, 2, ops);
    }
  else
#endif
    {
      int saved_stack_pointer_delta;

      if (!STACK_GROWS_DOWNWARD)
        emit_move_insn (target, virtual_stack_dynamic_rtx);

      /* Check stack bounds if necessary.  */
      if (crtl->limit_stack)
        {
          rtx available;
          rtx_code_label *space_available = gen_label_rtx ();
          if (STACK_GROWS_DOWNWARD)
            available = expand_binop (Pmode, sub_optab,
                                      stack_pointer_rtx, stack_limit_rtx,
                                      NULL_RTX, 1, OPTAB_WIDEN);
          else
            available = expand_binop (Pmode, sub_optab,
                                      stack_limit_rtx, stack_pointer_rtx,
                                      NULL_RTX, 1, OPTAB_WIDEN);

          emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
                                   space_available);
#ifdef HAVE_trap
          if (HAVE_trap)
            emit_insn (gen_trap ());
          else
#endif
            error ("stack limits not supported on this target");
          emit_barrier ();
          emit_label (space_available);
        }

      saved_stack_pointer_delta = stack_pointer_delta;

      if (flag_stack_check && STACK_CHECK_MOVING_SP)
        anti_adjust_stack_and_probe (size, false);
      else
        anti_adjust_stack (size);

      /* Even if size is constant, don't modify stack_pointer_delta.
         The constant size alloca should preserve
         crtl->preferred_stack_boundary alignment.  */
      stack_pointer_delta = saved_stack_pointer_delta;

      if (STACK_GROWS_DOWNWARD)
        emit_move_insn (target, virtual_stack_dynamic_rtx);
    }

  suppress_reg_args_size = false;

  /* Finish up the split stack handling.  */
  if (final_label != NULL_RTX)
    {
      gcc_assert (flag_split_stack);
      emit_move_insn (final_target, target);
      emit_label (final_label);
      target = final_target;
    }

  if (must_align)
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
         but we know it can't.  So add ourselves and then do
         TRUNC_DIV_EXPR.  */
      target = expand_binop (Pmode, add_optab, target,
                             gen_int_mode (required_align / BITS_PER_UNIT - 1,
                                           Pmode),
                             NULL_RTX, 1, OPTAB_LIB_WIDEN);
      target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
                              gen_int_mode (required_align / BITS_PER_UNIT,
                                            Pmode),
                              NULL_RTX, 1);
      target = expand_mult (Pmode, target,
                            gen_int_mode (required_align / BITS_PER_UNIT,
                                          Pmode),
                            NULL_RTX, 1);
    }

  /* Now that we've committed to a return value, mark its alignment.  */
  mark_reg_pointer (target, required_align);

  /* Record the new stack level.  */
  record_new_stack_level ();

  return target;
}
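
/* A sketch of how an alloca-style caller might use this (argument
   values assumed): pass the byte count, 0 for SIZE_ALIGN so it is
   derived from SIZE, BIGGEST_ALIGNMENT as REQUIRED_ALIGN, and false for
   CANNOT_ACCUMULATE when the allocation can repeat without an
   intervening release:

     rtx addr = allocate_dynamic_stack_space (size_rtx, 0,
                                              BIGGEST_ALIGNMENT, false);  */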
\f
/* A front end may want to override GCC's stack checking by providing a
   run-time routine to call to check the stack, so provide a mechanism for
   calling that routine.  */

static GTY(()) rtx stack_check_libfunc;

void
set_stack_check_libfunc (const char *libfunc_name)
{
  gcc_assert (stack_check_libfunc == NULL_RTX);
  stack_check_libfunc = gen_rtx_SYMBOL_REF (Pmode, libfunc_name);
}
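
/* A front end calls this once at initialization, e.g. (routine name
   assumed) set_stack_check_libfunc ("_front_end_stack_check");
   thereafter probe_stack_range funnels probes through that routine
   instead of emitting explicit probe insns.  */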
\f
/* Emit one stack probe at ADDRESS, an address within the stack.  */

void
emit_stack_probe (rtx address)
{
#ifdef HAVE_probe_stack_address
  if (HAVE_probe_stack_address)
    emit_insn (gen_probe_stack_address (address));
  else
#endif
    {
      rtx memref = gen_rtx_MEM (word_mode, address);

      MEM_VOLATILE_P (memref) = 1;

      /* See if we have an insn to probe the stack.  */
#ifdef HAVE_probe_stack
      if (HAVE_probe_stack)
        emit_insn (gen_probe_stack (memref));
      else
#endif
        emit_move_insn (memref, const0_rtx);
    }
}
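
/* On a target with no probe_stack* patterns, the fallback above emits a
   volatile word-mode store of zero, roughly
   (set (mem/v:word_mode (ADDRESS)) (const_int 0)), which faults if
   ADDRESS is not backed by committed stack.  */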
1527
d9b3eb63 1528/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
d809253a
EB
1529 FIRST is a constant and size is a Pmode RTX. These are offsets from
1530 the current stack pointer. STACK_GROWS_DOWNWARD says whether to add
1531 or subtract them from the stack pointer. */
1532
1533#define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP)
edff2491 1534
62f9f30b 1535#if STACK_GROWS_DOWNWARD
edff2491 1536#define STACK_GROW_OP MINUS
d809253a
EB
1537#define STACK_GROW_OPTAB sub_optab
1538#define STACK_GROW_OFF(off) -(off)
edff2491
RK
1539#else
1540#define STACK_GROW_OP PLUS
d809253a
EB
1541#define STACK_GROW_OPTAB add_optab
1542#define STACK_GROW_OFF(off) (off)
edff2491
RK
1543#endif
1544
1545void
502b8322 1546probe_stack_range (HOST_WIDE_INT first, rtx size)
edff2491 1547{
4b6c1672
RK
1548 /* First ensure SIZE is Pmode. */
1549 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
1550 size = convert_to_mode (Pmode, size, 1);
1551
d809253a
EB
1552 /* Next see if we have a function to check the stack. */
1553 if (stack_check_libfunc)
f5f5363f 1554 {
d809253a 1555 rtx addr = memory_address (Pmode,
2b3aadfc
RH
1556 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1557 stack_pointer_rtx,
0a81f074
RS
1558 plus_constant (Pmode,
1559 size, first)));
949fa04c
EB
1560 emit_library_call (stack_check_libfunc, LCT_NORMAL, VOIDmode, 1, addr,
1561 Pmode);
f5f5363f 1562 }
14a774a9 1563
d809253a 1564 /* Next see if we have an insn to check the stack. */
edff2491 1565#ifdef HAVE_check_stack
d6a6a07a 1566 else if (HAVE_check_stack)
edff2491 1567 {
a5c7d693 1568 struct expand_operand ops[1];
d809253a
EB
1569 rtx addr = memory_address (Pmode,
1570 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1571 stack_pointer_rtx,
0a81f074
RS
1572 plus_constant (Pmode,
1573 size, first)));
d6a6a07a 1574 bool success;
a5c7d693 1575 create_input_operand (&ops[0], addr, Pmode);
d6a6a07a
EB
1576 success = maybe_expand_insn (CODE_FOR_check_stack, 1, ops);
1577 gcc_assert (success);
edff2491
RK
1578 }
1579#endif
1580
d809253a
EB
1581 /* Otherwise we have to generate explicit probes. If we have a constant
1582 small number of them to generate, that's the easy case. */
1583 else if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
edff2491 1584 {
d809253a
EB
1585 HOST_WIDE_INT isize = INTVAL (size), i;
1586 rtx addr;
1587
1588 /* Probe at FIRST + N * PROBE_INTERVAL for values of N from 1 until
1589 it exceeds SIZE. If only one probe is needed, this will not
1590 generate any code. Then probe at FIRST + SIZE. */
1591 for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
1592 {
1593 addr = memory_address (Pmode,
0a81f074 1594 plus_constant (Pmode, stack_pointer_rtx,
d809253a
EB
1595 STACK_GROW_OFF (first + i)));
1596 emit_stack_probe (addr);
1597 }
1598
1599 addr = memory_address (Pmode,
0a81f074 1600 plus_constant (Pmode, stack_pointer_rtx,
d809253a
EB
1601 STACK_GROW_OFF (first + isize)));
1602 emit_stack_probe (addr);
edff2491
RK
1603 }

  /* In the variable case, do the same as above, but in a loop.  Note that we
     must be extra careful with variables wrapping around because we might be
     at the very top (or the very bottom) of the address space and we have to
     be able to handle this case properly; in particular, we use an equality
     test for the loop condition.  */
  else
    {
      rtx rounded_size, rounded_size_op, test_addr, last_addr, temp;
      rtx_code_label *loop_lab = gen_label_rtx ();
      rtx_code_label *end_lab = gen_label_rtx ();

      /* Step 1: round SIZE to the previous multiple of the interval.  */

      /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
      rounded_size
        = simplify_gen_binary (AND, Pmode, size,
                               gen_int_mode (-PROBE_INTERVAL, Pmode));
      rounded_size_op = force_operand (rounded_size, NULL_RTX);


      /* Step 2: compute initial and final value of the loop counter.  */

      /* TEST_ADDR = SP + FIRST.  */
      test_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 gen_int_mode (first, Pmode)),
                                 NULL_RTX);

      /* LAST_ADDR = SP + FIRST + ROUNDED_SIZE.  */
      last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 test_addr,
                                                 rounded_size_op), NULL_RTX);


      /* Step 3: the loop

         while (TEST_ADDR != LAST_ADDR)
           {
             TEST_ADDR = TEST_ADDR + PROBE_INTERVAL
             probe at TEST_ADDR
           }

         probes at FIRST + N * PROBE_INTERVAL for values of N from 1
         until it is equal to ROUNDED_SIZE.  */
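
      /* Note (illustrative): the equality test matters because LAST_ADDR
         may wrap around the end of the address space; an ordered test
         such as TEST_ADDR < LAST_ADDR could then be false before the
         first probe, while the != test still executes the loop exactly
         ROUNDED_SIZE / PROBE_INTERVAL times.  */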

      emit_label (loop_lab);

      /* Jump to END_LAB if TEST_ADDR == LAST_ADDR.  */
      emit_cmp_and_jump_insns (test_addr, last_addr, EQ, NULL_RTX, Pmode, 1,
                               end_lab);

      /* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL.  */
      temp = expand_binop (Pmode, STACK_GROW_OPTAB, test_addr,
                           gen_int_mode (PROBE_INTERVAL, Pmode), test_addr,
                           1, OPTAB_WIDEN);

      gcc_assert (temp == test_addr);

      /* Probe at TEST_ADDR.  */
      emit_stack_probe (test_addr);

      emit_jump (loop_lab);

      emit_label (end_lab);


      /* Step 4: probe at FIRST + SIZE if we cannot assert at compile-time
         that SIZE is equal to ROUNDED_SIZE.  */

      /* TEMP = SIZE - ROUNDED_SIZE.  */
      temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
      if (temp != const0_rtx)
        {
          rtx addr;

          if (CONST_INT_P (temp))
            {
              /* Use [base + disp] addressing mode if supported.  */
              HOST_WIDE_INT offset = INTVAL (temp);
              addr = memory_address (Pmode,
                                     plus_constant (Pmode, last_addr,
                                                    STACK_GROW_OFF (offset)));
            }
          else
            {
              /* Manual CSE if the difference is not known at compile-time.  */
              temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
              addr = memory_address (Pmode,
                                     gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                     last_addr, temp));
            }

          emit_stack_probe (addr);
        }
    }

  /* Make sure nothing is scheduled before we are done.  */
  emit_insn (gen_blockage ());
}
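
/* Hypothetical use (illustrative only, not an actual caller): a back end
   doing static stack checking might protect a 16 KB area beyond the
   region guarded by STACK_CHECK_PROTECT with something like

     probe_stack_range (STACK_CHECK_PROTECT, GEN_INT (16384));

   which emits probes every PROBE_INTERVAL bytes across that range.  */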

/* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
   while probing it.  This pushes when SIZE is positive.  SIZE need not
   be constant.  If ADJUST_BACK is true, adjust back the stack pointer
   by plus SIZE at the end.  */

void
anti_adjust_stack_and_probe (rtx size, bool adjust_back)
{
  /* We skip the probe for the first interval + a small dope of 4 words and
     probe that many bytes past the specified size to maintain a protection
     area at the bottom of the stack.  */
  const int dope = 4 * UNITS_PER_WORD;
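
  /* Illustrative arithmetic: on a 64-bit target with UNITS_PER_WORD == 8,
     dope is 32 bytes, so the first probe is skipped for
     PROBE_INTERVAL + 32 bytes.  */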

  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* If we have a constant small number of probes to generate, that's the
     easy case.  */
  if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
    {
      HOST_WIDE_INT isize = INTVAL (size), i;
      bool first_probe = true;

      /* Adjust SP and probe at PROBE_INTERVAL + N * PROBE_INTERVAL for
         values of N from 1 until it exceeds SIZE.  If only one probe is
         needed, this will not generate any code.  Then adjust and probe
         to PROBE_INTERVAL + SIZE.  */
      for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
        {
          if (first_probe)
            {
              anti_adjust_stack (GEN_INT (2 * PROBE_INTERVAL + dope));
              first_probe = false;
            }
          else
            anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
          emit_stack_probe (stack_pointer_rtx);
        }

      if (first_probe)
        anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
      else
        anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL - i));
      emit_stack_probe (stack_pointer_rtx);
    }

  /* In the variable case, do the same as above, but in a loop.  Note that we
     must be extra careful with variables wrapping around because we might be
     at the very top (or the very bottom) of the address space and we have to
     be able to handle this case properly; in particular, we use an equality
     test for the loop condition.  */
  else
    {
      rtx rounded_size, rounded_size_op, last_addr, temp;
      rtx_code_label *loop_lab = gen_label_rtx ();
      rtx_code_label *end_lab = gen_label_rtx ();


      /* Step 1: round SIZE to the previous multiple of the interval.  */

      /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
      rounded_size
        = simplify_gen_binary (AND, Pmode, size,
                               gen_int_mode (-PROBE_INTERVAL, Pmode));
      rounded_size_op = force_operand (rounded_size, NULL_RTX);


      /* Step 2: compute initial and final value of the loop counter.  */

      /* SP = SP_0 + PROBE_INTERVAL.  */
      anti_adjust_stack (GEN_INT (PROBE_INTERVAL + dope));

      /* LAST_ADDR = SP_0 + PROBE_INTERVAL + ROUNDED_SIZE.  */
      last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 rounded_size_op), NULL_RTX);


      /* Step 3: the loop

         while (SP != LAST_ADDR)
           {
             SP = SP + PROBE_INTERVAL
             probe at SP
           }

         adjusts SP and probes at PROBE_INTERVAL + N * PROBE_INTERVAL for
         values of N from 1 until it is equal to ROUNDED_SIZE.  */

      emit_label (loop_lab);

      /* Jump to END_LAB if SP == LAST_ADDR.  */
      emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
                               Pmode, 1, end_lab);

      /* SP = SP + PROBE_INTERVAL and probe at SP.  */
      anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
      emit_stack_probe (stack_pointer_rtx);

      emit_jump (loop_lab);

      emit_label (end_lab);


      /* Step 4: adjust SP and probe at PROBE_INTERVAL + SIZE if we cannot
         assert at compile-time that SIZE is equal to ROUNDED_SIZE.  */

      /* TEMP = SIZE - ROUNDED_SIZE.  */
      temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
      if (temp != const0_rtx)
        {
          /* Manual CSE if the difference is not known at compile-time.  */
          if (GET_CODE (temp) != CONST_INT)
            temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
          anti_adjust_stack (temp);
          emit_stack_probe (stack_pointer_rtx);
        }
    }

  /* Adjust back and account for the additional first interval.  */
  if (adjust_back)
    adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
  else
    adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
}
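
/* Net effect (illustrative): in both the constant and the variable case,
   the anti-adjustments above sum to SIZE + PROBE_INTERVAL + dope bytes.
   With ADJUST_BACK true the final adjust_stack releases exactly that
   amount, leaving the stack pointer where it started; otherwise the
   caller is expected to account for the extra PROBE_INTERVAL + dope
   bytes itself.  */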

/* Return an rtx representing the register or memory location
   in which a scalar value of data type VALTYPE
   was returned by a function call to function FUNC.
   FUNC is a FUNCTION_DECL, FNTYPE a FUNCTION_TYPE node if the precise
   function is known, otherwise 0.
   OUTGOING is 1 if on a machine with register windows this function
   should return the register in which the function will put its result
   and 0 otherwise.  */

rtx
hard_function_value (const_tree valtype, const_tree func, const_tree fntype,
                     int outgoing ATTRIBUTE_UNUSED)
{
  rtx val;

  val = targetm.calls.function_value (valtype, func ? func : fntype, outgoing);

  if (REG_P (val)
      && GET_MODE (val) == BLKmode)
    {
      unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
      machine_mode tmpmode;

      /* int_size_in_bytes can return -1.  We don't need a check here
         since the value of bytes will then be large enough that no
         mode will match anyway.  */

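      /* Worked example (illustrative): for a 12-byte BLKmode value the
         walk below visits QImode (1 byte), HImode (2), SImode (4) and
         DImode (8), then stops at the first integer mode of at least
         12 bytes, typically TImode (16).  */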
      for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmpmode != VOIDmode;
           tmpmode = GET_MODE_WIDER_MODE (tmpmode))
        {
          /* Have we found a large enough mode?  */
          if (GET_MODE_SIZE (tmpmode) >= bytes)
            break;
        }

      /* No suitable mode found.  */
      gcc_assert (tmpmode != VOIDmode);

      PUT_MODE (val, tmpmode);
    }
  return val;
}

/* Return an rtx representing the register or memory location
   in which a scalar value of mode MODE was returned by a library call.  */

rtx
hard_libcall_value (machine_mode mode, rtx fun)
{
  return targetm.calls.libcall_value (mode, fun);
}

/* Look up the tree code for a given rtx code
   to provide the arithmetic operation for REAL_ARITHMETIC.
   The function returns an int because the caller may not know
   what `enum tree_code' means.  */

int
rtx_to_tree_code (enum rtx_code code)
{
  enum tree_code tcode;

  switch (code)
    {
    case PLUS:
      tcode = PLUS_EXPR;
      break;
    case MINUS:
      tcode = MINUS_EXPR;
      break;
    case MULT:
      tcode = MULT_EXPR;
      break;
    case DIV:
      tcode = RDIV_EXPR;
      break;
    case SMIN:
      tcode = MIN_EXPR;
      break;
    case SMAX:
      tcode = MAX_EXPR;
      break;
    default:
      tcode = LAST_AND_UNUSED_TREE_CODE;
      break;
    }
  return ((int) tcode);
}
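
/* Usage sketch (illustrative): rtx_to_tree_code (PLUS) yields
   (int) PLUS_EXPR, while an rtx code with no tree equivalent here,
   e.g. UDIV, falls through to LAST_AND_UNUSED_TREE_CODE.  */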

#include "gt-explow.h"