gcc/explow.c
18ca7dab 1/* Subroutines for manipulating rtx's in semantically interesting ways.
99dee823 2 Copyright (C) 1987-2021 Free Software Foundation, Inc.
18ca7dab 3
1322177d 4This file is part of GCC.
18ca7dab 5
1322177d
LB
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
1322177d 9version.
18ca7dab 10
1322177d
LB
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
18ca7dab
RK
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
18ca7dab
RK
19
20
21#include "config.h"
670ee920 22#include "system.h"
4977bab6 23#include "coretypes.h"
957060b5
AM
24#include "target.h"
25#include "function.h"
18ca7dab
RK
26#include "rtl.h"
27#include "tree.h"
4d0cdd0c 28#include "memmodel.h"
6baf1cc8 29#include "tm_p.h"
e34153b0 30#include "optabs.h"
957060b5 31#include "expmed.h"
357067f2 32#include "profile-count.h"
957060b5
AM
33#include "emit-rtl.h"
34#include "recog.h"
35#include "diagnostic-core.h"
957060b5 36#include "stor-layout.h"
b38f3813 37#include "except.h"
36566b39
PK
38#include "dojump.h"
39#include "explow.h"
18ca7dab 40#include "expr.h"
3cf3da88 41#include "stringpool.h"
677f3fa8 42#include "common/common-target.h"
aacd3885 43#include "output.h"
18ca7dab 44
502b8322 45static rtx break_out_memory_refs (rtx);
7e4ce834
RH
46
47
48/* Truncate and perhaps sign-extend C as appropriate for MODE. */
49
50HOST_WIDE_INT
ef4bddc2 51trunc_int_for_mode (HOST_WIDE_INT c, machine_mode mode)
7e4ce834 52{
db61b7f9
RS
53 /* Not scalar_int_mode because we also allow pointer bound modes. */
54 scalar_mode smode = as_a <scalar_mode> (mode);
55 int width = GET_MODE_PRECISION (smode);
7e4ce834 56
71012d97 57 /* You want to truncate to a _what_? */
eafa30ef 58 gcc_assert (SCALAR_INT_MODE_P (mode));
71012d97 59
1f3f36d1 60 /* Canonicalize BImode to 0 and STORE_FLAG_VALUE. */
db61b7f9 61 if (smode == BImode)
1f3f36d1
RH
62 return c & 1 ? STORE_FLAG_VALUE : 0;
63
5b0d91c3
AO
64 /* Sign-extend for the requested mode. */
65
66 if (width < HOST_BITS_PER_WIDE_INT)
67 {
68 HOST_WIDE_INT sign = 1;
69 sign <<= width - 1;
70 c &= (sign << 1) - 1;
71 c ^= sign;
72 c -= sign;
73 }
7e4ce834
RH
74
75 return c;
76}
77
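/* Editor's illustrative note, not part of the original explow.c: assuming
   the usual 8-bit QImode and 16-bit HImode,

       trunc_int_for_mode (0xff, QImode)     ==> -1
       trunc_int_for_mode (0x12345, HImode)  ==> 0x2345

   i.e. the constant is masked to the mode's precision and then
   sign-extended back into a full HOST_WIDE_INT.  */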
0c12fc9b
RS
78/* Likewise for polynomial values, using the sign-extended representation
79 for each individual coefficient. */
80
81poly_int64
82trunc_int_for_mode (poly_int64 x, machine_mode mode)
83{
84 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
85 x.coeffs[i] = trunc_int_for_mode (x.coeffs[i], mode);
86 return x;
87}
88
929e10f4 89/* Return an rtx for the sum of X and the integer C, given that X has
23b33725
RS
90 mode MODE. INPLACE is true if X can be modified inplace or false
91 if it must be treated as immutable. */
18ca7dab
RK
92
93rtx
0c12fc9b 94plus_constant (machine_mode mode, rtx x, poly_int64 c, bool inplace)
18ca7dab 95{
b3694847 96 RTX_CODE code;
17ab7c59 97 rtx y;
b3694847 98 rtx tem;
18ca7dab
RK
99 int all_constant = 0;
100
0a81f074
RS
101 gcc_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
102
0c12fc9b 103 if (known_eq (c, 0))
18ca7dab
RK
104 return x;
105
106 restart:
107
108 code = GET_CODE (x);
17ab7c59
RK
109 y = x;
110
18ca7dab
RK
111 switch (code)
112 {
807e902e 113 CASE_CONST_SCALAR_INT:
f079167a 114 return immed_wide_int_const (wi::add (rtx_mode_t (x, mode), c), mode);
18ca7dab
RK
115 case MEM:
116 /* If this is a reference to the constant pool, try replacing it with
117 a reference to a new constant. If the resulting address isn't
118 valid, don't return it because we have no way to validize it. */
119 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
120 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
121 {
8a27cf73
UB
122 rtx cst = get_pool_constant (XEXP (x, 0));
123
124 if (GET_CODE (cst) == CONST_VECTOR
125 && GET_MODE_INNER (GET_MODE (cst)) == mode)
126 {
127 cst = gen_lowpart (mode, cst);
128 gcc_assert (cst);
129 }
0ab503d3
JJ
130 else if (GET_MODE (cst) == VOIDmode
131 && get_pool_mode (XEXP (x, 0)) != mode)
132 break;
2b568899
RB
133 if (GET_MODE (cst) == VOIDmode || GET_MODE (cst) == mode)
134 {
135 tem = plus_constant (mode, cst, c);
136 tem = force_const_mem (GET_MODE (x), tem);
137 /* Targets may disallow some constants in the constant pool, thus
138 force_const_mem may return NULL_RTX. */
139 if (tem && memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
140 return tem;
141 }
18ca7dab
RK
142 }
143 break;
144
145 case CONST:
146 /* If adding to something entirely constant, set a flag
147 so that we can add a CONST around the result. */
23b33725
RS
148 if (inplace && shared_const_p (x))
149 inplace = false;
18ca7dab
RK
150 x = XEXP (x, 0);
151 all_constant = 1;
152 goto restart;
153
154 case SYMBOL_REF:
155 case LABEL_REF:
156 all_constant = 1;
157 break;
158
159 case PLUS:
929e10f4
MS
160 /* The interesting case is adding the integer to a sum. Look
 161 for a constant term in the sum and combine it with C. For an
162 integer constant term or a constant term that is not an
163 explicit integer, we combine or group them together anyway.
03d937fc
R
164
165 We may not immediately return from the recursive call here, lest
166 all_constant gets lost. */
e5671f2b 167
929e10f4 168 if (CONSTANT_P (XEXP (x, 1)))
03d937fc 169 {
23b33725
RS
170 rtx term = plus_constant (mode, XEXP (x, 1), c, inplace);
171 if (term == const0_rtx)
172 x = XEXP (x, 0);
173 else if (inplace)
174 XEXP (x, 1) = term;
175 else
176 x = gen_rtx_PLUS (mode, XEXP (x, 0), term);
03d937fc
R
177 c = 0;
178 }
23b33725 179 else if (rtx *const_loc = find_constant_term_loc (&y))
03d937fc 180 {
23b33725
RS
181 if (!inplace)
182 {
183 /* We need to be careful since X may be shared and we can't
184 modify it in place. */
185 x = copy_rtx (x);
186 const_loc = find_constant_term_loc (&x);
187 }
188 *const_loc = plus_constant (mode, *const_loc, c, true);
03d937fc
R
189 c = 0;
190 }
38a448ca 191 break;
ed8908e7 192
38a448ca 193 default:
0c12fc9b
RS
194 if (CONST_POLY_INT_P (x))
195 return immed_wide_int_const (const_poly_int_value (x) + c, mode);
38a448ca 196 break;
18ca7dab
RK
197 }
198
0c12fc9b 199 if (maybe_ne (c, 0))
4789c0ce 200 x = gen_rtx_PLUS (mode, x, gen_int_mode (c, mode));
18ca7dab
RK
201
202 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
203 return x;
204 else if (all_constant)
38a448ca 205 return gen_rtx_CONST (mode, x);
18ca7dab
RK
206 else
207 return x;
208}
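/* Editor's illustrative sketch, not part of the original explow.c: a
   hypothetical helper showing the usual plus_constant idiom.  BASE_REG is
   assumed to be a Pmode pseudo; no insns are emitted, and the result is
   either a folded constant or a PLUS rtx.  */

static rtx ATTRIBUTE_UNUSED
example_displace_address (rtx base_reg)
{
  /* (plus:P (reg) (const_int 16)), or a folded constant if BASE_REG
     happens to be constant.  */
  return plus_constant (Pmode, base_reg, 16);
}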
18ca7dab
RK
209\f
210/* If X is a sum, return a new sum like X but lacking any constant terms.
211 Add all the removed constant terms into *CONSTPTR.
212 X itself is not altered. The result != X if and only if
213 it is not isomorphic to X. */
214
215rtx
502b8322 216eliminate_constant_term (rtx x, rtx *constptr)
18ca7dab 217{
b3694847 218 rtx x0, x1;
18ca7dab
RK
219 rtx tem;
220
221 if (GET_CODE (x) != PLUS)
222 return x;
223
224 /* First handle constants appearing at this level explicitly. */
481683e1 225 if (CONST_INT_P (XEXP (x, 1))
01512446
JJ
226 && (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
227 XEXP (x, 1))) != 0
481683e1 228 && CONST_INT_P (tem))
18ca7dab
RK
229 {
230 *constptr = tem;
231 return eliminate_constant_term (XEXP (x, 0), constptr);
232 }
233
234 tem = const0_rtx;
235 x0 = eliminate_constant_term (XEXP (x, 0), &tem);
236 x1 = eliminate_constant_term (XEXP (x, 1), &tem);
237 if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
01512446
JJ
238 && (tem = simplify_binary_operation (PLUS, GET_MODE (x),
239 *constptr, tem)) != 0
481683e1 240 && CONST_INT_P (tem))
18ca7dab
RK
241 {
242 *constptr = tem;
38a448ca 243 return gen_rtx_PLUS (GET_MODE (x), x0, x1);
18ca7dab
RK
244 }
245
246 return x;
247}
248
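/* Editor's illustrative note, not part of the original explow.c: given
   X = (plus:SI (plus:SI (reg R) (const_int 8)) (const_int -2)) and
   *CONSTPTR = (const_int 0), eliminate_constant_term returns (reg R)
   and leaves (const_int 6) in *CONSTPTR; X itself is not modified.  */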
18ca7dab
RK
249\f
250/* Return a copy of X in which all memory references
251 and all constants that involve symbol refs
252 have been replaced with new temporary registers.
253 Also emit code to load the memory locations and constants
254 into those registers.
255
256 If X contains no such constants or memory references,
257 X itself (not a copy) is returned.
258
259 If a constant is found in the address that is not a legitimate constant
260 in an insn, it is left alone in the hope that it might be valid in the
261 address.
262
263 X may contain no arithmetic except addition, subtraction and multiplication.
264 Values returned by expand_expr with 1 for sum_ok fit this constraint. */
265
266static rtx
502b8322 267break_out_memory_refs (rtx x)
18ca7dab 268{
3c0cb5de 269 if (MEM_P (x)
cabeca29 270 || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
18ca7dab 271 && GET_MODE (x) != VOIDmode))
2cca6e3f 272 x = force_reg (GET_MODE (x), x);
18ca7dab
RK
273 else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
274 || GET_CODE (x) == MULT)
275 {
b3694847
SS
276 rtx op0 = break_out_memory_refs (XEXP (x, 0));
277 rtx op1 = break_out_memory_refs (XEXP (x, 1));
2cca6e3f 278
18ca7dab 279 if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
d4ebfa65 280 x = simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
18ca7dab 281 }
2cca6e3f 282
18ca7dab
RK
283 return x;
284}
285
d4ebfa65
BE
286/* Given X, a memory address in address space AS' pointer mode, convert it to
287 an address in the address space's address mode, or vice versa (TO_MODE says
288 which way). We take advantage of the fact that pointers are not allowed to
289 overflow by commuting arithmetic operations over conversions so that address
7745730f 290 arithmetic insns can be used. IN_CONST is true if this conversion is inside
3d3f9e7e
JJ
291 a CONST. NO_EMIT is true if no insns should be emitted, and instead
292 it should return NULL if it can't be simplified without emitting insns. */
ea534b63 293
3d3f9e7e 294rtx
095a2d76 295convert_memory_address_addr_space_1 (scalar_int_mode to_mode ATTRIBUTE_UNUSED,
7745730f 296 rtx x, addr_space_t as ATTRIBUTE_UNUSED,
3d3f9e7e
JJ
297 bool in_const ATTRIBUTE_UNUSED,
298 bool no_emit ATTRIBUTE_UNUSED)
ea534b63 299{
5ae6cd0d 300#ifndef POINTERS_EXTEND_UNSIGNED
7c137931 301 gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
5ae6cd0d
MM
302 return x;
303#else /* defined(POINTERS_EXTEND_UNSIGNED) */
095a2d76 304 scalar_int_mode pointer_mode, address_mode, from_mode;
498b529f 305 rtx temp;
aa0f70e6 306 enum rtx_code code;
498b529f 307
5ae6cd0d
MM
308 /* If X already has the right mode, just return it. */
309 if (GET_MODE (x) == to_mode)
310 return x;
311
d4ebfa65
BE
312 pointer_mode = targetm.addr_space.pointer_mode (as);
313 address_mode = targetm.addr_space.address_mode (as);
314 from_mode = to_mode == pointer_mode ? address_mode : pointer_mode;
5ae6cd0d 315
0b04ec8c
RK
316 /* Here we handle some special cases. If none of them apply, fall through
317 to the default case. */
ea534b63
RK
318 switch (GET_CODE (x))
319 {
d8116890 320 CASE_CONST_SCALAR_INT:
aa0f70e6
SE
321 if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
322 code = TRUNCATE;
323 else if (POINTERS_EXTEND_UNSIGNED < 0)
324 break;
325 else if (POINTERS_EXTEND_UNSIGNED > 0)
326 code = ZERO_EXTEND;
327 else
328 code = SIGN_EXTEND;
329 temp = simplify_unary_operation (code, to_mode, x, from_mode);
330 if (temp)
331 return temp;
332 break;
498b529f 333
d1405722 334 case SUBREG:
5da4f548 335 if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
6dd12198 336 && GET_MODE (SUBREG_REG (x)) == to_mode)
d1405722
RK
337 return SUBREG_REG (x);
338 break;
339
ea534b63 340 case LABEL_REF:
04a121a7 341 temp = gen_rtx_LABEL_REF (to_mode, label_ref_label (x));
5da4f548
SE
342 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
343 return temp;
498b529f 344
ea534b63 345 case SYMBOL_REF:
ce02ba25
EC
346 temp = shallow_copy_rtx (x);
347 PUT_MODE (temp, to_mode);
5da4f548 348 return temp;
ea534b63 349
498b529f 350 case CONST:
3d3f9e7e
JJ
351 temp = convert_memory_address_addr_space_1 (to_mode, XEXP (x, 0), as,
352 true, no_emit);
353 return temp ? gen_rtx_CONST (to_mode, temp) : temp;
ea534b63 354
0b04ec8c
RK
355 case PLUS:
356 case MULT:
ceeb2cbc
AP
357 /* For addition we can safely permute the conversion and addition
358 operation if one operand is a constant and converting the constant
359 does not change it or if one operand is a constant and we are
360 using a ptr_extend instruction (POINTERS_EXTEND_UNSIGNED < 0).
17939c98 361 We can always safely permute them if we are making the address
7745730f
AP
362 narrower. Inside a CONST RTL, this is safe for both pointers
363 zero or sign extended as pointers cannot wrap. */
aa0f70e6
SE
364 if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
365 || (GET_CODE (x) == PLUS
481683e1 366 && CONST_INT_P (XEXP (x, 1))
7745730f
AP
367 && ((in_const && POINTERS_EXTEND_UNSIGNED != 0)
368 || XEXP (x, 1) == convert_memory_address_addr_space_1
3d3f9e7e
JJ
369 (to_mode, XEXP (x, 1), as, in_const,
370 no_emit)
7745730f 371 || POINTERS_EXTEND_UNSIGNED < 0)))
3d3f9e7e
JJ
372 {
373 temp = convert_memory_address_addr_space_1 (to_mode, XEXP (x, 0),
374 as, in_const, no_emit);
b88990be
JJ
375 return (temp ? gen_rtx_fmt_ee (GET_CODE (x), to_mode,
376 temp, XEXP (x, 1))
377 : temp);
3d3f9e7e 378 }
38a448ca 379 break;
d9b3eb63 380
e8beba1c
RS
381 case UNSPEC:
382 /* Assume that all UNSPECs in a constant address can be converted
383 operand-by-operand. We could add a target hook if some targets
384 require different behavior. */
385 if (in_const && GET_MODE (x) == from_mode)
386 {
387 unsigned int n = XVECLEN (x, 0);
388 rtvec v = gen_rtvec (n);
389 for (unsigned int i = 0; i < n; ++i)
390 {
391 rtx op = XVECEXP (x, 0, i);
392 if (GET_MODE (op) == from_mode)
393 op = convert_memory_address_addr_space_1 (to_mode, op, as,
394 in_const, no_emit);
395 RTVEC_ELT (v, i) = op;
396 }
397 return gen_rtx_UNSPEC (to_mode, v, XINT (x, 1));
398 }
399 break;
400
38a448ca
RH
401 default:
402 break;
ea534b63 403 }
0b04ec8c 404
3d3f9e7e
JJ
405 if (no_emit)
406 return NULL_RTX;
407
0b04ec8c
RK
408 return convert_modes (to_mode, from_mode,
409 x, POINTERS_EXTEND_UNSIGNED);
5ae6cd0d 410#endif /* defined(POINTERS_EXTEND_UNSIGNED) */
ea534b63 411}
7745730f
AP
412
413/* Given X, a memory address in address space AS' pointer mode, convert it to
414 an address in the address space's address mode, or vice versa (TO_MODE says
415 which way). We take advantage of the fact that pointers are not allowed to
416 overflow by commuting arithmetic operations over conversions so that address
417 arithmetic insns can be used. */
418
419rtx
095a2d76
RS
420convert_memory_address_addr_space (scalar_int_mode to_mode, rtx x,
421 addr_space_t as)
7745730f 422{
3d3f9e7e 423 return convert_memory_address_addr_space_1 (to_mode, x, as, false, false);
7745730f 424}
18ca7dab 425\f
36566b39 426
09e881c9
BE
427/* Return something equivalent to X but valid as a memory address for something
428 of mode MODE in the named address space AS. When X is not itself valid,
429 this works by copying X or subexpressions of it into registers. */
18ca7dab
RK
430
431rtx
ef4bddc2 432memory_address_addr_space (machine_mode mode, rtx x, addr_space_t as)
18ca7dab 433{
b3694847 434 rtx oldx = x;
095a2d76 435 scalar_int_mode address_mode = targetm.addr_space.address_mode (as);
18ca7dab 436
d4ebfa65 437 x = convert_memory_address_addr_space (address_mode, x, as);
ea534b63 438
ba228239 439 /* By passing constant addresses through registers
18ca7dab 440 we get a chance to cse them. */
cabeca29 441 if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
d4ebfa65 442 x = force_reg (address_mode, x);
18ca7dab 443
18ca7dab
RK
444 /* We get better cse by rejecting indirect addressing at this stage.
445 Let the combiner create indirect addresses where appropriate.
446 For now, generate the code so that the subexpressions useful to share
447 are visible. But not if cse won't be done! */
18b9ca6f 448 else
18ca7dab 449 {
f8cfc6aa 450 if (! cse_not_expected && !REG_P (x))
18b9ca6f
RK
451 x = break_out_memory_refs (x);
452
453 /* At this point, any valid address is accepted. */
09e881c9 454 if (memory_address_addr_space_p (mode, x, as))
3de5e93a 455 goto done;
18b9ca6f
RK
456
457 /* If it was valid before but breaking out memory refs invalidated it,
458 use it the old way. */
09e881c9 459 if (memory_address_addr_space_p (mode, oldx, as))
3de5e93a
SB
460 {
461 x = oldx;
462 goto done;
463 }
18b9ca6f
RK
464
465 /* Perform machine-dependent transformations on X
466 in certain cases. This is not necessary since the code
467 below can handle all possible cases, but machine-dependent
468 transformations can make better code. */
506d7b68 469 {
09e881c9
BE
470 rtx orig_x = x;
471 x = targetm.addr_space.legitimize_address (x, oldx, mode, as);
472 if (orig_x != x && memory_address_addr_space_p (mode, x, as))
506d7b68
PB
473 goto done;
474 }
18b9ca6f
RK
475
476 /* PLUS and MULT can appear in special ways
477 as the result of attempts to make an address usable for indexing.
478 Usually they are dealt with by calling force_operand, below.
479 But a sum containing constant terms is special
480 if removing them makes the sum a valid address:
481 then we generate that address in a register
482 and index off of it. We do this because it often makes
483 shorter code, and because the addresses thus generated
484 in registers often become common subexpressions. */
485 if (GET_CODE (x) == PLUS)
486 {
487 rtx constant_term = const0_rtx;
488 rtx y = eliminate_constant_term (x, &constant_term);
489 if (constant_term == const0_rtx
09e881c9 490 || ! memory_address_addr_space_p (mode, y, as))
18b9ca6f
RK
491 x = force_operand (x, NULL_RTX);
492 else
493 {
38a448ca 494 y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
09e881c9 495 if (! memory_address_addr_space_p (mode, y, as))
18b9ca6f
RK
496 x = force_operand (x, NULL_RTX);
497 else
498 x = y;
499 }
500 }
18ca7dab 501
e475ed2a 502 else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
18b9ca6f 503 x = force_operand (x, NULL_RTX);
18ca7dab 504
18b9ca6f
RK
505 /* If we have a register that's an invalid address,
506 it must be a hard reg of the wrong class. Copy it to a pseudo. */
f8cfc6aa 507 else if (REG_P (x))
18b9ca6f
RK
508 x = copy_to_reg (x);
509
510 /* Last resort: copy the value to a register, since
511 the register is a valid address. */
512 else
d4ebfa65 513 x = force_reg (address_mode, x);
18ca7dab 514 }
18b9ca6f
RK
515
516 done:
517
09e881c9 518 gcc_assert (memory_address_addr_space_p (mode, x, as));
2cca6e3f
RK
519 /* If we didn't change the address, we are done. Otherwise, mark
520 a reg as a pointer if we have REG or REG + CONST_INT. */
521 if (oldx == x)
522 return x;
f8cfc6aa 523 else if (REG_P (x))
bdb429a5 524 mark_reg_pointer (x, BITS_PER_UNIT);
2cca6e3f 525 else if (GET_CODE (x) == PLUS
f8cfc6aa 526 && REG_P (XEXP (x, 0))
481683e1 527 && CONST_INT_P (XEXP (x, 1)))
bdb429a5 528 mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);
2cca6e3f 529
18b9ca6f
RK
530 /* OLDX may have been the address on a temporary. Update the address
531 to indicate that X is now used. */
532 update_temp_slot_address (oldx, x);
533
18ca7dab
RK
534 return x;
535}
536
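/* Editor's illustrative sketch, not part of the original explow.c: the
   typical caller pattern when expanding a load in the generic address
   space uses the memory_address wrapper from explow.h, which resolves to
   memory_address_addr_space with ADDR_SPACE_GENERIC.  */

static rtx ATTRIBUTE_UNUSED
example_load_word (rtx addr)
{
  addr = memory_address (SImode, addr);
  return gen_rtx_MEM (SImode, addr);
}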
b8105705
EB
537/* Convert a mem ref into one with a valid memory address.
538 Pass through anything else unchanged. */
18ca7dab
RK
539
540rtx
502b8322 541validize_mem (rtx ref)
18ca7dab 542{
3c0cb5de 543 if (!MEM_P (ref))
18ca7dab 544 return ref;
aacd3885 545 ref = use_anchored_address (ref);
09e881c9
BE
546 if (memory_address_addr_space_p (GET_MODE (ref), XEXP (ref, 0),
547 MEM_ADDR_SPACE (ref)))
18ca7dab 548 return ref;
792760b9 549
b8105705
EB
550 /* Don't alter REF itself, since that is probably a stack slot. */
551 return replace_equiv_address (ref, XEXP (ref, 0));
18ca7dab 552}
aacd3885
RS
553
554/* If X is a memory reference to a member of an object block, try rewriting
555 it to use an anchor instead. Return the new memory reference on success
556 and the old one on failure. */
557
558rtx
559use_anchored_address (rtx x)
560{
561 rtx base;
562 HOST_WIDE_INT offset;
ef4bddc2 563 machine_mode mode;
aacd3885
RS
564
565 if (!flag_section_anchors)
566 return x;
567
568 if (!MEM_P (x))
569 return x;
570
571 /* Split the address into a base and offset. */
572 base = XEXP (x, 0);
573 offset = 0;
574 if (GET_CODE (base) == CONST
575 && GET_CODE (XEXP (base, 0)) == PLUS
481683e1 576 && CONST_INT_P (XEXP (XEXP (base, 0), 1)))
aacd3885
RS
577 {
578 offset += INTVAL (XEXP (XEXP (base, 0), 1));
579 base = XEXP (XEXP (base, 0), 0);
580 }
581
582 /* Check whether BASE is suitable for anchors. */
583 if (GET_CODE (base) != SYMBOL_REF
3fa9c136 584 || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
aacd3885 585 || SYMBOL_REF_ANCHOR_P (base)
434aeebb 586 || SYMBOL_REF_BLOCK (base) == NULL
aacd3885
RS
587 || !targetm.use_anchors_for_symbol_p (base))
588 return x;
589
590 /* Decide where BASE is going to be. */
591 place_block_symbol (base);
592
593 /* Get the anchor we need to use. */
594 offset += SYMBOL_REF_BLOCK_OFFSET (base);
595 base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
596 SYMBOL_REF_TLS_MODEL (base));
597
598 /* Work out the offset from the anchor. */
599 offset -= SYMBOL_REF_BLOCK_OFFSET (base);
600
601 /* If we're going to run a CSE pass, force the anchor into a register.
602 We will then be able to reuse registers for several accesses, if the
603 target costs say that that's worthwhile. */
0a81f074 604 mode = GET_MODE (base);
aacd3885 605 if (!cse_not_expected)
0a81f074 606 base = force_reg (mode, base);
aacd3885 607
0a81f074 608 return replace_equiv_address (x, plus_constant (mode, base, offset));
aacd3885 609}
18ca7dab 610\f
18ca7dab
RK
611/* Copy the value or contents of X to a new temp reg and return that reg. */
612
613rtx
502b8322 614copy_to_reg (rtx x)
18ca7dab 615{
b3694847 616 rtx temp = gen_reg_rtx (GET_MODE (x));
d9b3eb63 617
18ca7dab 618 /* If not an operand, must be an address with PLUS and MULT so
d9b3eb63 619 do the computation. */
18ca7dab
RK
620 if (! general_operand (x, VOIDmode))
621 x = force_operand (x, temp);
d9b3eb63 622
18ca7dab
RK
623 if (x != temp)
624 emit_move_insn (temp, x);
625
626 return temp;
627}
628
629/* Like copy_to_reg but always give the new register mode Pmode
630 in case X is a constant. */
631
632rtx
502b8322 633copy_addr_to_reg (rtx x)
18ca7dab
RK
634{
635 return copy_to_mode_reg (Pmode, x);
636}
637
638/* Like copy_to_reg but always give the new register mode MODE
639 in case X is a constant. */
640
641rtx
ef4bddc2 642copy_to_mode_reg (machine_mode mode, rtx x)
18ca7dab 643{
b3694847 644 rtx temp = gen_reg_rtx (mode);
d9b3eb63 645
18ca7dab 646 /* If not an operand, must be an address with PLUS and MULT so
d9b3eb63 647 do the computation. */
18ca7dab
RK
648 if (! general_operand (x, VOIDmode))
649 x = force_operand (x, temp);
650
5b0264cb 651 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
18ca7dab
RK
652 if (x != temp)
653 emit_move_insn (temp, x);
654 return temp;
655}
656
657/* Load X into a register if it is not already one.
658 Use mode MODE for the register.
659 X should be valid for mode MODE, but it may be a constant which
660 is valid for all integer modes; that's why caller must specify MODE.
661
662 The caller must not alter the value in the register we return,
663 since we mark it as a "constant" register. */
664
665rtx
ef4bddc2 666force_reg (machine_mode mode, rtx x)
18ca7dab 667{
528a80c1
DM
668 rtx temp, set;
669 rtx_insn *insn;
18ca7dab 670
f8cfc6aa 671 if (REG_P (x))
18ca7dab 672 return x;
d9b3eb63 673
e3c8ea67
RH
674 if (general_operand (x, mode))
675 {
676 temp = gen_reg_rtx (mode);
677 insn = emit_move_insn (temp, x);
678 }
679 else
680 {
681 temp = force_operand (x, NULL_RTX);
f8cfc6aa 682 if (REG_P (temp))
e3c8ea67
RH
683 insn = get_last_insn ();
684 else
685 {
686 rtx temp2 = gen_reg_rtx (mode);
687 insn = emit_move_insn (temp2, temp);
688 temp = temp2;
689 }
690 }
62874575 691
18ca7dab 692 /* Let optimizers know that TEMP's value never changes
62874575
RK
693 and that X can be substituted for it. Don't get confused
694 if INSN set something else (such as a SUBREG of TEMP). */
695 if (CONSTANT_P (x)
696 && (set = single_set (insn)) != 0
fd7acc30
RS
697 && SET_DEST (set) == temp
698 && ! rtx_equal_p (x, SET_SRC (set)))
3d238248 699 set_unique_reg_note (insn, REG_EQUAL, x);
e3c8ea67 700
4a4f95d9
RH
701 /* Let optimizers know that TEMP is a pointer, and if so, the
702 known alignment of that pointer. */
703 {
704 unsigned align = 0;
705 if (GET_CODE (x) == SYMBOL_REF)
706 {
707 align = BITS_PER_UNIT;
708 if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
709 align = DECL_ALIGN (SYMBOL_REF_DECL (x));
710 }
711 else if (GET_CODE (x) == LABEL_REF)
712 align = BITS_PER_UNIT;
713 else if (GET_CODE (x) == CONST
714 && GET_CODE (XEXP (x, 0)) == PLUS
715 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
481683e1 716 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
4a4f95d9
RH
717 {
718 rtx s = XEXP (XEXP (x, 0), 0);
719 rtx c = XEXP (XEXP (x, 0), 1);
720 unsigned sa, ca;
721
722 sa = BITS_PER_UNIT;
723 if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
724 sa = DECL_ALIGN (SYMBOL_REF_DECL (s));
725
bd95721f
RH
726 if (INTVAL (c) == 0)
727 align = sa;
728 else
729 {
730 ca = ctz_hwi (INTVAL (c)) * BITS_PER_UNIT;
731 align = MIN (sa, ca);
732 }
4a4f95d9
RH
733 }
734
0a317111 735 if (align || (MEM_P (x) && MEM_POINTER (x)))
4a4f95d9
RH
736 mark_reg_pointer (temp, align);
737 }
738
18ca7dab
RK
739 return temp;
740}
741
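/* Editor's illustrative sketch, not part of the original explow.c: forcing
   a constant into a fresh pseudo.  The emitted move carries a REG_EQUAL
   note, so later passes may substitute the constant; the caller must not
   modify the returned register.  */

static rtx ATTRIBUTE_UNUSED
example_force_constant (void)
{
  return force_reg (SImode, gen_int_mode (42, SImode));
}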
742/* If X is a memory ref, copy its contents to a new temp reg and return
743 that reg. Otherwise, return X. */
744
745rtx
502b8322 746force_not_mem (rtx x)
18ca7dab 747{
b3694847 748 rtx temp;
fe3439b0 749
3c0cb5de 750 if (!MEM_P (x) || GET_MODE (x) == BLKmode)
18ca7dab 751 return x;
fe3439b0 752
18ca7dab 753 temp = gen_reg_rtx (GET_MODE (x));
f8ad8d7c
ZD
754
755 if (MEM_POINTER (x))
756 REG_POINTER (temp) = 1;
757
18ca7dab
RK
758 emit_move_insn (temp, x);
759 return temp;
760}
761
762/* Copy X to TARGET (if it's nonzero and a reg)
763 or to a new temp reg and return that reg.
764 MODE is the mode to use for X in case it is a constant. */
765
766rtx
ef4bddc2 767copy_to_suggested_reg (rtx x, rtx target, machine_mode mode)
18ca7dab 768{
b3694847 769 rtx temp;
18ca7dab 770
f8cfc6aa 771 if (target && REG_P (target))
18ca7dab
RK
772 temp = target;
773 else
774 temp = gen_reg_rtx (mode);
775
776 emit_move_insn (temp, x);
777 return temp;
778}
779\f
cde0f3fd 780/* Return the mode to use to pass or return a scalar of TYPE and MODE.
9ff65789
RK
781 PUNSIGNEDP points to the signedness of the type and may be adjusted
782 to show what signedness to use on extension operations.
783
cde0f3fd
PB
784 FOR_RETURN is nonzero if the caller is promoting the return value
785 of FNDECL, else it is for promoting args. */
9ff65789 786
ef4bddc2
RS
787machine_mode
788promote_function_mode (const_tree type, machine_mode mode, int *punsignedp,
cde0f3fd
PB
789 const_tree funtype, int for_return)
790{
5e617be8
AK
791 /* Called without a type node for a libcall. */
792 if (type == NULL_TREE)
793 {
794 if (INTEGRAL_MODE_P (mode))
795 return targetm.calls.promote_function_mode (NULL_TREE, mode,
796 punsignedp, funtype,
797 for_return);
798 else
799 return mode;
800 }
801
cde0f3fd
PB
802 switch (TREE_CODE (type))
803 {
804 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
805 case REAL_TYPE: case OFFSET_TYPE: case FIXED_POINT_TYPE:
806 case POINTER_TYPE: case REFERENCE_TYPE:
807 return targetm.calls.promote_function_mode (type, mode, punsignedp, funtype,
808 for_return);
809
810 default:
811 return mode;
812 }
813}
814/* Return the mode to use to store a scalar of TYPE and MODE.
815 PUNSIGNEDP points to the signedness of the type and may be adjusted
816 to show what signedness to use on extension operations. */
d4453b7a 817
ef4bddc2
RS
818machine_mode
819promote_mode (const_tree type ATTRIBUTE_UNUSED, machine_mode mode,
b1680483 820 int *punsignedp ATTRIBUTE_UNUSED)
9ff65789 821{
1e3287d0
RG
822#ifdef PROMOTE_MODE
823 enum tree_code code;
824 int unsignedp;
79d22165 825 scalar_mode smode;
1e3287d0
RG
826#endif
827
5e617be8
AK
828 /* For libcalls this is invoked without TYPE from the backends
829 TARGET_PROMOTE_FUNCTION_MODE hooks. Don't do anything in that
830 case. */
831 if (type == NULL_TREE)
832 return mode;
833
cde0f3fd
PB
834 /* FIXME: this is the same logic that was there until GCC 4.4, but we
835 probably want to test POINTERS_EXTEND_UNSIGNED even if PROMOTE_MODE
836 is not defined. The affected targets are M32C, S390, SPARC. */
837#ifdef PROMOTE_MODE
1e3287d0
RG
838 code = TREE_CODE (type);
839 unsignedp = *punsignedp;
9ff65789 840
9ff65789
RK
841 switch (code)
842 {
9ff65789 843 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
325217ed 844 case REAL_TYPE: case OFFSET_TYPE: case FIXED_POINT_TYPE:
79d22165
RS
845 /* Values of these types always have scalar mode. */
846 smode = as_a <scalar_mode> (mode);
847 PROMOTE_MODE (smode, unsignedp, type);
cde0f3fd 848 *punsignedp = unsignedp;
79d22165 849 return smode;
9ff65789 850
ea534b63 851#ifdef POINTERS_EXTEND_UNSIGNED
56a4c9e2 852 case REFERENCE_TYPE:
9ff65789 853 case POINTER_TYPE:
cde0f3fd 854 *punsignedp = POINTERS_EXTEND_UNSIGNED;
d4ebfa65
BE
855 return targetm.addr_space.address_mode
856 (TYPE_ADDR_SPACE (TREE_TYPE (type)));
ea534b63 857#endif
d9b3eb63 858
38a448ca 859 default:
cde0f3fd 860 return mode;
9ff65789 861 }
cde0f3fd 862#else
9ff65789 863 return mode;
cde0f3fd 864#endif
9ff65789 865}
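/* Editor's illustrative note, not part of the original explow.c: on a
   target whose PROMOTE_MODE widens sub-word integers (as many RISC ports
   do), promote_mode maps a QImode 'unsigned char' scalar to SImode and
   lets the target adjust *PUNSIGNEDP to the extension it prefers; without
   PROMOTE_MODE the mode is returned unchanged.  */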
cde0f3fd
PB
866
867
868/* Use one of promote_mode or promote_function_mode to find the promoted
869 mode of DECL. If PUNSIGNEDP is not NULL, store there the unsignedness
870 of DECL after promotion. */
871
ef4bddc2 872machine_mode
cde0f3fd
PB
873promote_decl_mode (const_tree decl, int *punsignedp)
874{
875 tree type = TREE_TYPE (decl);
876 int unsignedp = TYPE_UNSIGNED (type);
ef4bddc2
RS
877 machine_mode mode = DECL_MODE (decl);
878 machine_mode pmode;
cde0f3fd 879
f11a7b6d
AO
880 if (TREE_CODE (decl) == RESULT_DECL && !DECL_BY_REFERENCE (decl))
881 pmode = promote_function_mode (type, mode, &unsignedp,
882 TREE_TYPE (current_function_decl), 1);
883 else if (TREE_CODE (decl) == RESULT_DECL || TREE_CODE (decl) == PARM_DECL)
cde0f3fd 884 pmode = promote_function_mode (type, mode, &unsignedp,
666e3ceb 885 TREE_TYPE (current_function_decl), 2);
cde0f3fd
PB
886 else
887 pmode = promote_mode (type, mode, &unsignedp);
888
889 if (punsignedp)
890 *punsignedp = unsignedp;
891 return pmode;
892}
893
1f9ceff1
AO
894/* Return the promoted mode for name. If it is a named SSA_NAME, it
895 is the same as promote_decl_mode. Otherwise, it is the promoted
896 mode of a temp decl of same type as the SSA_NAME, if we had created
897 one. */
898
899machine_mode
900promote_ssa_mode (const_tree name, int *punsignedp)
901{
902 gcc_assert (TREE_CODE (name) == SSA_NAME);
903
904 /* Partitions holding parms and results must be promoted as expected
905 by function.c. */
906 if (SSA_NAME_VAR (name)
907 && (TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
908 || TREE_CODE (SSA_NAME_VAR (name)) == RESULT_DECL))
f11a7b6d
AO
909 {
910 machine_mode mode = promote_decl_mode (SSA_NAME_VAR (name), punsignedp);
911 if (mode != BLKmode)
912 return mode;
913 }
1f9ceff1
AO
914
915 tree type = TREE_TYPE (name);
916 int unsignedp = TYPE_UNSIGNED (type);
a59b2e42 917 machine_mode pmode = promote_mode (type, TYPE_MODE (type), &unsignedp);
1f9ceff1
AO
918 if (punsignedp)
919 *punsignedp = unsignedp;
920
921 return pmode;
922}
923
924
9ff65789 925\f
9c582551 926/* Controls the behavior of {anti_,}adjust_stack. */
9a08d230
RH
927static bool suppress_reg_args_size;
928
929/* A helper for adjust_stack and anti_adjust_stack. */
930
931static void
932adjust_stack_1 (rtx adjust, bool anti_p)
933{
528a80c1
DM
934 rtx temp;
935 rtx_insn *insn;
9a08d230 936
9a08d230 937 /* Hereafter anti_p means subtract_p. */
581edfa3
TS
938 if (!STACK_GROWS_DOWNWARD)
939 anti_p = !anti_p;
9a08d230
RH
940
941 temp = expand_binop (Pmode,
942 anti_p ? sub_optab : add_optab,
943 stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
944 OPTAB_LIB_WIDEN);
945
946 if (temp != stack_pointer_rtx)
947 insn = emit_move_insn (stack_pointer_rtx, temp);
948 else
949 {
950 insn = get_last_insn ();
951 temp = single_set (insn);
952 gcc_assert (temp != NULL && SET_DEST (temp) == stack_pointer_rtx);
953 }
954
955 if (!suppress_reg_args_size)
68184180 956 add_args_size_note (insn, stack_pointer_delta);
9a08d230
RH
957}
958
18ca7dab
RK
959/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
960 This pops when ADJUST is positive. ADJUST need not be constant. */
961
962void
502b8322 963adjust_stack (rtx adjust)
18ca7dab 964{
18ca7dab
RK
965 if (adjust == const0_rtx)
966 return;
967
1503a7ec
JH
 968 /* We expect all variable-sized adjustments to be a multiple of
969 PREFERRED_STACK_BOUNDARY. */
5284e559
RS
970 poly_int64 const_adjust;
971 if (poly_int_rtx_p (adjust, &const_adjust))
972 stack_pointer_delta -= const_adjust;
1503a7ec 973
9a08d230 974 adjust_stack_1 (adjust, false);
18ca7dab
RK
975}
976
977/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
978 This pushes when ADJUST is positive. ADJUST need not be constant. */
979
980void
502b8322 981anti_adjust_stack (rtx adjust)
18ca7dab 982{
18ca7dab
RK
983 if (adjust == const0_rtx)
984 return;
985
1503a7ec
JH
 986 /* We expect all variable-sized adjustments to be a multiple of
987 PREFERRED_STACK_BOUNDARY. */
5284e559
RS
988 poly_int64 const_adjust;
989 if (poly_int_rtx_p (adjust, &const_adjust))
990 stack_pointer_delta += const_adjust;
1503a7ec 991
9a08d230 992 adjust_stack_1 (adjust, true);
18ca7dab
RK
993}
994
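/* Editor's illustrative sketch, not part of the original explow.c: the two
   entry points are symmetric; a caller that pushes a temporary block of
   stack space typically pairs them like this.  Only constant adjustments
   update stack_pointer_delta.  */

static void ATTRIBUTE_UNUSED
example_push_and_pop_64_bytes (void)
{
  anti_adjust_stack (GEN_INT (64));	/* Allocate 64 bytes.  */
  /* ... use the space below the old stack pointer ...  */
  adjust_stack (GEN_INT (64));		/* Release it again.  */
}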
995/* Round the size of a block to be pushed up to the boundary required
996 by this machine. SIZE is the desired size, which need not be constant. */
997
4dd9b044 998static rtx
502b8322 999round_push (rtx size)
18ca7dab 1000{
32990d5b 1001 rtx align_rtx, alignm1_rtx;
41ee3204 1002
32990d5b
JJ
1003 if (!SUPPORTS_STACK_ALIGNMENT
1004 || crtl->preferred_stack_boundary == MAX_SUPPORTED_STACK_ALIGNMENT)
18ca7dab 1005 {
32990d5b
JJ
1006 int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
1007
1008 if (align == 1)
1009 return size;
1010
1011 if (CONST_INT_P (size))
1012 {
1013 HOST_WIDE_INT new_size = (INTVAL (size) + align - 1) / align * align;
41ee3204 1014
32990d5b
JJ
1015 if (INTVAL (size) != new_size)
1016 size = GEN_INT (new_size);
1017 return size;
1018 }
1019
1020 align_rtx = GEN_INT (align);
1021 alignm1_rtx = GEN_INT (align - 1);
18ca7dab
RK
1022 }
1023 else
1024 {
32990d5b
JJ
1025 /* If crtl->preferred_stack_boundary might still grow, use
1026 virtual_preferred_stack_boundary_rtx instead. This will be
1027 substituted by the right value in vregs pass and optimized
1028 during combine. */
1029 align_rtx = virtual_preferred_stack_boundary_rtx;
0a81f074
RS
1030 alignm1_rtx = force_operand (plus_constant (Pmode, align_rtx, -1),
1031 NULL_RTX);
18ca7dab 1032 }
41ee3204 1033
32990d5b
JJ
1034 /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
1035 but we know it can't. So add ourselves and then do
1036 TRUNC_DIV_EXPR. */
1037 size = expand_binop (Pmode, add_optab, size, alignm1_rtx,
1038 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1039 size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, align_rtx,
1040 NULL_RTX, 1);
1041 size = expand_mult (Pmode, size, align_rtx, NULL_RTX, 1);
1042
18ca7dab
RK
1043 return size;
1044}
1045\f
59257ff7
RK
1046/* Save the stack pointer for the purpose in SAVE_LEVEL. PSAVE is a pointer
1047 to a previously-created save area. If no save area has been allocated,
1048 this function will allocate one. If a save area is specified, it
9eac0f2a 1049 must be of the proper mode. */
59257ff7
RK
1050
1051void
9eac0f2a 1052emit_stack_save (enum save_level save_level, rtx *psave)
59257ff7
RK
1053{
1054 rtx sa = *psave;
1055 /* The default is that we use a move insn and save in a Pmode object. */
4476e1a0 1056 rtx_insn *(*fcn) (rtx, rtx) = gen_move_insn;
ef4bddc2 1057 machine_mode mode = STACK_SAVEAREA_MODE (save_level);
59257ff7
RK
1058
1059 /* See if this machine has anything special to do for this kind of save. */
1060 switch (save_level)
1061 {
59257ff7 1062 case SAVE_BLOCK:
4476e1a0
RS
1063 if (targetm.have_save_stack_block ())
1064 fcn = targetm.gen_save_stack_block;
59257ff7 1065 break;
59257ff7 1066 case SAVE_FUNCTION:
4476e1a0
RS
1067 if (targetm.have_save_stack_function ())
1068 fcn = targetm.gen_save_stack_function;
59257ff7 1069 break;
59257ff7 1070 case SAVE_NONLOCAL:
4476e1a0
RS
1071 if (targetm.have_save_stack_nonlocal ())
1072 fcn = targetm.gen_save_stack_nonlocal;
59257ff7 1073 break;
38a448ca
RH
1074 default:
1075 break;
59257ff7
RK
1076 }
1077
1078 /* If there is no save area and we have to allocate one, do so. Otherwise
1079 verify the save area is the proper mode. */
1080
1081 if (sa == 0)
1082 {
1083 if (mode != VOIDmode)
1084 {
1085 if (save_level == SAVE_NONLOCAL)
1086 *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
1087 else
1088 *psave = sa = gen_reg_rtx (mode);
1089 }
1090 }
59257ff7 1091
9eac0f2a
RH
1092 do_pending_stack_adjust ();
1093 if (sa != 0)
1094 sa = validize_mem (sa);
1095 emit_insn (fcn (sa, stack_pointer_rtx));
59257ff7
RK
1096}
1097
1098/* Restore the stack pointer for the purpose in SAVE_LEVEL. SA is the save
9eac0f2a 1099 area made by emit_stack_save. If it is zero, we have nothing to do. */
59257ff7
RK
1100
1101void
9eac0f2a 1102emit_stack_restore (enum save_level save_level, rtx sa)
59257ff7
RK
1103{
1104 /* The default is that we use a move insn. */
4476e1a0 1105 rtx_insn *(*fcn) (rtx, rtx) = gen_move_insn;
59257ff7 1106
50025f91
TV
1107 /* If stack_realign_drap, the x86 backend emits a prologue that aligns both
1108 STACK_POINTER and HARD_FRAME_POINTER.
1109 If stack_realign_fp, the x86 backend emits a prologue that aligns only
1110 STACK_POINTER. This renders the HARD_FRAME_POINTER unusable for accessing
1111 aligned variables, which is reflected in ix86_can_eliminate.
1112 We normally still have the realigned STACK_POINTER that we can use.
1113 But if there is a stack restore still present at reload, it can trigger
1114 mark_not_eliminable for the STACK_POINTER, leaving no way to eliminate
1115 FRAME_POINTER into a hard reg.
1116 To prevent this situation, we force need_drap if we emit a stack
1117 restore. */
1118 if (SUPPORTS_STACK_ALIGNMENT)
1119 crtl->need_drap = true;
1120
59257ff7
RK
1121 /* See if this machine has anything special to do for this kind of save. */
1122 switch (save_level)
1123 {
59257ff7 1124 case SAVE_BLOCK:
4476e1a0
RS
1125 if (targetm.have_restore_stack_block ())
1126 fcn = targetm.gen_restore_stack_block;
59257ff7 1127 break;
59257ff7 1128 case SAVE_FUNCTION:
4476e1a0
RS
1129 if (targetm.have_restore_stack_function ())
1130 fcn = targetm.gen_restore_stack_function;
59257ff7 1131 break;
59257ff7 1132 case SAVE_NONLOCAL:
4476e1a0
RS
1133 if (targetm.have_restore_stack_nonlocal ())
1134 fcn = targetm.gen_restore_stack_nonlocal;
59257ff7 1135 break;
38a448ca
RH
1136 default:
1137 break;
59257ff7
RK
1138 }
1139
d072107f 1140 if (sa != 0)
260f91c2
DJ
1141 {
1142 sa = validize_mem (sa);
1143 /* These clobbers prevent the scheduler from moving
1144 references to variable arrays below the code
4b7e68e7 1145 that deletes (pops) the arrays. */
c41c1387
RS
1146 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1147 emit_clobber (gen_rtx_MEM (BLKmode, stack_pointer_rtx));
260f91c2 1148 }
d072107f 1149
a494ed43
EB
1150 discard_pending_stack_adjust ();
1151
9eac0f2a 1152 emit_insn (fcn (stack_pointer_rtx, sa));
59257ff7 1153}
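/* Editor's illustrative sketch, not part of the original explow.c: the
   save/restore pair brackets code that makes temporary stack allocations.
   Passing a NULL_RTX save area makes emit_stack_save allocate one itself
   (register or stack slot, per STACK_SAVEAREA_MODE).  */

static void ATTRIBUTE_UNUSED
example_bracket_temporary_allocation (void)
{
  rtx save_area = NULL_RTX;
  emit_stack_save (SAVE_BLOCK, &save_area);
  /* ... emit insns that push below the stack pointer ...  */
  emit_stack_restore (SAVE_BLOCK, save_area);
}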
6de9cd9a
DN
1154
1155/* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
d33606c3
EB
1156 function. This should be called whenever we allocate or deallocate
1157 dynamic stack space. */
6de9cd9a
DN
1158
1159void
1160update_nonlocal_goto_save_area (void)
1161{
1162 tree t_save;
1163 rtx r_save;
1164
1165 /* The nonlocal_goto_save_area object is an array of N pointers. The
1166 first one is used for the frame pointer save; the rest are sized by
1167 STACK_SAVEAREA_MODE. Create a reference to array index 1, the first
1168 of the stack save area slots. */
6bbec3e1
L
1169 t_save = build4 (ARRAY_REF,
1170 TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
1171 cfun->nonlocal_goto_save_area,
3244e67d 1172 integer_one_node, NULL_TREE, NULL_TREE);
6de9cd9a
DN
1173 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
1174
9eac0f2a 1175 emit_stack_save (SAVE_NONLOCAL, &r_save);
6de9cd9a 1176}
d33606c3
EB
1177
1178/* Record a new stack level for the current function. This should be called
1179 whenever we allocate or deallocate dynamic stack space. */
1180
1181void
1182record_new_stack_level (void)
1183{
1184 /* Record the new stack level for nonlocal gotos. */
1185 if (cfun->nonlocal_goto_save_area)
1186 update_nonlocal_goto_save_area ();
1187
1188 /* Record the new stack level for SJLJ exceptions. */
1189 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
1190 update_sjlj_context ();
1191}
0358d788 1192
7072df0a 1193/* Return an rtx doing runtime alignment to REQUIRED_ALIGN on TARGET. */
0358d788
RL
1194
1195rtx
7072df0a
DV
1196align_dynamic_address (rtx target, unsigned required_align)
1197{
1198 /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
1199 but we know it can't. So add ourselves and then do
1200 TRUNC_DIV_EXPR. */
1201 target = expand_binop (Pmode, add_optab, target,
1202 gen_int_mode (required_align / BITS_PER_UNIT - 1,
1203 Pmode),
1204 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1205 target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
1206 gen_int_mode (required_align / BITS_PER_UNIT,
1207 Pmode),
1208 NULL_RTX, 1);
1209 target = expand_mult (Pmode, target,
1210 gen_int_mode (required_align / BITS_PER_UNIT,
1211 Pmode),
1212 NULL_RTX, 1);
18ca7dab 1213
7072df0a
DV
1214 return target;
1215}
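/* Editor's illustrative note, not part of the original explow.c: the
   add/divide/multiply sequence above is plain ceiling rounding.  With
   REQUIRED_ALIGN of 128 bits (16 bytes) and a runtime value of 100 in
   TARGET, it computes (100 + 15) / 16 * 16 == 112, the next 16-byte
   boundary.  */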
18ca7dab 1216
7072df0a
DV
1217/* Return an rtx through *PSIZE, representing the size of an area of memory to
1218 be dynamically pushed on the stack.
1219
1220 *PSIZE is an rtx representing the size of the area.
091ad0b9 1221
3a42502d 1222 SIZE_ALIGN is the alignment (in bits) that we know SIZE has. This
7072df0a 1223 parameter may be zero. If so, a proper value will be extracted
3a42502d
RH
1224 from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.
1225
1226 REQUIRED_ALIGN is the alignment (in bits) required for the region
1227 of memory.
d3c12306 1228
7072df0a
DV
1229 If PSTACK_USAGE_SIZE is not NULL it points to a value that is increased for
1230 the additional size returned. */
1231void
1232get_dynamic_stack_size (rtx *psize, unsigned size_align,
1233 unsigned required_align,
1234 HOST_WIDE_INT *pstack_usage_size)
18ca7dab 1235{
7072df0a 1236 rtx size = *psize;
d3c12306 1237
18ca7dab
RK
1238 /* Ensure the size is in the proper mode. */
1239 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
1240 size = convert_to_mode (Pmode, size, 1);
1241
3a42502d
RH
1242 if (CONST_INT_P (size))
1243 {
1244 unsigned HOST_WIDE_INT lsb;
1245
1246 lsb = INTVAL (size);
1247 lsb &= -lsb;
1248
1249 /* Watch out for overflow truncating to "unsigned". */
1250 if (lsb > UINT_MAX / BITS_PER_UNIT)
1251 size_align = 1u << (HOST_BITS_PER_INT - 1);
1252 else
1253 size_align = (unsigned)lsb * BITS_PER_UNIT;
1254 }
1255 else if (size_align < BITS_PER_UNIT)
1256 size_align = BITS_PER_UNIT;
1257
34831f3e
RH
 1258 /* We can't attempt to minimize the necessary alignment, because we don't
1259 know the final value of preferred_stack_boundary yet while executing
1260 this code. */
1261 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
1262 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
1263
18ca7dab 1264 /* We will need to ensure that the address we return is aligned to
4fc0c9c8
DV
1265 REQUIRED_ALIGN. At this point in the compilation, we don't always
1266 know the final value of the STACK_DYNAMIC_OFFSET used in function.c
1267 (it might depend on the size of the outgoing parameter lists, for
1268 example), so we must preventively align the value. We leave space
1269 in SIZE for the hole that might result from the alignment operation. */
1ecad98e 1270
b9f92c0b
EB
1271 unsigned known_align = REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM);
1272 if (known_align == 0)
1273 known_align = BITS_PER_UNIT;
1274 if (required_align > known_align)
ae85ad3a 1275 {
b9f92c0b 1276 unsigned extra = (required_align - known_align) / BITS_PER_UNIT;
ae85ad3a
WD
1277 size = plus_constant (Pmode, size, extra);
1278 size = force_operand (size, NULL_RTX);
b9f92c0b
EB
1279 if (size_align > known_align)
1280 size_align = known_align;
ae85ad3a
WD
1281
1282 if (flag_stack_usage_info && pstack_usage_size)
1283 *pstack_usage_size += extra;
1284 }
1d9d04f8 1285
18ca7dab 1286 /* Round the size to a multiple of the required stack alignment.
1135a133 1287 Since the stack is presumed to be rounded before this allocation,
18ca7dab
RK
1288 this will maintain the required alignment.
1289
1290 If the stack grows downward, we could save an insn by subtracting
1291 SIZE from the stack pointer and then aligning the stack pointer.
1292 The problem with this is that the stack pointer may be unaligned
1293 between the execution of the subtraction and alignment insns and
1294 some machines do not allow this. Even on those that do, some
1295 signal handlers malfunction if a signal should occur between those
1296 insns. Since this is an extremely rare event, we have no reliable
1297 way of knowing which systems have this problem. So we avoid even
1298 momentarily mis-aligning the stack. */
3a42502d 1299 if (size_align % MAX_SUPPORTED_STACK_ALIGNMENT != 0)
d3c12306
EB
1300 {
1301 size = round_push (size);
18ca7dab 1302
7072df0a 1303 if (flag_stack_usage_info && pstack_usage_size)
d3c12306 1304 {
32990d5b 1305 int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
7072df0a
DV
1306 *pstack_usage_size =
1307 (*pstack_usage_size + align - 1) / align * align;
d3c12306
EB
1308 }
1309 }
1310
7072df0a
DV
1311 *psize = size;
1312}
1313
8c1dd970
JL
1314/* Return the number of bytes to "protect" on the stack for -fstack-check.
1315
f569026a
EB
1316 "protect" in the context of -fstack-check means how many bytes we need
1317 to always ensure are available on the stack; as a consequence, this is
1318 also how many bytes are first skipped when probing the stack.
8c1dd970
JL
1319
1320 On some targets we want to reuse the -fstack-check prologue support
1321 to give a degree of protection against stack clashing style attacks.
1322
1323 In that scenario we do not want to skip bytes before probing as that
1324 would render the stack clash protections useless.
1325
f569026a
EB
1326 So we never use STACK_CHECK_PROTECT directly. Instead we indirectly
1327 use it through this helper, which allows to provide different values
1328 for -fstack-check and -fstack-clash-protection. */
1329
8c1dd970
JL
1330HOST_WIDE_INT
1331get_stack_check_protect (void)
1332{
1333 if (flag_stack_clash_protection)
1334 return 0;
f569026a 1335
8c1dd970
JL
1336 return STACK_CHECK_PROTECT;
1337}
1338
7072df0a
DV
1339/* Return an rtx representing the address of an area of memory dynamically
1340 pushed on the stack.
1341
1342 Any required stack pointer alignment is preserved.
1343
1344 SIZE is an rtx representing the size of the area.
1345
1346 SIZE_ALIGN is the alignment (in bits) that we know SIZE has. This
1347 parameter may be zero. If so, a proper value will be extracted
1348 from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.
1349
1350 REQUIRED_ALIGN is the alignment (in bits) required for the region
1351 of memory.
1352
9e878cf1
EB
1353 MAX_SIZE is an upper bound for SIZE, if SIZE is not constant, or -1 if
1354 no such upper bound is known.
1355
7072df0a
DV
1356 If CANNOT_ACCUMULATE is set to TRUE, the caller guarantees that the
1357 stack space allocated by the generated code cannot be added with itself
1358 in the course of the execution of the function. It is always safe to
1359 pass FALSE here and the following criterion is sufficient in order to
1360 pass TRUE: every path in the CFG that starts at the allocation point and
1361 loops to it executes the associated deallocation code. */
1362
1363rtx
1364allocate_dynamic_stack_space (rtx size, unsigned size_align,
9e878cf1
EB
1365 unsigned required_align,
1366 HOST_WIDE_INT max_size,
1367 bool cannot_accumulate)
7072df0a
DV
1368{
1369 HOST_WIDE_INT stack_usage_size = -1;
1370 rtx_code_label *final_label;
1371 rtx final_target, target;
1372
1373 /* If we're asking for zero bytes, it doesn't matter what we point
1374 to since we can't dereference it. But return a reasonable
1375 address anyway. */
1376 if (size == const0_rtx)
1377 return virtual_stack_dynamic_rtx;
1378
1379 /* Otherwise, show we're calling alloca or equivalent. */
1380 cfun->calls_alloca = 1;
1381
1382 /* If stack usage info is requested, look into the size we are passed.
1383 We need to do so this early to avoid the obfuscation that may be
1384 introduced later by the various alignment operations. */
1385 if (flag_stack_usage_info)
1386 {
1387 if (CONST_INT_P (size))
1388 stack_usage_size = INTVAL (size);
1389 else if (REG_P (size))
1390 {
1391 /* Look into the last emitted insn and see if we can deduce
1392 something for the register. */
1393 rtx_insn *insn;
1394 rtx set, note;
1395 insn = get_last_insn ();
1396 if ((set = single_set (insn)) && rtx_equal_p (SET_DEST (set), size))
1397 {
1398 if (CONST_INT_P (SET_SRC (set)))
1399 stack_usage_size = INTVAL (SET_SRC (set));
1400 else if ((note = find_reg_equal_equiv_note (insn))
1401 && CONST_INT_P (XEXP (note, 0)))
1402 stack_usage_size = INTVAL (XEXP (note, 0));
1403 }
1404 }
1405
9e878cf1
EB
1406 /* If the size is not constant, try the maximum size. */
1407 if (stack_usage_size < 0)
1408 stack_usage_size = max_size;
1409
1410 /* If the size is still not constant, we can't say anything. */
1411 if (stack_usage_size < 0)
7072df0a
DV
1412 {
1413 current_function_has_unbounded_dynamic_stack_size = 1;
1414 stack_usage_size = 0;
1415 }
1416 }
1417
1418 get_dynamic_stack_size (&size, size_align, required_align, &stack_usage_size);
1419
3a42502d 1420 target = gen_reg_rtx (Pmode);
7458026b 1421
d3c12306
EB
1422 /* The size is supposed to be fully adjusted at this point so record it
1423 if stack usage info is requested. */
a11e0df4 1424 if (flag_stack_usage_info)
d3c12306
EB
1425 {
1426 current_function_dynamic_stack_size += stack_usage_size;
1427
1428 /* ??? This is gross but the only safe stance in the absence
1429 of stack usage oriented flow analysis. */
1430 if (!cannot_accumulate)
1431 current_function_has_unbounded_dynamic_stack_size = 1;
1432 }
18ca7dab 1433
1c84b798
ILT
1434 do_pending_stack_adjust ();
1435
528a80c1 1436 final_label = NULL;
7458026b
ILT
1437 final_target = NULL_RTX;
1438
1439 /* If we are splitting the stack, we need to ask the backend whether
1440 there is enough room on the current stack. If there isn't, or if
 1441 the backend doesn't know how to tell us, then we need to call a
1442 function to allocate memory in some other way. This memory will
1443 be released when we release the current stack segment. The
1444 effect is that stack allocation becomes less efficient, but at
1445 least it doesn't cause a stack overflow. */
1446 if (flag_split_stack)
1447 {
528a80c1
DM
1448 rtx_code_label *available_label;
1449 rtx ask, space, func;
7458026b 1450
528a80c1 1451 available_label = NULL;
7458026b 1452
10169a8b 1453 if (targetm.have_split_stack_space_check ())
7458026b
ILT
1454 {
1455 available_label = gen_label_rtx ();
1456
1457 /* This instruction will branch to AVAILABLE_LABEL if there
1458 are SIZE bytes available on the stack. */
10169a8b
RS
1459 emit_insn (targetm.gen_split_stack_space_check
1460 (size, available_label));
7458026b 1461 }
7458026b 1462
c3928dde 1463 /* The __morestack_allocate_stack_space function will allocate
c070a3b9
ILT
1464 memory using malloc. If the alignment of the memory returned
1465 by malloc does not meet REQUIRED_ALIGN, we increase SIZE to
1466 make sure we allocate enough space. */
1467 if (MALLOC_ABI_ALIGNMENT >= required_align)
1468 ask = size;
1469 else
4fc0c9c8
DV
1470 ask = expand_binop (Pmode, add_optab, size,
1471 gen_int_mode (required_align / BITS_PER_UNIT - 1,
1472 Pmode),
1473 NULL_RTX, 1, OPTAB_LIB_WIDEN);
c3928dde 1474
7458026b
ILT
1475 func = init_one_libfunc ("__morestack_allocate_stack_space");
1476
1477 space = emit_library_call_value (func, target, LCT_NORMAL, Pmode,
db69559b 1478 ask, Pmode);
7458026b
ILT
1479
1480 if (available_label == NULL_RTX)
1481 return space;
1482
1483 final_target = gen_reg_rtx (Pmode);
7458026b
ILT
1484
1485 emit_move_insn (final_target, space);
1486
1487 final_label = gen_label_rtx ();
1488 emit_jump (final_label);
1489
1490 emit_label (available_label);
1491 }
1492
1503a7ec 1493 /* We ought always to be called at the top level, and the stack ought to be
a1f300c0 1494 aligned properly. */
a20c5714
RS
1495 gcc_assert (multiple_p (stack_pointer_delta,
1496 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT));
1503a7ec 1497
d809253a
EB
1498 /* If needed, check that we have the required amount of stack. Take into
1499 account what has already been checked. */
1500 if (STACK_CHECK_MOVING_SP)
1501 ;
1502 else if (flag_stack_check == GENERIC_STACK_CHECK)
b38f3813
EB
1503 probe_stack_range (STACK_OLD_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE,
1504 size);
1505 else if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK)
8c1dd970 1506 probe_stack_range (get_stack_check_protect (), size);
edff2491 1507
efec771a
RH
1508 /* Don't let anti_adjust_stack emit notes. */
1509 suppress_reg_args_size = true;
1510
18ca7dab
RK
1511 /* Perform the required allocation from the stack. Some systems do
1512 this differently than simply incrementing/decrementing from the
38a448ca 1513 stack pointer, such as acquiring the space by calling malloc(). */
10169a8b 1514 if (targetm.have_allocate_stack ())
18ca7dab 1515 {
99b1c316 1516 class expand_operand ops[2];
4b6c1672
RK
1517 /* We don't have to check against the predicate for operand 0 since
1518 TARGET is known to be a pseudo of the proper mode, which must
a5c7d693
RS
1519 be valid for the operand. */
1520 create_fixed_operand (&ops[0], target);
1521 create_convert_operand_to (&ops[1], size, STACK_SIZE_MODE, true);
10169a8b 1522 expand_insn (targetm.code_for_allocate_stack, 2, ops);
18ca7dab
RK
1523 }
1524 else
ea534b63 1525 {
a20c5714 1526 poly_int64 saved_stack_pointer_delta;
32990d5b 1527
581edfa3
TS
1528 if (!STACK_GROWS_DOWNWARD)
1529 emit_move_insn (target, virtual_stack_dynamic_rtx);
a157febd
GK
1530
1531 /* Check stack bounds if necessary. */
e3b5732b 1532 if (crtl->limit_stack)
a157febd
GK
1533 {
1534 rtx available;
528a80c1 1535 rtx_code_label *space_available = gen_label_rtx ();
581edfa3
TS
1536 if (STACK_GROWS_DOWNWARD)
1537 available = expand_binop (Pmode, sub_optab,
1538 stack_pointer_rtx, stack_limit_rtx,
1539 NULL_RTX, 1, OPTAB_WIDEN);
1540 else
1541 available = expand_binop (Pmode, sub_optab,
1542 stack_limit_rtx, stack_pointer_rtx,
1543 NULL_RTX, 1, OPTAB_WIDEN);
1544
a157febd 1545 emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
a06ef755 1546 space_available);
eb6f47fb
RS
1547 if (targetm.have_trap ())
1548 emit_insn (targetm.gen_trap ());
a157febd 1549 else
a157febd
GK
1550 error ("stack limits not supported on this target");
1551 emit_barrier ();
1552 emit_label (space_available);
1553 }
1554
32990d5b 1555 saved_stack_pointer_delta = stack_pointer_delta;
9a08d230 1556
f569026a
EB
1557 /* If stack checking or stack clash protection is requested,
1558 then probe the stack while allocating space from it. */
d809253a 1559 if (flag_stack_check && STACK_CHECK_MOVING_SP)
c35af30f 1560 anti_adjust_stack_and_probe (size, false);
8c1dd970
JL
1561 else if (flag_stack_clash_protection)
1562 anti_adjust_stack_and_probe_stack_clash (size);
d809253a
EB
1563 else
1564 anti_adjust_stack (size);
9a08d230 1565
32990d5b
JJ
1566 /* Even if size is constant, don't modify stack_pointer_delta.
1567 The constant size alloca should preserve
1568 crtl->preferred_stack_boundary alignment. */
1569 stack_pointer_delta = saved_stack_pointer_delta;
d5457140 1570
581edfa3
TS
1571 if (STACK_GROWS_DOWNWARD)
1572 emit_move_insn (target, virtual_stack_dynamic_rtx);
38a448ca 1573 }
18ca7dab 1574
efec771a
RH
1575 suppress_reg_args_size = false;
1576
3a42502d
RH
1577 /* Finish up the split stack handling. */
1578 if (final_label != NULL_RTX)
1579 {
1580 gcc_assert (flag_split_stack);
1581 emit_move_insn (final_target, target);
1582 emit_label (final_label);
1583 target = final_target;
1584 }
1585
7072df0a 1586 target = align_dynamic_address (target, required_align);
d9b3eb63 1587
3a42502d
RH
1588 /* Now that we've committed to a return value, mark its alignment. */
1589 mark_reg_pointer (target, required_align);
1590
d33606c3
EB
1591 /* Record the new stack level. */
1592 record_new_stack_level ();
15fc0026 1593
18ca7dab
RK
1594 return target;
1595}
7072df0a
DV
1596
1597/* Return an rtx representing the address of an area of memory already
1598 statically pushed onto the stack in the virtual stack vars area. (It is
1599 assumed that the area is allocated in the function prologue.)
1600
1601 Any required stack pointer alignment is preserved.
1602
1603 OFFSET is the offset of the area into the virtual stack vars area.
1604
1605 REQUIRED_ALIGN is the alignment (in bits) required for the region
0854b584
MM
1606 of memory.
1607
1608 BASE is the rtx of the base of this virtual stack vars area.
1609 The only time this is not `virtual_stack_vars_rtx` is when tagging pointers
1610 on the stack. */
7072df0a
DV
1611
1612rtx
0854b584 1613get_dynamic_stack_base (poly_int64 offset, unsigned required_align, rtx base)
7072df0a
DV
1614{
1615 rtx target;
1616
1617 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
1618 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
1619
1620 target = gen_reg_rtx (Pmode);
0854b584 1621 emit_move_insn (target, base);
7072df0a
DV
1622 target = expand_binop (Pmode, add_optab, target,
1623 gen_int_mode (offset, Pmode),
1624 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1625 target = align_dynamic_address (target, required_align);
1626
1627 /* Now that we've committed to a return value, mark its alignment. */
1628 mark_reg_pointer (target, required_align);
1629
1630 return target;
1631}
18ca7dab 1632\f
d9b3eb63 1633/* A front end may want to override GCC's stack checking by providing a
14a774a9
RK
1634 run-time routine to call to check the stack, so provide a mechanism for
1635 calling that routine. */
1636
e2500fed 1637static GTY(()) rtx stack_check_libfunc;
14a774a9
RK
1638
1639void
d477d1fe 1640set_stack_check_libfunc (const char *libfunc_name)
14a774a9 1641{
d477d1fe
SB
1642 gcc_assert (stack_check_libfunc == NULL_RTX);
1643 stack_check_libfunc = gen_rtx_SYMBOL_REF (Pmode, libfunc_name);
3cf3da88
EB
1644 tree decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
1645 get_identifier (libfunc_name), void_type_node);
1646 DECL_EXTERNAL (decl) = 1;
1647 SET_SYMBOL_REF_DECL (stack_check_libfunc, decl);
14a774a9
RK
1648}
1649\f
edff2491
RK
1650/* Emit one stack probe at ADDRESS, an address within the stack. */
1651
260c8ba3 1652void
502b8322 1653emit_stack_probe (rtx address)
edff2491 1654{
10169a8b 1655 if (targetm.have_probe_stack_address ())
5c35bc3e 1656 {
99b1c316 1657 class expand_operand ops[1];
5c35bc3e
KT
1658 insn_code icode = targetm.code_for_probe_stack_address;
1659 create_address_operand (ops, address);
1660 maybe_legitimize_operands (icode, 0, 1, ops);
1661 expand_insn (icode, 1, ops);
1662 }
7b84aac0 1663 else
7b84aac0
EB
1664 {
1665 rtx memref = gen_rtx_MEM (word_mode, address);
edff2491 1666
7b84aac0 1667 MEM_VOLATILE_P (memref) = 1;
5c35bc3e 1668 memref = validize_mem (memref);
edff2491 1669
7b84aac0 1670 /* See if we have an insn to probe the stack. */
10169a8b 1671 if (targetm.have_probe_stack ())
5c35bc3e 1672 emit_insn (targetm.gen_probe_stack (memref));
7b84aac0 1673 else
5c35bc3e 1674 emit_move_insn (memref, const0_rtx);
7b84aac0 1675 }
edff2491
RK
1676}
1677
d9b3eb63 1678/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
d809253a
EB
 1679   FIRST is a constant and SIZE is a Pmode RTX.  These are offsets from
1680 the current stack pointer. STACK_GROWS_DOWNWARD says whether to add
1681 or subtract them from the stack pointer. */
1682
1683#define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP)
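/* For example, with STACK_CHECK_PROBE_INTERVAL_EXP of 12 (a typical,
   target-overridable value), PROBE_INTERVAL is 4096 bytes, i.e. one
   probe per 4 KiB page.  */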
edff2491 1684
62f9f30b 1685#if STACK_GROWS_DOWNWARD
edff2491 1686#define STACK_GROW_OP MINUS
d809253a
EB
1687#define STACK_GROW_OPTAB sub_optab
1688#define STACK_GROW_OFF(off) -(off)
edff2491
RK
1689#else
1690#define STACK_GROW_OP PLUS
d809253a
EB
1691#define STACK_GROW_OPTAB add_optab
1692#define STACK_GROW_OFF(off) (off)
edff2491
RK
1693#endif
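/* For example, when the stack grows downward STACK_GROW_OFF (16) is -16,
   so an address formed as stack_pointer_rtx plus STACK_GROW_OFF (off)
   lies in the direction of stack growth.  */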
1694
1695void
502b8322 1696probe_stack_range (HOST_WIDE_INT first, rtx size)
edff2491 1697{
4b6c1672
RK
1698 /* First ensure SIZE is Pmode. */
1699 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
1700 size = convert_to_mode (Pmode, size, 1);
1701
d809253a
EB
1702 /* Next see if we have a function to check the stack. */
1703 if (stack_check_libfunc)
f5f5363f 1704 {
d809253a 1705 rtx addr = memory_address (Pmode,
2b3aadfc
RH
1706 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1707 stack_pointer_rtx,
0a81f074
RS
1708 plus_constant (Pmode,
1709 size, first)));
db69559b
RS
1710 emit_library_call (stack_check_libfunc, LCT_THROW, VOIDmode,
1711 addr, Pmode);
f5f5363f 1712 }
14a774a9 1713
d809253a 1714 /* Next see if we have an insn to check the stack. */
10169a8b 1715 else if (targetm.have_check_stack ())
edff2491 1716 {
99b1c316 1717 class expand_operand ops[1];
d809253a
EB
1718 rtx addr = memory_address (Pmode,
1719 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1720 stack_pointer_rtx,
0a81f074
RS
1721 plus_constant (Pmode,
1722 size, first)));
d6a6a07a 1723 bool success;
a5c7d693 1724 create_input_operand (&ops[0], addr, Pmode);
10169a8b 1725 success = maybe_expand_insn (targetm.code_for_check_stack, 1, ops);
d6a6a07a 1726 gcc_assert (success);
edff2491 1727 }
edff2491 1728
d809253a
EB
1729 /* Otherwise we have to generate explicit probes. If we have a constant
1730 small number of them to generate, that's the easy case. */
1731 else if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
edff2491 1732 {
d809253a
EB
1733 HOST_WIDE_INT isize = INTVAL (size), i;
1734 rtx addr;
1735
1736 /* Probe at FIRST + N * PROBE_INTERVAL for values of N from 1 until
1737 it exceeds SIZE. If only one probe is needed, this will not
1738 generate any code. Then probe at FIRST + SIZE. */
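      /* Illustration with hypothetical values: with PROBE_INTERVAL == 4096,
	 FIRST == 0 and SIZE == 10000, this emits probes at offsets 4096,
	 8192 and 10000 from the stack pointer, in the direction of stack
	 growth.  */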
1739 for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
1740 {
1741 addr = memory_address (Pmode,
0a81f074 1742 plus_constant (Pmode, stack_pointer_rtx,
d809253a
EB
1743 STACK_GROW_OFF (first + i)));
1744 emit_stack_probe (addr);
1745 }
1746
1747 addr = memory_address (Pmode,
0a81f074 1748 plus_constant (Pmode, stack_pointer_rtx,
d809253a
EB
1749 STACK_GROW_OFF (first + isize)));
1750 emit_stack_probe (addr);
edff2491
RK
1751 }
1752
d809253a
EB
1753 /* In the variable case, do the same as above, but in a loop. Note that we
1754 must be extra careful with variables wrapping around because we might be
1755 at the very top (or the very bottom) of the address space and we have to
1756 be able to handle this case properly; in particular, we use an equality
1757 test for the loop condition. */
edff2491
RK
1758 else
1759 {
d809253a 1760 rtx rounded_size, rounded_size_op, test_addr, last_addr, temp;
528a80c1
DM
1761 rtx_code_label *loop_lab = gen_label_rtx ();
1762 rtx_code_label *end_lab = gen_label_rtx ();
edff2491 1763
d809253a
EB
1764 /* Step 1: round SIZE to the previous multiple of the interval. */
1765
1766 /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL */
1767 rounded_size
69a59f0f
RS
1768 = simplify_gen_binary (AND, Pmode, size,
1769 gen_int_mode (-PROBE_INTERVAL, Pmode));
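      /* E.g. with a hypothetical PROBE_INTERVAL of 4096, a SIZE of 10000
	 rounds down to a ROUNDED_SIZE of 8192.  */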
d809253a
EB
1770 rounded_size_op = force_operand (rounded_size, NULL_RTX);
1771
1772
1773 /* Step 2: compute initial and final value of the loop counter. */
1774
1775 /* TEST_ADDR = SP + FIRST. */
1776 test_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1777 stack_pointer_rtx,
4789c0ce
RS
1778 gen_int_mode (first, Pmode)),
1779 NULL_RTX);
d809253a
EB
1780
1781 /* LAST_ADDR = SP + FIRST + ROUNDED_SIZE. */
1782 last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1783 test_addr,
1784 rounded_size_op), NULL_RTX);
1785
1786
1787 /* Step 3: the loop
1788
1789 while (TEST_ADDR != LAST_ADDR)
1790 {
1791 TEST_ADDR = TEST_ADDR + PROBE_INTERVAL
1792 probe at TEST_ADDR
1793 }
1794
1795 probes at FIRST + N * PROBE_INTERVAL for values of N from 1
1796 until it is equal to ROUNDED_SIZE. */
edff2491
RK
1797
1798 emit_label (loop_lab);
edff2491 1799
d809253a
EB
1800 /* Jump to END_LAB if TEST_ADDR == LAST_ADDR. */
1801 emit_cmp_and_jump_insns (test_addr, last_addr, EQ, NULL_RTX, Pmode, 1,
1802 end_lab);
1803
1804 /* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL. */
1805 temp = expand_binop (Pmode, STACK_GROW_OPTAB, test_addr,
2f1cd2eb 1806 gen_int_mode (PROBE_INTERVAL, Pmode), test_addr,
edff2491 1807 1, OPTAB_WIDEN);
edff2491 1808
5b0264cb 1809 gcc_assert (temp == test_addr);
edff2491 1810
d809253a
EB
1811 /* Probe at TEST_ADDR. */
1812 emit_stack_probe (test_addr);
1813
1814 emit_jump (loop_lab);
1815
edff2491
RK
1816 emit_label (end_lab);
1817
d809253a
EB
1818
1819 /* Step 4: probe at FIRST + SIZE if we cannot assert at compile-time
1820 that SIZE is equal to ROUNDED_SIZE. */
1821
1822 /* TEMP = SIZE - ROUNDED_SIZE. */
1823 temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
1824 if (temp != const0_rtx)
1825 {
1826 rtx addr;
1827
32990d5b 1828 if (CONST_INT_P (temp))
d809253a
EB
1829 {
 1830	      /* Use [base + disp] addressing mode if supported.  */
1831 HOST_WIDE_INT offset = INTVAL (temp);
1832 addr = memory_address (Pmode,
0a81f074 1833 plus_constant (Pmode, last_addr,
d809253a
EB
1834 STACK_GROW_OFF (offset)));
1835 }
1836 else
1837 {
1838 /* Manual CSE if the difference is not known at compile-time. */
1839 temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
1840 addr = memory_address (Pmode,
1841 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1842 last_addr, temp));
1843 }
1844
1845 emit_stack_probe (addr);
1846 }
edff2491 1847 }
eabcc725
EB
1848
1849 /* Make sure nothing is scheduled before we are done. */
1850 emit_insn (gen_blockage ());
edff2491 1851}
d809253a 1852
8c1dd970
JL
1853/* Compute parameters for stack clash probing a dynamic stack
1854 allocation of SIZE bytes.
1855
1856 We compute ROUNDED_SIZE, LAST_ADDR, RESIDUAL and PROBE_INTERVAL.
1857
1858 Additionally we conditionally dump the type of probing that will
1859 be needed given the values computed. */
1860
1861void
1862compute_stack_clash_protection_loop_data (rtx *rounded_size, rtx *last_addr,
1863 rtx *residual,
1864 HOST_WIDE_INT *probe_interval,
1865 rtx size)
1866{
1867 /* Round SIZE down to STACK_CLASH_PROTECTION_PROBE_INTERVAL */
1868 *probe_interval
028d4092 1869 = 1 << param_stack_clash_protection_probe_interval;
8c1dd970
JL
1870 *rounded_size = simplify_gen_binary (AND, Pmode, size,
1871 GEN_INT (-*probe_interval));
1872
1873 /* Compute the value of the stack pointer for the last iteration.
1874 It's just SP + ROUNDED_SIZE. */
1875 rtx rounded_size_op = force_operand (*rounded_size, NULL_RTX);
1876 *last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1877 stack_pointer_rtx,
1878 rounded_size_op),
1879 NULL_RTX);
1880
1881 /* Compute any residuals not allocated by the loop above. Residuals
 1882      are just SIZE - ROUNDED_SIZE.  */
1883 *residual = simplify_gen_binary (MINUS, Pmode, size, *rounded_size);
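  /* For instance (hypothetical values): with SIZE == 0x4100 and a probe
     interval of 0x1000, *ROUNDED_SIZE is 0x4000 and *RESIDUAL is 0x100.  */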
1884
1885 /* Dump key information to make writing tests easy. */
1886 if (dump_file)
1887 {
1888 if (*rounded_size == CONST0_RTX (Pmode))
1889 fprintf (dump_file,
1890 "Stack clash skipped dynamic allocation and probing loop.\n");
94c23e39 1891 else if (CONST_INT_P (*rounded_size)
8c1dd970
JL
1892 && INTVAL (*rounded_size) <= 4 * *probe_interval)
1893 fprintf (dump_file,
1894 "Stack clash dynamic allocation and probing inline.\n");
94c23e39 1895 else if (CONST_INT_P (*rounded_size))
8c1dd970
JL
1896 fprintf (dump_file,
1897 "Stack clash dynamic allocation and probing in "
1898 "rotated loop.\n");
1899 else
1900 fprintf (dump_file,
1901 "Stack clash dynamic allocation and probing in loop.\n");
1902
1903 if (*residual != CONST0_RTX (Pmode))
1904 fprintf (dump_file,
1905 "Stack clash dynamic allocation and probing residuals.\n");
1906 else
1907 fprintf (dump_file,
1908 "Stack clash skipped dynamic allocation and "
1909 "probing residuals.\n");
1910 }
1911}
1912
1913/* Emit the start of an allocate/probe loop for stack
1914 clash protection.
1915
1916 LOOP_LAB and END_LAB are returned for use when we emit the
1917 end of the loop.
1918
 1919   LAST_ADDR is the value for SP which stops the loop.  */
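/* When ROTATED is true, the exit test is instead emitted at the bottom of
   the loop by emit_stack_clash_protection_probe_loop_end (a do-while style
   loop); the caller below uses this form when the rounded size is a
   compile-time constant, so at least one iteration is known to be needed.  */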
1920void
1921emit_stack_clash_protection_probe_loop_start (rtx *loop_lab,
1922 rtx *end_lab,
1923 rtx last_addr,
1924 bool rotated)
1925{
1926 /* Essentially we want to emit any setup code, the top of loop
1927 label and the comparison at the top of the loop. */
1928 *loop_lab = gen_label_rtx ();
1929 *end_lab = gen_label_rtx ();
1930
1931 emit_label (*loop_lab);
1932 if (!rotated)
1933 emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
1934 Pmode, 1, *end_lab);
1935}
1936
1937/* Emit the end of a stack clash probing loop.
1938
1939 This consists of just the jump back to LOOP_LAB and
1940 emitting END_LOOP after the loop. */
1941
1942void
1943emit_stack_clash_protection_probe_loop_end (rtx loop_lab, rtx end_loop,
1944 rtx last_addr, bool rotated)
1945{
1946 if (rotated)
1947 emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, NE, NULL_RTX,
1948 Pmode, 1, loop_lab);
1949 else
1950 emit_jump (loop_lab);
1951
1952 emit_label (end_loop);
1953
1954}
1955
1956/* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
1957 while probing it. This pushes when SIZE is positive. SIZE need not
1958 be constant.
1959
 1960   This is subtly different from anti_adjust_stack_and_probe, in order
 1961   to prevent stack-clash attacks:
 1962
 1963   1. It must assume no knowledge of the probing state; any allocation
 1964      must probe.
1965
 1966   Consider the case of a 1 byte alloca in a loop.  If the sum of the
 1967   allocations is large, then this could be used to jump over the guard
 1968   page if probes were not emitted.
1969
1970 2. It never skips probes, whereas anti_adjust_stack_and_probe will
f569026a
EB
1971 skip the probe on the first PROBE_INTERVAL on the assumption it
1972 was already done in the prologue and in previous allocations.
8c1dd970
JL
1973
 1974   3. It only allocates and probes SIZE bytes; it does not need to
1975 allocate/probe beyond that because this probing style does not
1976 guarantee signal handling capability if the guard is hit. */
1977
d3e5bae1 1978void
8c1dd970
JL
1979anti_adjust_stack_and_probe_stack_clash (rtx size)
1980{
1981 /* First ensure SIZE is Pmode. */
1982 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
1983 size = convert_to_mode (Pmode, size, 1);
1984
1985 /* We can get here with a constant size on some targets. */
1986 rtx rounded_size, last_addr, residual;
2c25083e
TC
1987 HOST_WIDE_INT probe_interval, probe_range;
1988 bool target_probe_range_p = false;
8c1dd970
JL
1989 compute_stack_clash_protection_loop_data (&rounded_size, &last_addr,
1990 &residual, &probe_interval, size);
1991
2c25083e
TC
1992 /* Get the back-end specific probe ranges. */
1993 probe_range = targetm.stack_clash_protection_alloca_probe_range ();
1994 target_probe_range_p = probe_range != 0;
1995 gcc_assert (probe_range >= 0);
1996
1997 /* If no back-end specific range defined, default to the top of the newly
1998 allocated range. */
1999 if (probe_range == 0)
2000 probe_range = probe_interval - GET_MODE_SIZE (word_mode);
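  /* For example (hypothetical values): with a 4 KiB probe interval and
     8-byte words, the default probe offset is 4096 - 8 = 4088 bytes.  */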
2001
8c1dd970
JL
2002 if (rounded_size != CONST0_RTX (Pmode))
2003 {
94c23e39
JL
2004 if (CONST_INT_P (rounded_size)
2005 && INTVAL (rounded_size) <= 4 * probe_interval)
8c1dd970
JL
2006 {
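	  /* A small compile-time-constant allocation is unrolled: e.g. a
	     hypothetical 8 KiB ROUNDED_SIZE with a 4 KiB interval emits two
	     adjust/probe/blockage sequences.  */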
2007 for (HOST_WIDE_INT i = 0;
2008 i < INTVAL (rounded_size);
2009 i += probe_interval)
2010 {
2011 anti_adjust_stack (GEN_INT (probe_interval));
8c1dd970
JL
2012 /* The prologue does not probe residuals. Thus the offset
 2013		 here is to probe just beyond what the prologue had already
2014 allocated. */
2015 emit_stack_probe (plus_constant (Pmode, stack_pointer_rtx,
2c25083e
TC
2016 probe_range));
2017
8c1dd970
JL
2018 emit_insn (gen_blockage ());
2019 }
2020 }
2021 else
2022 {
2023 rtx loop_lab, end_loop;
94c23e39 2024 bool rotate_loop = CONST_INT_P (rounded_size);
8c1dd970
JL
2025 emit_stack_clash_protection_probe_loop_start (&loop_lab, &end_loop,
2026 last_addr, rotate_loop);
2027
2028 anti_adjust_stack (GEN_INT (probe_interval));
2029
2030 /* The prologue does not probe residuals. Thus the offset here
2c25083e
TC
 2031	     is to probe just beyond what the prologue had already
2032 allocated. */
8c1dd970 2033 emit_stack_probe (plus_constant (Pmode, stack_pointer_rtx,
2c25083e 2034 probe_range));
8c1dd970
JL
2035
2036 emit_stack_clash_protection_probe_loop_end (loop_lab, end_loop,
2037 last_addr, rotate_loop);
2038 emit_insn (gen_blockage ());
2039 }
2040 }
2041
2042 if (residual != CONST0_RTX (Pmode))
2043 {
86aa0691
JL
2044 rtx label = NULL_RTX;
2045 /* RESIDUAL could be zero at runtime and in that case *sp could
2046 hold live data. Furthermore, we do not want to probe into the
2047 red zone.
2048
2c25083e
TC
2049 If TARGET_PROBE_RANGE_P then the target has promised it's safe to
 2050	     probe at offset 0, in which case we no longer have to check for
 2051	     RESIDUAL == 0.  However, we still need to probe at the right offset
2052 when RESIDUAL > PROBE_RANGE, in which case we probe at PROBE_RANGE.
2053
2054 If !TARGET_PROBE_RANGE_P then go ahead and just guard the probe at *sp
2055 on RESIDUAL != 0 at runtime if RESIDUAL is not a compile time constant.
2056 */
2057 anti_adjust_stack (residual);
2058
86aa0691
JL
2059 if (!CONST_INT_P (residual))
2060 {
2061 label = gen_label_rtx ();
2c25083e
TC
2062 rtx_code op = target_probe_range_p ? LT : EQ;
2063 rtx probe_cmp_value = target_probe_range_p
2064 ? gen_rtx_CONST_INT (GET_MODE (residual), probe_range)
2065 : CONST0_RTX (GET_MODE (residual));
86aa0691 2066
2c25083e
TC
2067 if (target_probe_range_p)
2068 emit_stack_probe (stack_pointer_rtx);
8c1dd970 2069
2c25083e
TC
2070 emit_cmp_and_jump_insns (residual, probe_cmp_value,
2071 op, NULL_RTX, Pmode, 1, label);
2072 }
2a6fc987 2073
2c25083e
TC
2074 rtx x = NULL_RTX;
2075
2076 /* If RESIDUAL isn't a constant and TARGET_PROBE_RANGE_P then we probe up
2077 by the ABI defined safe value. */
2078 if (!CONST_INT_P (residual) && target_probe_range_p)
2079 x = GEN_INT (probe_range);
2080 /* If RESIDUAL is a constant but smaller than the ABI defined safe value,
 2081	 we still want to probe up, but the safest amount is a word.  */
2082 else if (target_probe_range_p)
8c1dd970 2083 {
2c25083e
TC
2084 if (INTVAL (residual) <= probe_range)
2085 x = GEN_INT (GET_MODE_SIZE (word_mode));
2086 else
2087 x = GEN_INT (probe_range);
8c1dd970 2088 }
2c25083e
TC
2089 else
2090 /* If nothing else, probe at the top of the new allocation. */
2091 x = plus_constant (Pmode, residual, -GET_MODE_SIZE (word_mode));
2092
2093 emit_stack_probe (gen_rtx_PLUS (Pmode, stack_pointer_rtx, x));
86aa0691 2094
86aa0691 2095 emit_insn (gen_blockage ());
2c25083e
TC
2096 if (!CONST_INT_P (residual))
2097 emit_label (label);
8c1dd970
JL
2098 }
2099}
2100
2101
c35af30f
EB
2102/* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
2103 while probing it. This pushes when SIZE is positive. SIZE need not
2104 be constant. If ADJUST_BACK is true, adjust back the stack pointer
2105 by plus SIZE at the end. */
d809253a 2106
c35af30f
EB
2107void
2108anti_adjust_stack_and_probe (rtx size, bool adjust_back)
d809253a 2109{
c35af30f
EB
2110 /* We skip the probe for the first interval + a small dope of 4 words and
2111 probe that many bytes past the specified size to maintain a protection
 2112     area at the bottom of the stack.  */
d809253a
EB
2113 const int dope = 4 * UNITS_PER_WORD;
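  /* For example, on a target where UNITS_PER_WORD is 8, the dope is
     32 bytes.  */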
2114
2115 /* First ensure SIZE is Pmode. */
2116 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
2117 size = convert_to_mode (Pmode, size, 1);
2118
2119 /* If we have a constant small number of probes to generate, that's the
2120 easy case. */
32990d5b 2121 if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
d809253a
EB
2122 {
2123 HOST_WIDE_INT isize = INTVAL (size), i;
2124 bool first_probe = true;
2125
260c8ba3 2126 /* Adjust SP and probe at PROBE_INTERVAL + N * PROBE_INTERVAL for
d809253a
EB
2127 values of N from 1 until it exceeds SIZE. If only one probe is
2128 needed, this will not generate any code. Then adjust and probe
2129 to PROBE_INTERVAL + SIZE. */
2130 for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
2131 {
2132 if (first_probe)
2133 {
2134 anti_adjust_stack (GEN_INT (2 * PROBE_INTERVAL + dope));
2135 first_probe = false;
2136 }
2137 else
2138 anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
2139 emit_stack_probe (stack_pointer_rtx);
2140 }
2141
2142 if (first_probe)
0a81f074 2143 anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
d809253a 2144 else
0a81f074 2145 anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL - i));
d809253a
EB
2146 emit_stack_probe (stack_pointer_rtx);
2147 }
2148
2149 /* In the variable case, do the same as above, but in a loop. Note that we
2150 must be extra careful with variables wrapping around because we might be
2151 at the very top (or the very bottom) of the address space and we have to
2152 be able to handle this case properly; in particular, we use an equality
2153 test for the loop condition. */
2154 else
2155 {
2156 rtx rounded_size, rounded_size_op, last_addr, temp;
528a80c1
DM
2157 rtx_code_label *loop_lab = gen_label_rtx ();
2158 rtx_code_label *end_lab = gen_label_rtx ();
d809253a
EB
2159
2160
2161 /* Step 1: round SIZE to the previous multiple of the interval. */
2162
2163 /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL */
2164 rounded_size
69a59f0f
RS
2165 = simplify_gen_binary (AND, Pmode, size,
2166 gen_int_mode (-PROBE_INTERVAL, Pmode));
d809253a
EB
2167 rounded_size_op = force_operand (rounded_size, NULL_RTX);
2168
2169
2170 /* Step 2: compute initial and final value of the loop counter. */
2171
2172 /* SP = SP_0 + PROBE_INTERVAL. */
2173 anti_adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
2174
2175 /* LAST_ADDR = SP_0 + PROBE_INTERVAL + ROUNDED_SIZE. */
2176 last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
2177 stack_pointer_rtx,
2178 rounded_size_op), NULL_RTX);
2179
2180
2181 /* Step 3: the loop
2182
260c8ba3
EB
2183 while (SP != LAST_ADDR)
2184 {
2185 SP = SP + PROBE_INTERVAL
2186 probe at SP
2187 }
d809253a 2188
260c8ba3 2189 adjusts SP and probes at PROBE_INTERVAL + N * PROBE_INTERVAL for
d809253a
EB
2190 values of N from 1 until it is equal to ROUNDED_SIZE. */
2191
2192 emit_label (loop_lab);
2193
2194 /* Jump to END_LAB if SP == LAST_ADDR. */
2195 emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
2196 Pmode, 1, end_lab);
2197
2198 /* SP = SP + PROBE_INTERVAL and probe at SP. */
2199 anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
2200 emit_stack_probe (stack_pointer_rtx);
2201
2202 emit_jump (loop_lab);
2203
2204 emit_label (end_lab);
2205
2206
260c8ba3 2207 /* Step 4: adjust SP and probe at PROBE_INTERVAL + SIZE if we cannot
d809253a
EB
2208 assert at compile-time that SIZE is equal to ROUNDED_SIZE. */
2209
2210 /* TEMP = SIZE - ROUNDED_SIZE. */
2211 temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
2212 if (temp != const0_rtx)
2213 {
2214 /* Manual CSE if the difference is not known at compile-time. */
2215 if (GET_CODE (temp) != CONST_INT)
2216 temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
2217 anti_adjust_stack (temp);
2218 emit_stack_probe (stack_pointer_rtx);
2219 }
2220 }
2221
c35af30f
EB
2222 /* Adjust back and account for the additional first interval. */
2223 if (adjust_back)
0a81f074 2224 adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
c35af30f
EB
2225 else
2226 adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
d809253a
EB
2227}
2228
18ca7dab
RK
2229/* Return an rtx representing the register or memory location
2230 in which a scalar value of data type VALTYPE
2231 was returned by a function call to function FUNC.
1d636cc6
RG
2232 FUNC is a FUNCTION_DECL, FNTYPE a FUNCTION_TYPE node if the precise
2233 function is known, otherwise 0.
4dc07bd7
JJ
2234 OUTGOING is 1 if on a machine with register windows this function
2235 should return the register in which the function will put its result
30f7a378 2236 and 0 otherwise. */
18ca7dab
RK
2237
2238rtx
586de218 2239hard_function_value (const_tree valtype, const_tree func, const_tree fntype,
502b8322 2240 int outgoing ATTRIBUTE_UNUSED)
18ca7dab 2241{
4dc07bd7 2242 rtx val;
770ae6cc 2243
1d636cc6 2244 val = targetm.calls.function_value (valtype, func ? func : fntype, outgoing);
770ae6cc 2245
f8cfc6aa 2246 if (REG_P (val)
e1a4071f
JL
2247 && GET_MODE (val) == BLKmode)
2248 {
974aedcc 2249 unsigned HOST_WIDE_INT bytes = arg_int_size_in_bytes (valtype);
59b51186 2250 opt_scalar_int_mode tmpmode;
770ae6cc 2251
d9b3eb63 2252 /* int_size_in_bytes can return -1. We don't need a check here
535a42b1
NS
2253 since the value of bytes will then be large enough that no
2254 mode will match anyway. */
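      /* For example (illustrative), a 12-byte BLKmode return value is
	 given the first integer mode of at least 12 bytes, i.e. TImode on
	 targets that provide it.  */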
d9b3eb63 2255
c94843d2 2256 FOR_EACH_MODE_IN_CLASS (tmpmode, MODE_INT)
0fb7aeda
KH
2257 {
2258 /* Have we found a large enough mode? */
59b51186 2259 if (GET_MODE_SIZE (tmpmode.require ()) >= bytes)
0fb7aeda
KH
2260 break;
2261 }
e1a4071f 2262
59b51186 2263 PUT_MODE (val, tmpmode.require ());
d9b3eb63 2264 }
e1a4071f 2265 return val;
18ca7dab
RK
2266}
2267
2268/* Return an rtx representing the register or memory location
2269 in which a scalar value of mode MODE was returned by a library call. */
2270
2271rtx
ef4bddc2 2272hard_libcall_value (machine_mode mode, rtx fun)
18ca7dab 2273{
390b17c2 2274 return targetm.calls.libcall_value (mode, fun);
18ca7dab 2275}
0c5e217d
RS
2276
2277/* Look up the tree code for a given rtx code
5c88ea94 2278 to provide the arithmetic operation for real_arithmetic.
0c5e217d
RS
2279 The function returns an int because the caller may not know
2280 what `enum tree_code' means. */
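/* For example, DIV maps to RDIV_EXPR rather than TRUNC_DIV_EXPR because
   the result is used for real (floating-point) arithmetic.  */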
2281
2282int
502b8322 2283rtx_to_tree_code (enum rtx_code code)
0c5e217d
RS
2284{
2285 enum tree_code tcode;
2286
2287 switch (code)
2288 {
2289 case PLUS:
2290 tcode = PLUS_EXPR;
2291 break;
2292 case MINUS:
2293 tcode = MINUS_EXPR;
2294 break;
2295 case MULT:
2296 tcode = MULT_EXPR;
2297 break;
2298 case DIV:
2299 tcode = RDIV_EXPR;
2300 break;
2301 case SMIN:
2302 tcode = MIN_EXPR;
2303 break;
2304 case SMAX:
2305 tcode = MAX_EXPR;
2306 break;
2307 default:
2308 tcode = LAST_AND_UNUSED_TREE_CODE;
2309 break;
2310 }
2311 return ((int) tcode);
2312}
e2500fed
GK
2313
2314#include "gt-explow.h"