/* Subroutines for manipulating rtx's in semantically interesting ways.
   Copyright (C) 1987-2022 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "function.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "tm_p.h"
#include "optabs.h"
#include "expmed.h"
#include "profile-count.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "stor-layout.h"
#include "langhooks.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "expr.h"
#include "stringpool.h"
#include "common/common-target.h"
#include "output.h"

static rtx break_out_memory_refs (rtx);


/* Truncate and perhaps sign-extend C as appropriate for MODE.  */

HOST_WIDE_INT
trunc_int_for_mode (HOST_WIDE_INT c, machine_mode mode)
{
  /* Not scalar_int_mode because we also allow pointer bound modes.  */
  scalar_mode smode = as_a <scalar_mode> (mode);
  int width = GET_MODE_PRECISION (smode);

  /* You want to truncate to a _what_?  */
  gcc_assert (SCALAR_INT_MODE_P (mode));

  /* Canonicalize BImode to 0 and STORE_FLAG_VALUE.  */
  if (smode == BImode)
    return c & 1 ? STORE_FLAG_VALUE : 0;

  /* Sign-extend for the requested mode.  */

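  /* Added note: C is first masked down to WIDTH bits; the XOR-and-subtract
     with the sign bit then propagates that bit through the upper bits,
     producing the sign-extended value.  */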
  if (width < HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT sign = 1;
      sign <<= width - 1;
      c &= (sign << 1) - 1;
      c ^= sign;
      c -= sign;
    }

  return c;
}

/* Likewise for polynomial values, using the sign-extended representation
   for each individual coefficient.  */

poly_int64
trunc_int_for_mode (poly_int64 x, machine_mode mode)
{
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    x.coeffs[i] = trunc_int_for_mode (x.coeffs[i], mode);
  return x;
}

/* Return an rtx for the sum of X and the integer C, given that X has
   mode MODE.  INPLACE is true if X can be modified inplace or false
   if it must be treated as immutable.  */
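/* Added illustration: adding 4 to (plus:SI r (const_int 8)) folds the
   constants and yields (plus:SI r (const_int 12)) rather than nesting
   another PLUS; a sketch of typical behavior, not a full specification.  */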

rtx
plus_constant (machine_mode mode, rtx x, poly_int64 c, bool inplace)
{
  RTX_CODE code;
  rtx y;
  rtx tem;
  int all_constant = 0;

  gcc_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);

  if (known_eq (c, 0))
    return x;

 restart:

  code = GET_CODE (x);
  y = x;

  switch (code)
    {
    CASE_CONST_SCALAR_INT:
      return immed_wide_int_const (wi::add (rtx_mode_t (x, mode), c), mode);
    case MEM:
      /* If this is a reference to the constant pool, try replacing it with
	 a reference to a new constant.  If the resulting address isn't
	 valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
	{
	  rtx cst = get_pool_constant (XEXP (x, 0));

	  if (GET_CODE (cst) == CONST_VECTOR
	      && GET_MODE_INNER (GET_MODE (cst)) == mode)
	    {
	      cst = gen_lowpart (mode, cst);
	      gcc_assert (cst);
	    }
	  else if (GET_MODE (cst) == VOIDmode
		   && get_pool_mode (XEXP (x, 0)) != mode)
	    break;
	  if (GET_MODE (cst) == VOIDmode || GET_MODE (cst) == mode)
	    {
	      tem = plus_constant (mode, cst, c);
	      tem = force_const_mem (GET_MODE (x), tem);
	      /* Targets may disallow some constants in the constant pool, thus
		 force_const_mem may return NULL_RTX.  */
	      if (tem && memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
		return tem;
	    }
	}
      break;

    case CONST:
      /* If adding to something entirely constant, set a flag
	 so that we can add a CONST around the result.  */
      if (inplace && shared_const_p (x))
	inplace = false;
      x = XEXP (x, 0);
      all_constant = 1;
      goto restart;

    case SYMBOL_REF:
    case LABEL_REF:
      all_constant = 1;
      break;

    case PLUS:
      /* The interesting case is adding the integer to a sum.  Look
	 for constant term in the sum and combine with C.  For an
	 integer constant term or a constant term that is not an
	 explicit integer, we combine or group them together anyway.

	 We may not immediately return from the recursive call here, lest
	 all_constant gets lost.  */

      if (CONSTANT_P (XEXP (x, 1)))
	{
	  rtx term = plus_constant (mode, XEXP (x, 1), c, inplace);
	  if (term == const0_rtx)
	    x = XEXP (x, 0);
	  else if (inplace)
	    XEXP (x, 1) = term;
	  else
	    x = gen_rtx_PLUS (mode, XEXP (x, 0), term);
	  c = 0;
	}
      else if (rtx *const_loc = find_constant_term_loc (&y))
	{
	  if (!inplace)
	    {
	      /* We need to be careful since X may be shared and we can't
		 modify it in place.  */
	      x = copy_rtx (x);
	      const_loc = find_constant_term_loc (&x);
	    }
	  *const_loc = plus_constant (mode, *const_loc, c, true);
	  c = 0;
	}
      break;

    default:
      if (CONST_POLY_INT_P (x))
	return immed_wide_int_const (const_poly_int_value (x) + c, mode);
      break;
    }

  if (maybe_ne (c, 0))
    x = gen_rtx_PLUS (mode, x, gen_int_mode (c, mode));

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    return gen_rtx_CONST (mode, x);
  else
    return x;
}
\f
/* If X is a sum, return a new sum like X but lacking any constant terms.
   Add all the removed constant terms into *CONSTPTR.
   X itself is not altered.  The result != X if and only if
   it is not isomorphic to X.  */
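/* Added illustration: applied to (plus (plus r (const_int 4)) (const_int 8))
   with *CONSTPTR initially const0_rtx, the result is R and *CONSTPTR becomes
   (const_int 12); an illustrative sketch only.  */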

rtx
eliminate_constant_term (rtx x, rtx *constptr)
{
  rtx x0, x1;
  rtx tem;

  if (GET_CODE (x) != PLUS)
    return x;

  /* First handle constants appearing at this level explicitly.  */
  if (CONST_INT_P (XEXP (x, 1))
      && (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
					   XEXP (x, 1))) != 0
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return eliminate_constant_term (XEXP (x, 0), constptr);
    }

  tem = const0_rtx;
  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
      && (tem = simplify_binary_operation (PLUS, GET_MODE (x),
					   *constptr, tem)) != 0
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return gen_rtx_PLUS (GET_MODE (x), x0, x1);
    }

  return x;
}

\f
/* Return a copy of X in which all memory references
   and all constants that involve symbol refs
   have been replaced with new temporary registers.
   Also emit code to load the memory locations and constants
   into those registers.

   If X contains no such constants or memory references,
   X itself (not a copy) is returned.

   If a constant is found in the address that is not a legitimate constant
   in an insn, it is left alone in the hope that it might be valid in the
   address.

   X may contain no arithmetic except addition, subtraction and multiplication.
   Values returned by expand_expr with 1 for sum_ok fit this constraint.  */

static rtx
break_out_memory_refs (rtx x)
{
  if (MEM_P (x)
      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
	  && GET_MODE (x) != VOIDmode))
    x = force_reg (GET_MODE (x), x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
	   || GET_CODE (x) == MULT)
    {
      rtx op0 = break_out_memory_refs (XEXP (x, 0));
      rtx op1 = break_out_memory_refs (XEXP (x, 1));

      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
	x = simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
    }

  return x;
}

/* Given X, a memory address in address space AS' pointer mode, convert it to
   an address in the address space's address mode, or vice versa (TO_MODE says
   which way).  We take advantage of the fact that pointers are not allowed to
   overflow by commuting arithmetic operations over conversions so that address
   arithmetic insns can be used.  IN_CONST is true if this conversion is inside
   a CONST.  NO_EMIT is true if no insns should be emitted, and instead
   it should return NULL if it can't be simplified without emitting insns.  */

rtx
convert_memory_address_addr_space_1 (scalar_int_mode to_mode ATTRIBUTE_UNUSED,
				     rtx x, addr_space_t as ATTRIBUTE_UNUSED,
				     bool in_const ATTRIBUTE_UNUSED,
				     bool no_emit ATTRIBUTE_UNUSED)
{
#ifndef POINTERS_EXTEND_UNSIGNED
  gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
  return x;
#else /* defined(POINTERS_EXTEND_UNSIGNED) */
  scalar_int_mode pointer_mode, address_mode, from_mode;
  rtx temp;
  enum rtx_code code;

  /* If X already has the right mode, just return it.  */
  if (GET_MODE (x) == to_mode)
    return x;

  pointer_mode = targetm.addr_space.pointer_mode (as);
  address_mode = targetm.addr_space.address_mode (as);
  from_mode = to_mode == pointer_mode ? address_mode : pointer_mode;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    {
    CASE_CONST_SCALAR_INT:
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
	code = TRUNCATE;
      else if (POINTERS_EXTEND_UNSIGNED < 0)
	break;
      else if (POINTERS_EXTEND_UNSIGNED > 0)
	code = ZERO_EXTEND;
      else
	code = SIGN_EXTEND;
      temp = simplify_unary_operation (code, to_mode, x, from_mode);
      if (temp)
	return temp;
      break;

    case SUBREG:
      if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
	  && GET_MODE (SUBREG_REG (x)) == to_mode)
	return SUBREG_REG (x);
      break;

    case LABEL_REF:
      temp = gen_rtx_LABEL_REF (to_mode, label_ref_label (x));
      LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
      return temp;

    case SYMBOL_REF:
      temp = shallow_copy_rtx (x);
      PUT_MODE (temp, to_mode);
      return temp;

    case CONST:
      temp = convert_memory_address_addr_space_1 (to_mode, XEXP (x, 0), as,
						  true, no_emit);
      return temp ? gen_rtx_CONST (to_mode, temp) : temp;

    case PLUS:
    case MULT:
      /* For addition we can safely permute the conversion and addition
	 operation if one operand is a constant and converting the constant
	 does not change it or if one operand is a constant and we are
	 using a ptr_extend instruction (POINTERS_EXTEND_UNSIGNED < 0).
	 We can always safely permute them if we are making the address
	 narrower.  Inside a CONST RTL, this is safe for both pointers
	 zero or sign extended as pointers cannot wrap.  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
	  || (GET_CODE (x) == PLUS
	      && CONST_INT_P (XEXP (x, 1))
	      && ((in_const && POINTERS_EXTEND_UNSIGNED != 0)
		  || XEXP (x, 1) == convert_memory_address_addr_space_1
				      (to_mode, XEXP (x, 1), as, in_const,
				       no_emit)
		  || POINTERS_EXTEND_UNSIGNED < 0)))
	{
	  temp = convert_memory_address_addr_space_1 (to_mode, XEXP (x, 0),
						      as, in_const, no_emit);
	  return (temp ? gen_rtx_fmt_ee (GET_CODE (x), to_mode,
					 temp, XEXP (x, 1))
		  : temp);
	}
      break;

    case UNSPEC:
      /* Assume that all UNSPECs in a constant address can be converted
	 operand-by-operand.  We could add a target hook if some targets
	 require different behavior.  */
      if (in_const && GET_MODE (x) == from_mode)
	{
	  unsigned int n = XVECLEN (x, 0);
	  rtvec v = gen_rtvec (n);
	  for (unsigned int i = 0; i < n; ++i)
	    {
	      rtx op = XVECEXP (x, 0, i);
	      if (GET_MODE (op) == from_mode)
		op = convert_memory_address_addr_space_1 (to_mode, op, as,
							  in_const, no_emit);
	      RTVEC_ELT (v, i) = op;
	    }
	  return gen_rtx_UNSPEC (to_mode, v, XINT (x, 1));
	}
      break;

    default:
      break;
    }

  if (no_emit)
    return NULL_RTX;

  return convert_modes (to_mode, from_mode,
			x, POINTERS_EXTEND_UNSIGNED);
#endif /* defined(POINTERS_EXTEND_UNSIGNED) */
}

/* Given X, a memory address in address space AS' pointer mode, convert it to
   an address in the address space's address mode, or vice versa (TO_MODE says
   which way).  We take advantage of the fact that pointers are not allowed to
   overflow by commuting arithmetic operations over conversions so that address
   arithmetic insns can be used.  */

rtx
convert_memory_address_addr_space (scalar_int_mode to_mode, rtx x,
				   addr_space_t as)
{
  return convert_memory_address_addr_space_1 (to_mode, x, as, false, false);
}
\f

/* Return something equivalent to X but valid as a memory address for something
   of mode MODE in the named address space AS.  When X is not itself valid,
   this works by copying X or subexpressions of it into registers.  */

rtx
memory_address_addr_space (machine_mode mode, rtx x, addr_space_t as)
{
  rtx oldx = x;
  scalar_int_mode address_mode = targetm.addr_space.address_mode (as);

  x = convert_memory_address_addr_space (address_mode, x, as);

  /* By passing constant addresses through registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (address_mode, x);

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && !REG_P (x))
	x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.  */
      if (memory_address_addr_space_p (mode, x, as))
	goto done;

      /* If it was valid before but breaking out memory refs invalidated it,
	 use it the old way.  */
      if (memory_address_addr_space_p (mode, oldx, as))
	{
	  x = oldx;
	  goto done;
	}

      /* Perform machine-dependent transformations on X
	 in certain cases.  This is not necessary since the code
	 below can handle all possible cases, but machine-dependent
	 transformations can make better code.  */
      {
	rtx orig_x = x;
	x = targetm.addr_space.legitimize_address (x, oldx, mode, as);
	if (orig_x != x && memory_address_addr_space_p (mode, x, as))
	  goto done;
      }

      /* PLUS and MULT can appear in special ways
	 as the result of attempts to make an address usable for indexing.
	 Usually they are dealt with by calling force_operand, below.
	 But a sum containing constant terms is special
	 if removing them makes the sum a valid address:
	 then we generate that address in a register
	 and index off of it.  We do this because it often makes
	 shorter code, and because the addresses thus generated
	 in registers often become common subexpressions.  */
      if (GET_CODE (x) == PLUS)
	{
	  rtx constant_term = const0_rtx;
	  rtx y = eliminate_constant_term (x, &constant_term);
	  if (constant_term == const0_rtx
	      || ! memory_address_addr_space_p (mode, y, as))
	    x = force_operand (x, NULL_RTX);
	  else
	    {
	      y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
	      if (! memory_address_addr_space_p (mode, y, as))
		x = force_operand (x, NULL_RTX);
	      else
		x = y;
	    }
	}

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
	x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
	 it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (REG_P (x))
	x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
	 the register is a valid address.  */
      else
	x = force_reg (address_mode, x);
    }

 done:

  gcc_assert (memory_address_addr_space_p (mode, x, as));
  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (REG_P (x))
    mark_reg_pointer (x, BITS_PER_UNIT);
  else if (GET_CODE (x) == PLUS
	   && REG_P (XEXP (x, 0))
	   && CONST_INT_P (XEXP (x, 1)))
    mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}

/* Convert a mem ref into one with a valid memory address.
   Pass through anything else unchanged.  */

rtx
validize_mem (rtx ref)
{
  if (!MEM_P (ref))
    return ref;
  ref = use_anchored_address (ref);
  if (memory_address_addr_space_p (GET_MODE (ref), XEXP (ref, 0),
				   MEM_ADDR_SPACE (ref)))
    return ref;

  /* Don't alter REF itself, since that is probably a stack slot.  */
  return replace_equiv_address (ref, XEXP (ref, 0));
}

/* If X is a memory reference to a member of an object block, try rewriting
   it to use an anchor instead.  Return the new memory reference on success
   and the old one on failure.  */

rtx
use_anchored_address (rtx x)
{
  rtx base;
  HOST_WIDE_INT offset;
  machine_mode mode;

  if (!flag_section_anchors)
    return x;

  if (!MEM_P (x))
    return x;

  /* Split the address into a base and offset.  */
  base = XEXP (x, 0);
  offset = 0;
  if (GET_CODE (base) == CONST
      && GET_CODE (XEXP (base, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (base, 0), 1)))
    {
      offset += INTVAL (XEXP (XEXP (base, 0), 1));
      base = XEXP (XEXP (base, 0), 0);
    }

  /* Check whether BASE is suitable for anchors.  */
  if (GET_CODE (base) != SYMBOL_REF
      || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
      || SYMBOL_REF_ANCHOR_P (base)
      || SYMBOL_REF_BLOCK (base) == NULL
      || !targetm.use_anchors_for_symbol_p (base))
    return x;

  /* Decide where BASE is going to be.  */
  place_block_symbol (base);

  /* Get the anchor we need to use.  */
  offset += SYMBOL_REF_BLOCK_OFFSET (base);
  base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
			     SYMBOL_REF_TLS_MODEL (base));

  /* Work out the offset from the anchor.  */
  offset -= SYMBOL_REF_BLOCK_OFFSET (base);

  /* If we're going to run a CSE pass, force the anchor into a register.
     We will then be able to reuse registers for several accesses, if the
     target costs say that that's worthwhile.  */
  mode = GET_MODE (base);
  if (!cse_not_expected)
    base = force_reg (mode, base);

  return replace_equiv_address (x, plus_constant (mode, base, offset));
}
\f
/* Copy the value or contents of X to a new temp reg and return that reg.  */

rtx
copy_to_reg (rtx x)
{
  rtx temp = gen_reg_rtx (GET_MODE (x));

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (x != temp)
    emit_move_insn (temp, x);

  return temp;
}

/* Like copy_to_reg but always give the new register mode Pmode
   in case X is a constant.  */

rtx
copy_addr_to_reg (rtx x)
{
  return copy_to_mode_reg (Pmode, x);
}

/* Like copy_to_reg but always give the new register mode MODE
   in case X is a constant.  */

rtx
copy_to_mode_reg (machine_mode mode, rtx x)
{
  rtx temp = gen_reg_rtx (mode);

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
  if (x != temp)
    emit_move_insn (temp, x);
  return temp;
}

/* Load X into a register if it is not already one.
   Use mode MODE for the register.
   X should be valid for mode MODE, but it may be a constant which
   is valid for all integer modes; that's why caller must specify MODE.

   The caller must not alter the value in the register we return,
   since we mark it as a "constant" register.  */

rtx
force_reg (machine_mode mode, rtx x)
{
  rtx temp, set;
  rtx_insn *insn;

  if (REG_P (x))
    return x;

  if (general_operand (x, mode))
    {
      temp = gen_reg_rtx (mode);
      insn = emit_move_insn (temp, x);
    }
  else
    {
      temp = force_operand (x, NULL_RTX);
      if (REG_P (temp))
	insn = get_last_insn ();
      else
	{
	  rtx temp2 = gen_reg_rtx (mode);
	  insn = emit_move_insn (temp2, temp);
	  temp = temp2;
	}
    }

  /* Let optimizers know that TEMP's value never changes
     and that X can be substituted for it.  Don't get confused
     if INSN set something else (such as a SUBREG of TEMP).  */
  if (CONSTANT_P (x)
      && (set = single_set (insn)) != 0
      && SET_DEST (set) == temp
      && ! rtx_equal_p (x, SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, x);

  /* Let optimizers know that TEMP is a pointer, and if so, the
     known alignment of that pointer.  */
  {
    unsigned align = 0;
    if (GET_CODE (x) == SYMBOL_REF)
      {
	align = BITS_PER_UNIT;
	if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
	  align = DECL_ALIGN (SYMBOL_REF_DECL (x));
      }
    else if (GET_CODE (x) == LABEL_REF)
      align = BITS_PER_UNIT;
    else if (GET_CODE (x) == CONST
	     && GET_CODE (XEXP (x, 0)) == PLUS
	     && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
	     && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
      {
	rtx s = XEXP (XEXP (x, 0), 0);
	rtx c = XEXP (XEXP (x, 0), 1);
	unsigned sa, ca;

	sa = BITS_PER_UNIT;
	if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
	  sa = DECL_ALIGN (SYMBOL_REF_DECL (s));

	if (INTVAL (c) == 0)
	  align = sa;
	else
	  {
	    ca = ctz_hwi (INTVAL (c)) * BITS_PER_UNIT;
	    align = MIN (sa, ca);
	  }
      }

    if (align || (MEM_P (x) && MEM_POINTER (x)))
      mark_reg_pointer (temp, align);
  }

  return temp;
}

/* If X is a memory ref, copy its contents to a new temp reg and return
   that reg.  Otherwise, return X.  */

rtx
force_not_mem (rtx x)
{
  rtx temp;

  if (!MEM_P (x) || GET_MODE (x) == BLKmode)
    return x;

  temp = gen_reg_rtx (GET_MODE (x));

  if (MEM_POINTER (x))
    REG_POINTER (temp) = 1;

  emit_move_insn (temp, x);
  return temp;
}

/* Copy X to TARGET (if it's nonzero and a reg)
   or to a new temp reg and return that reg.
   MODE is the mode to use for X in case it is a constant.  */

rtx
copy_to_suggested_reg (rtx x, rtx target, machine_mode mode)
{
  rtx temp;

  if (target && REG_P (target))
    temp = target;
  else
    temp = gen_reg_rtx (mode);

  emit_move_insn (temp, x);
  return temp;
}
\f
/* Return the mode to use to pass or return a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_RETURN is nonzero if the caller is promoting the return value
   of FNDECL, else it is for promoting args.  */

machine_mode
promote_function_mode (const_tree type, machine_mode mode, int *punsignedp,
		       const_tree funtype, int for_return)
{
  /* Called without a type node for a libcall.  */
  if (type == NULL_TREE)
    {
      if (INTEGRAL_MODE_P (mode))
	return targetm.calls.promote_function_mode (NULL_TREE, mode,
						    punsignedp, funtype,
						    for_return);
      else
	return mode;
    }

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case REAL_TYPE: case OFFSET_TYPE: case FIXED_POINT_TYPE:
    case POINTER_TYPE: case REFERENCE_TYPE:
      return targetm.calls.promote_function_mode (type, mode, punsignedp, funtype,
						  for_return);

    default:
      return mode;
    }
}
/* Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.  */

machine_mode
promote_mode (const_tree type ATTRIBUTE_UNUSED, machine_mode mode,
	      int *punsignedp ATTRIBUTE_UNUSED)
{
#ifdef PROMOTE_MODE
  enum tree_code code;
  int unsignedp;
  scalar_mode smode;
#endif

  /* For libcalls this is invoked without TYPE from the backend's
     TARGET_PROMOTE_FUNCTION_MODE hooks.  Don't do anything in that
     case.  */
  if (type == NULL_TREE)
    return mode;

  /* FIXME: this is the same logic that was there until GCC 4.4, but we
     probably want to test POINTERS_EXTEND_UNSIGNED even if PROMOTE_MODE
     is not defined.  The affected targets are M32C, S390, SPARC.  */
#ifdef PROMOTE_MODE
  code = TREE_CODE (type);
  unsignedp = *punsignedp;

  switch (code)
    {
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case REAL_TYPE: case OFFSET_TYPE: case FIXED_POINT_TYPE:
      /* Values of these types always have scalar mode.  */
      smode = as_a <scalar_mode> (mode);
      PROMOTE_MODE (smode, unsignedp, type);
      *punsignedp = unsignedp;
      return smode;

#ifdef POINTERS_EXTEND_UNSIGNED
    case REFERENCE_TYPE:
    case POINTER_TYPE:
      *punsignedp = POINTERS_EXTEND_UNSIGNED;
      return targetm.addr_space.address_mode
	       (TYPE_ADDR_SPACE (TREE_TYPE (type)));
#endif

    default:
      return mode;
    }
#else
  return mode;
#endif
}


/* Use one of promote_mode or promote_function_mode to find the promoted
   mode of DECL.  If PUNSIGNEDP is not NULL, store there the unsignedness
   of DECL after promotion.  */

machine_mode
promote_decl_mode (const_tree decl, int *punsignedp)
{
  tree type = TREE_TYPE (decl);
  int unsignedp = TYPE_UNSIGNED (type);
  machine_mode mode = DECL_MODE (decl);
  machine_mode pmode;

  if (TREE_CODE (decl) == RESULT_DECL && !DECL_BY_REFERENCE (decl))
    pmode = promote_function_mode (type, mode, &unsignedp,
				   TREE_TYPE (current_function_decl), 1);
  else if (TREE_CODE (decl) == RESULT_DECL || TREE_CODE (decl) == PARM_DECL)
    pmode = promote_function_mode (type, mode, &unsignedp,
				   TREE_TYPE (current_function_decl), 2);
  else
    pmode = promote_mode (type, mode, &unsignedp);

  if (punsignedp)
    *punsignedp = unsignedp;
  return pmode;
}

/* Return the promoted mode for name.  If it is a named SSA_NAME, it
   is the same as promote_decl_mode.  Otherwise, it is the promoted
   mode of a temp decl of same type as the SSA_NAME, if we had created
   one.  */

machine_mode
promote_ssa_mode (const_tree name, int *punsignedp)
{
  gcc_assert (TREE_CODE (name) == SSA_NAME);

  /* Partitions holding parms and results must be promoted as expected
     by function.cc.  */
  if (SSA_NAME_VAR (name)
      && (TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
	  || TREE_CODE (SSA_NAME_VAR (name)) == RESULT_DECL))
    {
      machine_mode mode = promote_decl_mode (SSA_NAME_VAR (name), punsignedp);
      if (mode != BLKmode)
	return mode;
    }

  tree type = TREE_TYPE (name);
  int unsignedp = TYPE_UNSIGNED (type);
  machine_mode pmode = promote_mode (type, TYPE_MODE (type), &unsignedp);
  if (punsignedp)
    *punsignedp = unsignedp;

  return pmode;
}


\f
/* Controls the behavior of {anti_,}adjust_stack.  */
static bool suppress_reg_args_size;

/* A helper for adjust_stack and anti_adjust_stack.  */

static void
adjust_stack_1 (rtx adjust, bool anti_p)
{
  rtx temp;
  rtx_insn *insn;

  /* Hereafter anti_p means subtract_p.  */
  if (!STACK_GROWS_DOWNWARD)
    anti_p = !anti_p;

  temp = expand_binop (Pmode,
		       anti_p ? sub_optab : add_optab,
		       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
		       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    insn = emit_move_insn (stack_pointer_rtx, temp);
  else
    {
      insn = get_last_insn ();
      temp = single_set (insn);
      gcc_assert (temp != NULL && SET_DEST (temp) == stack_pointer_rtx);
    }

  if (!suppress_reg_args_size)
    add_args_size_note (insn, stack_pointer_delta);
}

/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
   This pops when ADJUST is positive.  ADJUST need not be constant.  */

void
adjust_stack (rtx adjust)
{
  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be multiple of
     PREFERRED_STACK_BOUNDARY.  */
  poly_int64 const_adjust;
  if (poly_int_rtx_p (adjust, &const_adjust))
    stack_pointer_delta -= const_adjust;

  adjust_stack_1 (adjust, false);
}

/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
   This pushes when ADJUST is positive.  ADJUST need not be constant.  */

void
anti_adjust_stack (rtx adjust)
{
  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be multiple of
     PREFERRED_STACK_BOUNDARY.  */
  poly_int64 const_adjust;
  if (poly_int_rtx_p (adjust, &const_adjust))
    stack_pointer_delta += const_adjust;

  adjust_stack_1 (adjust, true);
}

/* Round the size of a block to be pushed up to the boundary required
   by this machine.  SIZE is the desired size, which need not be constant.  */

static rtx
round_push (rtx size)
{
  rtx align_rtx, alignm1_rtx;

  if (!SUPPORTS_STACK_ALIGNMENT
      || crtl->preferred_stack_boundary == MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;

      if (align == 1)
	return size;

      if (CONST_INT_P (size))
	{
	  HOST_WIDE_INT new_size = (INTVAL (size) + align - 1) / align * align;

	  if (INTVAL (size) != new_size)
	    size = GEN_INT (new_size);
	  return size;
	}

      align_rtx = GEN_INT (align);
      alignm1_rtx = GEN_INT (align - 1);
    }
  else
    {
      /* If crtl->preferred_stack_boundary might still grow, use
	 virtual_preferred_stack_boundary_rtx instead.  This will be
	 substituted by the right value in vregs pass and optimized
	 during combine.  */
      align_rtx = virtual_preferred_stack_boundary_rtx;
      alignm1_rtx = force_operand (plus_constant (Pmode, align_rtx, -1),
				   NULL_RTX);
    }

  /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
     but we know it can't.  So add ourselves and then do
     TRUNC_DIV_EXPR.  */
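  /* Added worked example: rounding SIZE = 23 up to a 16-byte boundary
     computes (23 + 15) / 16 * 16 = 32 (illustrative arithmetic only).  */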
  size = expand_binop (Pmode, add_optab, size, alignm1_rtx,
		       NULL_RTX, 1, OPTAB_LIB_WIDEN);
  size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, align_rtx,
			NULL_RTX, 1);
  size = expand_mult (Pmode, size, align_rtx, NULL_RTX, 1);

  return size;
}
\f
/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
   to a previously-created save area.  If no save area has been allocated,
   this function will allocate one.  If a save area is specified, it
   must be of the proper mode.  */

void
emit_stack_save (enum save_level save_level, rtx *psave)
{
  rtx sa = *psave;
  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx_insn *(*fcn) (rtx, rtx) = gen_move_insn;
  machine_mode mode = STACK_SAVEAREA_MODE (save_level);

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
    case SAVE_BLOCK:
      if (targetm.have_save_stack_block ())
	fcn = targetm.gen_save_stack_block;
      break;
    case SAVE_FUNCTION:
      if (targetm.have_save_stack_function ())
	fcn = targetm.gen_save_stack_function;
      break;
    case SAVE_NONLOCAL:
      if (targetm.have_save_stack_nonlocal ())
	fcn = targetm.gen_save_stack_nonlocal;
      break;
    default:
      break;
    }

  /* If there is no save area and we have to allocate one, do so.  Otherwise
     verify the save area is the proper mode.  */

  if (sa == 0)
    {
      if (mode != VOIDmode)
	{
	  if (save_level == SAVE_NONLOCAL)
	    *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
	  else
	    *psave = sa = gen_reg_rtx (mode);
	}
    }

  do_pending_stack_adjust ();
  if (sa != 0)
    sa = validize_mem (sa);
  emit_insn (fcn (sa, stack_pointer_rtx));
}

/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
   area made by emit_stack_save.  If it is zero, we have nothing to do.  */

void
emit_stack_restore (enum save_level save_level, rtx sa)
{
  /* The default is that we use a move insn.  */
  rtx_insn *(*fcn) (rtx, rtx) = gen_move_insn;

  /* If stack_realign_drap, the x86 backend emits a prologue that aligns both
     STACK_POINTER and HARD_FRAME_POINTER.
     If stack_realign_fp, the x86 backend emits a prologue that aligns only
     STACK_POINTER.  This renders the HARD_FRAME_POINTER unusable for accessing
     aligned variables, which is reflected in ix86_can_eliminate.
     We normally still have the realigned STACK_POINTER that we can use.
     But if there is a stack restore still present at reload, it can trigger
     mark_not_eliminable for the STACK_POINTER, leaving no way to eliminate
     FRAME_POINTER into a hard reg.
     To prevent this situation, we force need_drap if we emit a stack
     restore.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
    case SAVE_BLOCK:
      if (targetm.have_restore_stack_block ())
	fcn = targetm.gen_restore_stack_block;
      break;
    case SAVE_FUNCTION:
      if (targetm.have_restore_stack_function ())
	fcn = targetm.gen_restore_stack_function;
      break;
    case SAVE_NONLOCAL:
      if (targetm.have_restore_stack_nonlocal ())
	fcn = targetm.gen_restore_stack_nonlocal;
      break;
    default:
      break;
    }

  if (sa != 0)
    {
      sa = validize_mem (sa);
      /* These clobbers prevent the scheduler from moving
	 references to variable arrays below the code
	 that deletes (pops) the arrays.  */
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, stack_pointer_rtx));
    }

  discard_pending_stack_adjust ();

  emit_insn (fcn (stack_pointer_rtx, sa));
}

/* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
   function.  This should be called whenever we allocate or deallocate
   dynamic stack space.  */

void
update_nonlocal_goto_save_area (void)
{
  tree t_save;
  rtx r_save;

  /* The nonlocal_goto_save_area object is an array of N pointers.  The
     first one is used for the frame pointer save; the rest are sized by
     STACK_SAVEAREA_MODE.  Create a reference to array index 1, the first
     of the stack save area slots.  */
  t_save = build4 (ARRAY_REF,
		   TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
		   cfun->nonlocal_goto_save_area,
		   integer_one_node, NULL_TREE, NULL_TREE);
  r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);

  emit_stack_save (SAVE_NONLOCAL, &r_save);
}

/* Record a new stack level for the current function.  This should be called
   whenever we allocate or deallocate dynamic stack space.  */

void
record_new_stack_level (void)
{
  /* Record the new stack level for nonlocal gotos.  */
  if (cfun->nonlocal_goto_save_area)
    update_nonlocal_goto_save_area ();

  /* Record the new stack level for SJLJ exceptions.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    update_sjlj_context ();
}

/* Return an rtx doing runtime alignment to REQUIRED_ALIGN on TARGET.  */

rtx
align_dynamic_address (rtx target, unsigned required_align)
{
  /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
     but we know it can't.  So add ourselves and then do
     TRUNC_DIV_EXPR.  */
  target = expand_binop (Pmode, add_optab, target,
			 gen_int_mode (required_align / BITS_PER_UNIT - 1,
				       Pmode),
			 NULL_RTX, 1, OPTAB_LIB_WIDEN);
  target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
			  gen_int_mode (required_align / BITS_PER_UNIT,
					Pmode),
			  NULL_RTX, 1);
  target = expand_mult (Pmode, target,
			gen_int_mode (required_align / BITS_PER_UNIT,
				      Pmode),
			NULL_RTX, 1);

  return target;
}

/* Return an rtx through *PSIZE, representing the size of an area of memory to
   be dynamically pushed on the stack.

   *PSIZE is an rtx representing the size of the area.

   SIZE_ALIGN is the alignment (in bits) that we know SIZE has.  This
   parameter may be zero.  If so, a proper value will be extracted
   from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.

   REQUIRED_ALIGN is the alignment (in bits) required for the region
   of memory.

   If PSTACK_USAGE_SIZE is not NULL it points to a value that is increased for
   the additional size returned.  */
void
get_dynamic_stack_size (rtx *psize, unsigned size_align,
			unsigned required_align,
			HOST_WIDE_INT *pstack_usage_size)
{
  rtx size = *psize;

  /* Ensure the size is in the proper mode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  if (CONST_INT_P (size))
    {
      unsigned HOST_WIDE_INT lsb;

      lsb = INTVAL (size);
      lsb &= -lsb;

      /* Watch out for overflow truncating to "unsigned".  */
      if (lsb > UINT_MAX / BITS_PER_UNIT)
	size_align = 1u << (HOST_BITS_PER_INT - 1);
      else
	size_align = (unsigned)lsb * BITS_PER_UNIT;
    }
  else if (size_align < BITS_PER_UNIT)
    size_align = BITS_PER_UNIT;

  /* We can't attempt to minimize alignment necessary, because we don't
     know the final value of preferred_stack_boundary yet while executing
     this code.  */
  if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
    crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;

  /* We will need to ensure that the address we return is aligned to
     REQUIRED_ALIGN.  At this point in the compilation, we don't always
     know the final value of the STACK_DYNAMIC_OFFSET used in function.cc
     (it might depend on the size of the outgoing parameter lists, for
     example), so we must preventively align the value.  We leave space
     in SIZE for the hole that might result from the alignment operation.  */

  unsigned known_align = REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM);
  if (known_align == 0)
    known_align = BITS_PER_UNIT;
  if (required_align > known_align)
    {
      unsigned extra = (required_align - known_align) / BITS_PER_UNIT;
      size = plus_constant (Pmode, size, extra);
      size = force_operand (size, NULL_RTX);
      if (size_align > known_align)
	size_align = known_align;

      if (flag_stack_usage_info && pstack_usage_size)
	*pstack_usage_size += extra;
    }

  /* Round the size to a multiple of the required stack alignment.
     Since the stack is presumed to be rounded before this allocation,
     this will maintain the required alignment.

     If the stack grows downward, we could save an insn by subtracting
     SIZE from the stack pointer and then aligning the stack pointer.
     The problem with this is that the stack pointer may be unaligned
     between the execution of the subtraction and alignment insns and
     some machines do not allow this.  Even on those that do, some
     signal handlers malfunction if a signal should occur between those
     insns.  Since this is an extremely rare event, we have no reliable
     way of knowing which systems have this problem.  So we avoid even
     momentarily mis-aligning the stack.  */
  if (size_align % MAX_SUPPORTED_STACK_ALIGNMENT != 0)
    {
      size = round_push (size);

      if (flag_stack_usage_info && pstack_usage_size)
	{
	  int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
	  *pstack_usage_size =
	    (*pstack_usage_size + align - 1) / align * align;
	}
    }

  *psize = size;
}

/* Return the number of bytes to "protect" on the stack for -fstack-check.

   "protect" in the context of -fstack-check means how many bytes we need
   to always ensure are available on the stack; as a consequence, this is
   also how many bytes are first skipped when probing the stack.

   On some targets we want to reuse the -fstack-check prologue support
   to give a degree of protection against stack clashing style attacks.

   In that scenario we do not want to skip bytes before probing as that
   would render the stack clash protections useless.

   So we never use STACK_CHECK_PROTECT directly.  Instead we indirectly
   use it through this helper, which allows us to provide different values
   for -fstack-check and -fstack-clash-protection.  */

HOST_WIDE_INT
get_stack_check_protect (void)
{
  if (flag_stack_clash_protection)
    return 0;

  return STACK_CHECK_PROTECT;
}

/* Return an rtx representing the address of an area of memory dynamically
   pushed on the stack.

   Any required stack pointer alignment is preserved.

   SIZE is an rtx representing the size of the area.

   SIZE_ALIGN is the alignment (in bits) that we know SIZE has.  This
   parameter may be zero.  If so, a proper value will be extracted
   from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.

   REQUIRED_ALIGN is the alignment (in bits) required for the region
   of memory.

   MAX_SIZE is an upper bound for SIZE, if SIZE is not constant, or -1 if
   no such upper bound is known.

   If CANNOT_ACCUMULATE is set to TRUE, the caller guarantees that the
   stack space allocated by the generated code cannot be added with itself
   in the course of the execution of the function.  It is always safe to
   pass FALSE here and the following criterion is sufficient in order to
   pass TRUE: every path in the CFG that starts at the allocation point and
   loops to it executes the associated deallocation code.  */

rtx
allocate_dynamic_stack_space (rtx size, unsigned size_align,
			      unsigned required_align,
			      HOST_WIDE_INT max_size,
			      bool cannot_accumulate)
{
  HOST_WIDE_INT stack_usage_size = -1;
  rtx_code_label *final_label;
  rtx final_target, target;

  /* If we're asking for zero bytes, it doesn't matter what we point
     to since we can't dereference it.  But return a reasonable
     address anyway.  */
  if (size == const0_rtx)
    return virtual_stack_dynamic_rtx;

  /* Otherwise, show we're calling alloca or equivalent.  */
  cfun->calls_alloca = 1;

  /* If stack usage info is requested, look into the size we are passed.
     We need to do so this early to avoid the obfuscation that may be
     introduced later by the various alignment operations.  */
  if (flag_stack_usage_info)
    {
      if (CONST_INT_P (size))
	stack_usage_size = INTVAL (size);
      else if (REG_P (size))
	{
	  /* Look into the last emitted insn and see if we can deduce
	     something for the register.  */
	  rtx_insn *insn;
	  rtx set, note;
	  insn = get_last_insn ();
	  if ((set = single_set (insn)) && rtx_equal_p (SET_DEST (set), size))
	    {
	      if (CONST_INT_P (SET_SRC (set)))
		stack_usage_size = INTVAL (SET_SRC (set));
	      else if ((note = find_reg_equal_equiv_note (insn))
		       && CONST_INT_P (XEXP (note, 0)))
		stack_usage_size = INTVAL (XEXP (note, 0));
	    }
	}

      /* If the size is not constant, try the maximum size.  */
      if (stack_usage_size < 0)
	stack_usage_size = max_size;

      /* If the size is still not constant, we can't say anything.  */
      if (stack_usage_size < 0)
	{
	  current_function_has_unbounded_dynamic_stack_size = 1;
	  stack_usage_size = 0;
	}
    }

  get_dynamic_stack_size (&size, size_align, required_align, &stack_usage_size);

  target = gen_reg_rtx (Pmode);

  /* The size is supposed to be fully adjusted at this point so record it
     if stack usage info is requested.  */
  if (flag_stack_usage_info)
    {
      current_function_dynamic_stack_size += stack_usage_size;

      /* ??? This is gross but the only safe stance in the absence
	 of stack usage oriented flow analysis.  */
      if (!cannot_accumulate)
	current_function_has_unbounded_dynamic_stack_size = 1;
    }

  do_pending_stack_adjust ();

  final_label = NULL;
  final_target = NULL_RTX;

  /* If we are splitting the stack, we need to ask the backend whether
     there is enough room on the current stack.  If there isn't, or if
     the backend doesn't know how to tell us, then we need to call a
     function to allocate memory in some other way.  This memory will
     be released when we release the current stack segment.  The
     effect is that stack allocation becomes less efficient, but at
     least it doesn't cause a stack overflow.  */
  if (flag_split_stack)
    {
      rtx_code_label *available_label;
      rtx ask, space, func;

      available_label = NULL;

      if (targetm.have_split_stack_space_check ())
	{
	  available_label = gen_label_rtx ();

	  /* This instruction will branch to AVAILABLE_LABEL if there
	     are SIZE bytes available on the stack.  */
	  emit_insn (targetm.gen_split_stack_space_check
		     (size, available_label));
	}

      /* The __morestack_allocate_stack_space function will allocate
	 memory using malloc.  If the alignment of the memory returned
	 by malloc does not meet REQUIRED_ALIGN, we increase SIZE to
	 make sure we allocate enough space.  */
      if (MALLOC_ABI_ALIGNMENT >= required_align)
	ask = size;
      else
	ask = expand_binop (Pmode, add_optab, size,
			    gen_int_mode (required_align / BITS_PER_UNIT - 1,
					  Pmode),
			    NULL_RTX, 1, OPTAB_LIB_WIDEN);

      func = init_one_libfunc ("__morestack_allocate_stack_space");

      space = emit_library_call_value (func, target, LCT_NORMAL, Pmode,
				       ask, Pmode);

      if (available_label == NULL_RTX)
	return space;

      final_target = gen_reg_rtx (Pmode);

      emit_move_insn (final_target, space);

      final_label = gen_label_rtx ();
      emit_jump (final_label);

      emit_label (available_label);
    }

  /* We ought to be called always on the toplevel and stack ought to be aligned
     properly.  */
  gcc_assert (multiple_p (stack_pointer_delta,
			  PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT));

  /* If needed, check that we have the required amount of stack.  Take into
     account what has already been checked.  */
  if (STACK_CHECK_MOVING_SP)
    ;
  else if (flag_stack_check == GENERIC_STACK_CHECK)
    probe_stack_range (STACK_OLD_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE,
		       size);
  else if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK)
    probe_stack_range (get_stack_check_protect (), size);

  /* Don't let anti_adjust_stack emit notes.  */
  suppress_reg_args_size = true;

  /* Perform the required allocation from the stack.  Some systems do
     this differently than simply incrementing/decrementing from the
     stack pointer, such as acquiring the space by calling malloc().  */
  if (targetm.have_allocate_stack ())
    {
      class expand_operand ops[2];
      /* We don't have to check against the predicate for operand 0 since
	 TARGET is known to be a pseudo of the proper mode, which must
	 be valid for the operand.  */
      create_fixed_operand (&ops[0], target);
      create_convert_operand_to (&ops[1], size, STACK_SIZE_MODE, true);
      expand_insn (targetm.code_for_allocate_stack, 2, ops);
    }
  else
    {
      poly_int64 saved_stack_pointer_delta;

      if (!STACK_GROWS_DOWNWARD)
	emit_move_insn (target, virtual_stack_dynamic_rtx);

      /* Check stack bounds if necessary.  */
      if (crtl->limit_stack)
	{
	  rtx available;
	  rtx_code_label *space_available = gen_label_rtx ();
	  if (STACK_GROWS_DOWNWARD)
	    available = expand_binop (Pmode, sub_optab,
				      stack_pointer_rtx, stack_limit_rtx,
				      NULL_RTX, 1, OPTAB_WIDEN);
	  else
	    available = expand_binop (Pmode, sub_optab,
				      stack_limit_rtx, stack_pointer_rtx,
				      NULL_RTX, 1, OPTAB_WIDEN);

	  emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
				   space_available);
	  if (targetm.have_trap ())
	    emit_insn (targetm.gen_trap ());
	  else
	    error ("stack limits not supported on this target");
	  emit_barrier ();
	  emit_label (space_available);
	}

      saved_stack_pointer_delta = stack_pointer_delta;

      /* If stack checking or stack clash protection is requested,
	 then probe the stack while allocating space from it.  */
      if (flag_stack_check && STACK_CHECK_MOVING_SP)
	anti_adjust_stack_and_probe (size, false);
      else if (flag_stack_clash_protection)
	anti_adjust_stack_and_probe_stack_clash (size);
      else
	anti_adjust_stack (size);

      /* Even if size is constant, don't modify stack_pointer_delta.
	 The constant size alloca should preserve
	 crtl->preferred_stack_boundary alignment.  */
      stack_pointer_delta = saved_stack_pointer_delta;

      if (STACK_GROWS_DOWNWARD)
	emit_move_insn (target, virtual_stack_dynamic_rtx);
    }

  suppress_reg_args_size = false;

  /* Finish up the split stack handling.  */
  if (final_label != NULL_RTX)
    {
      gcc_assert (flag_split_stack);
      emit_move_insn (final_target, target);
      emit_label (final_label);
      target = final_target;
    }

  target = align_dynamic_address (target, required_align);

  /* Now that we've committed to a return value, mark its alignment.  */
  mark_reg_pointer (target, required_align);

  /* Record the new stack level.  */
  record_new_stack_level ();

  return target;
}
7072df0a
DV
1597
1598/* Return an rtx representing the address of an area of memory already
1599 statically pushed onto the stack in the virtual stack vars area. (It is
1600 assumed that the area is allocated in the function prologue.)
1601
1602 Any required stack pointer alignment is preserved.
1603
1604 OFFSET is the offset of the area into the virtual stack vars area.
1605
1606 REQUIRED_ALIGN is the alignment (in bits) required for the region
0854b584
MM
1607 of memory.
1608
1609 BASE is the rtx of the base of this virtual stack vars area.
1610 The only time this is not `virtual_stack_vars_rtx` is when tagging pointers
1611 on the stack. */
7072df0a
DV
1612
1613rtx
0854b584 1614get_dynamic_stack_base (poly_int64 offset, unsigned required_align, rtx base)
7072df0a
DV
1615{
1616 rtx target;
1617
1618 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
1619 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
1620
1621 target = gen_reg_rtx (Pmode);
0854b584 1622 emit_move_insn (target, base);
7072df0a
DV
1623 target = expand_binop (Pmode, add_optab, target,
1624 gen_int_mode (offset, Pmode),
1625 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1626 target = align_dynamic_address (target, required_align);
1627
1628 /* Now that we've committed to a return value, mark its alignment. */
1629 mark_reg_pointer (target, required_align);
1630
1631 return target;
1632}
18ca7dab 1633\f
d9b3eb63 1634/* A front end may want to override GCC's stack checking by providing a
14a774a9
RK
1635 run-time routine to call to check the stack, so provide a mechanism for
1636 calling that routine. */
1637
e2500fed 1638static GTY(()) rtx stack_check_libfunc;
14a774a9
RK
1639
1640void
d477d1fe 1641set_stack_check_libfunc (const char *libfunc_name)
14a774a9 1642{
d477d1fe
SB
1643 gcc_assert (stack_check_libfunc == NULL_RTX);
1644 stack_check_libfunc = gen_rtx_SYMBOL_REF (Pmode, libfunc_name);
97909f80
EB
1645 tree ptype
1646 = Pmode == ptr_mode
1647 ? ptr_type_node
1648 : lang_hooks.types.type_for_mode (Pmode, 1);
1649 tree ftype
1650 = build_function_type_list (void_type_node, ptype, NULL_TREE);
3cf3da88 1651 tree decl = build_decl (UNKNOWN_LOCATION, FUNCTION_DECL,
97909f80 1652 get_identifier (libfunc_name), ftype);
3cf3da88
EB
1653 DECL_EXTERNAL (decl) = 1;
1654 SET_SYMBOL_REF_DECL (stack_check_libfunc, decl);
14a774a9
RK
1655}
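A minimal usage sketch of the hook above (the front-end name and routine name are hypothetical, not taken from any real front end): a front end that supplies its own runtime stack-checking routine registers it once during initialization, and the assert in set_stack_check_libfunc guards against a second registration.

  /* Hypothetical front-end initialization (illustrative only; the routine
     name "__example_stack_check" is an assumption).  */
  static void
  example_register_stack_check (void)
  {
    set_stack_check_libfunc ("__example_stack_check");
  }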
1656\f
edff2491
RK
1657/* Emit one stack probe at ADDRESS, an address within the stack. */
1658
260c8ba3 1659void
502b8322 1660emit_stack_probe (rtx address)
edff2491 1661{
10169a8b 1662 if (targetm.have_probe_stack_address ())
5c35bc3e 1663 {
99b1c316 1664 class expand_operand ops[1];
5c35bc3e
KT
1665 insn_code icode = targetm.code_for_probe_stack_address;
1666 create_address_operand (ops, address);
1667 maybe_legitimize_operands (icode, 0, 1, ops);
1668 expand_insn (icode, 1, ops);
1669 }
7b84aac0 1670 else
7b84aac0
EB
1671 {
1672 rtx memref = gen_rtx_MEM (word_mode, address);
edff2491 1673
7b84aac0 1674 MEM_VOLATILE_P (memref) = 1;
5c35bc3e 1675 memref = validize_mem (memref);
edff2491 1676
7b84aac0 1677 /* See if we have an insn to probe the stack. */
10169a8b 1678 if (targetm.have_probe_stack ())
5c35bc3e 1679 emit_insn (targetm.gen_probe_stack (memref));
7b84aac0 1680 else
5c35bc3e 1681 emit_move_insn (memref, const0_rtx);
7b84aac0 1682 }
edff2491
RK
1683}
1684
d9b3eb63 1685/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
d809253a
EB
1686	 FIRST is a constant and SIZE is a Pmode RTX.  These are offsets from
1687 the current stack pointer. STACK_GROWS_DOWNWARD says whether to add
1688 or subtract them from the stack pointer. */
1689
1690#define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP)
edff2491 1691
62f9f30b 1692#if STACK_GROWS_DOWNWARD
edff2491 1693#define STACK_GROW_OP MINUS
d809253a
EB
1694#define STACK_GROW_OPTAB sub_optab
1695#define STACK_GROW_OFF(off) -(off)
edff2491
RK
1696#else
1697#define STACK_GROW_OP PLUS
d809253a
EB
1698#define STACK_GROW_OPTAB add_optab
1699#define STACK_GROW_OFF(off) (off)
edff2491
RK
1700#endif
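For illustration: on a target where the stack grows downward (the common case), STACK_GROW_OP is MINUS, STACK_GROW_OPTAB is sub_optab, and STACK_GROW_OFF (16) expands to -16, so a probe 16 bytes into the newly exposed region is formed as stack_pointer_rtx + (-16); on an upward-growing stack the signs flip accordingly.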
1701
1702void
502b8322 1703probe_stack_range (HOST_WIDE_INT first, rtx size)
edff2491 1704{
4b6c1672
RK
1705 /* First ensure SIZE is Pmode. */
1706 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
1707 size = convert_to_mode (Pmode, size, 1);
1708
d809253a
EB
1709 /* Next see if we have a function to check the stack. */
1710 if (stack_check_libfunc)
f5f5363f 1711 {
d809253a 1712 rtx addr = memory_address (Pmode,
2b3aadfc
RH
1713 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1714 stack_pointer_rtx,
0a81f074
RS
1715 plus_constant (Pmode,
1716 size, first)));
db69559b
RS
1717 emit_library_call (stack_check_libfunc, LCT_THROW, VOIDmode,
1718 addr, Pmode);
f5f5363f 1719 }
14a774a9 1720
d809253a 1721 /* Next see if we have an insn to check the stack. */
10169a8b 1722 else if (targetm.have_check_stack ())
edff2491 1723 {
99b1c316 1724 class expand_operand ops[1];
d809253a
EB
1725 rtx addr = memory_address (Pmode,
1726 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1727 stack_pointer_rtx,
0a81f074
RS
1728 plus_constant (Pmode,
1729 size, first)));
d6a6a07a 1730 bool success;
a5c7d693 1731 create_input_operand (&ops[0], addr, Pmode);
10169a8b 1732 success = maybe_expand_insn (targetm.code_for_check_stack, 1, ops);
d6a6a07a 1733 gcc_assert (success);
edff2491 1734 }
edff2491 1735
d809253a
EB
1736 /* Otherwise we have to generate explicit probes. If we have a constant
1737 small number of them to generate, that's the easy case. */
1738 else if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
edff2491 1739 {
d809253a
EB
1740 HOST_WIDE_INT isize = INTVAL (size), i;
1741 rtx addr;
1742
1743 /* Probe at FIRST + N * PROBE_INTERVAL for values of N from 1 until
1744 it exceeds SIZE. If only one probe is needed, this will not
1745 generate any code. Then probe at FIRST + SIZE. */
1746 for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
1747 {
1748 addr = memory_address (Pmode,
0a81f074 1749 plus_constant (Pmode, stack_pointer_rtx,
d809253a
EB
1750 STACK_GROW_OFF (first + i)));
1751 emit_stack_probe (addr);
1752 }
1753
1754 addr = memory_address (Pmode,
0a81f074 1755 plus_constant (Pmode, stack_pointer_rtx,
d809253a
EB
1756 STACK_GROW_OFF (first + isize)));
1757 emit_stack_probe (addr);
edff2491
RK
1758 }
1759
d809253a
EB
1760 /* In the variable case, do the same as above, but in a loop. Note that we
1761 must be extra careful with variables wrapping around because we might be
1762 at the very top (or the very bottom) of the address space and we have to
1763 be able to handle this case properly; in particular, we use an equality
1764 test for the loop condition. */
edff2491
RK
1765 else
1766 {
d809253a 1767 rtx rounded_size, rounded_size_op, test_addr, last_addr, temp;
528a80c1
DM
1768 rtx_code_label *loop_lab = gen_label_rtx ();
1769 rtx_code_label *end_lab = gen_label_rtx ();
edff2491 1770
d809253a
EB
1771 /* Step 1: round SIZE to the previous multiple of the interval. */
1772
1773 /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL */
1774 rounded_size
69a59f0f
RS
1775 = simplify_gen_binary (AND, Pmode, size,
1776 gen_int_mode (-PROBE_INTERVAL, Pmode));
d809253a
EB
1777 rounded_size_op = force_operand (rounded_size, NULL_RTX);
1778
1779
1780 /* Step 2: compute initial and final value of the loop counter. */
1781
1782 /* TEST_ADDR = SP + FIRST. */
1783 test_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1784 stack_pointer_rtx,
4789c0ce
RS
1785 gen_int_mode (first, Pmode)),
1786 NULL_RTX);
d809253a
EB
1787
1788 /* LAST_ADDR = SP + FIRST + ROUNDED_SIZE. */
1789 last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1790 test_addr,
1791 rounded_size_op), NULL_RTX);
1792
1793
1794 /* Step 3: the loop
1795
1796 while (TEST_ADDR != LAST_ADDR)
1797 {
1798 TEST_ADDR = TEST_ADDR + PROBE_INTERVAL
1799 probe at TEST_ADDR
1800 }
1801
1802 probes at FIRST + N * PROBE_INTERVAL for values of N from 1
1803 until it is equal to ROUNDED_SIZE. */
edff2491
RK
1804
1805 emit_label (loop_lab);
edff2491 1806
d809253a
EB
1807 /* Jump to END_LAB if TEST_ADDR == LAST_ADDR. */
1808 emit_cmp_and_jump_insns (test_addr, last_addr, EQ, NULL_RTX, Pmode, 1,
1809 end_lab);
1810
1811 /* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL. */
1812 temp = expand_binop (Pmode, STACK_GROW_OPTAB, test_addr,
2f1cd2eb 1813 gen_int_mode (PROBE_INTERVAL, Pmode), test_addr,
edff2491 1814 1, OPTAB_WIDEN);
edff2491 1815
5b0264cb 1816 gcc_assert (temp == test_addr);
edff2491 1817
d809253a
EB
1818 /* Probe at TEST_ADDR. */
1819 emit_stack_probe (test_addr);
1820
1821 emit_jump (loop_lab);
1822
edff2491
RK
1823 emit_label (end_lab);
1824
d809253a
EB
1825
1826 /* Step 4: probe at FIRST + SIZE if we cannot assert at compile-time
1827 that SIZE is equal to ROUNDED_SIZE. */
1828
1829 /* TEMP = SIZE - ROUNDED_SIZE. */
1830 temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
1831 if (temp != const0_rtx)
1832 {
1833 rtx addr;
1834
32990d5b 1835 if (CONST_INT_P (temp))
d809253a
EB
1836 {
1837		  /* Use [base + disp] addressing mode if supported. */
1838 HOST_WIDE_INT offset = INTVAL (temp);
1839 addr = memory_address (Pmode,
0a81f074 1840 plus_constant (Pmode, last_addr,
d809253a
EB
1841 STACK_GROW_OFF (offset)));
1842 }
1843 else
1844 {
1845 /* Manual CSE if the difference is not known at compile-time. */
1846 temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
1847 addr = memory_address (Pmode,
1848 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1849 last_addr, temp));
1850 }
1851
1852 emit_stack_probe (addr);
1853 }
edff2491 1854 }
eabcc725
EB
1855
1856 /* Make sure nothing is scheduled before we are done. */
1857 emit_insn (gen_blockage ());
edff2491 1858}
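As a worked illustration of the constant-size path above (values assumed, not target data: a 4096-byte PROBE_INTERVAL, FIRST = 0, SIZE = 10000), probes are emitted at offsets 4096 and 8192, with the final probe at FIRST + SIZE = 10000, each applied in the stack-growth direction. A standalone sketch of that offset sequence, independent of the rtl machinery (the EXAMPLE_* names are illustrative):

  /* Illustrative only: enumerate the offsets the constant-size path of
     probe_stack_range would probe, assuming a 4096-byte interval.  */
  #include <stdio.h>

  #define EXAMPLE_PROBE_INTERVAL 4096

  static void
  example_probe_offsets (long first, long size)
  {
    for (long i = EXAMPLE_PROBE_INTERVAL; i < size; i += EXAMPLE_PROBE_INTERVAL)
      printf ("probe at offset %ld\n", first + i);   /* interior probes */
    printf ("probe at offset %ld\n", first + size);  /* final probe at FIRST + SIZE */
  }

  int
  main (void)
  {
    example_probe_offsets (0, 10000);  /* prints 4096, 8192, 10000 */
    return 0;
  }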
d809253a 1859
8c1dd970
JL
1860/* Compute parameters for stack clash probing a dynamic stack
1861 allocation of SIZE bytes.
1862
1863 We compute ROUNDED_SIZE, LAST_ADDR, RESIDUAL and PROBE_INTERVAL.
1864
1865 Additionally we conditionally dump the type of probing that will
1866 be needed given the values computed. */
1867
1868void
1869compute_stack_clash_protection_loop_data (rtx *rounded_size, rtx *last_addr,
1870 rtx *residual,
1871 HOST_WIDE_INT *probe_interval,
1872 rtx size)
1873{
1874 /* Round SIZE down to STACK_CLASH_PROTECTION_PROBE_INTERVAL */
1875 *probe_interval
028d4092 1876 = 1 << param_stack_clash_protection_probe_interval;
8c1dd970
JL
1877 *rounded_size = simplify_gen_binary (AND, Pmode, size,
1878 GEN_INT (-*probe_interval));
1879
1880 /* Compute the value of the stack pointer for the last iteration.
1881 It's just SP + ROUNDED_SIZE. */
1882 rtx rounded_size_op = force_operand (*rounded_size, NULL_RTX);
1883 *last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1884 stack_pointer_rtx,
1885 rounded_size_op),
1886 NULL_RTX);
1887
1888 /* Compute any residuals not allocated by the loop above. Residuals
1889	 are just SIZE - ROUNDED_SIZE. */
1890 *residual = simplify_gen_binary (MINUS, Pmode, size, *rounded_size);
1891
1892 /* Dump key information to make writing tests easy. */
1893 if (dump_file)
1894 {
1895 if (*rounded_size == CONST0_RTX (Pmode))
1896 fprintf (dump_file,
1897 "Stack clash skipped dynamic allocation and probing loop.\n");
94c23e39 1898 else if (CONST_INT_P (*rounded_size)
8c1dd970
JL
1899 && INTVAL (*rounded_size) <= 4 * *probe_interval)
1900 fprintf (dump_file,
1901 "Stack clash dynamic allocation and probing inline.\n");
94c23e39 1902 else if (CONST_INT_P (*rounded_size))
8c1dd970
JL
1903 fprintf (dump_file,
1904 "Stack clash dynamic allocation and probing in "
1905 "rotated loop.\n");
1906 else
1907 fprintf (dump_file,
1908 "Stack clash dynamic allocation and probing in loop.\n");
1909
1910 if (*residual != CONST0_RTX (Pmode))
1911 fprintf (dump_file,
1912 "Stack clash dynamic allocation and probing residuals.\n");
1913 else
1914 fprintf (dump_file,
1915 "Stack clash skipped dynamic allocation and "
1916 "probing residuals.\n");
1917 }
1918}
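A worked example under assumed values (a 4096-byte probe interval, SIZE = 10000): ROUNDED_SIZE = 10000 & -4096 = 8192, so the allocation loop covers two 4096-byte chunks, and RESIDUAL = 10000 - 8192 = 1808 bytes is left for the caller's residual handling; since ROUNDED_SIZE is a constant no larger than four intervals, the dump would report the inline (unrolled) variant.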
1919
1920/* Emit the start of an allocate/probe loop for stack
1921 clash protection.
1922
1923 LOOP_LAB and END_LAB are returned for use when we emit the
1924 end of the loop.
1925
1926	 LAST_ADDR is the value for SP which stops the loop. */
1927void
1928emit_stack_clash_protection_probe_loop_start (rtx *loop_lab,
1929 rtx *end_lab,
1930 rtx last_addr,
1931 bool rotated)
1932{
1933 /* Essentially we want to emit any setup code, the top of loop
1934 label and the comparison at the top of the loop. */
1935 *loop_lab = gen_label_rtx ();
1936 *end_lab = gen_label_rtx ();
1937
1938 emit_label (*loop_lab);
1939 if (!rotated)
1940 emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
1941 Pmode, 1, *end_lab);
1942}
1943
1944/* Emit the end of a stack clash probing loop.
1945
1946 This consists of just the jump back to LOOP_LAB and
1947 emitting END_LOOP after the loop. */
1948
1949void
1950emit_stack_clash_protection_probe_loop_end (rtx loop_lab, rtx end_loop,
1951 rtx last_addr, bool rotated)
1952{
1953 if (rotated)
1954 emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, NE, NULL_RTX,
1955 Pmode, 1, loop_lab);
1956 else
1957 emit_jump (loop_lab);
1958
1959 emit_label (end_loop);
1960
1961}
1962
1963/* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
1964 while probing it. This pushes when SIZE is positive. SIZE need not
1965 be constant.
1966
1967	 This is subtly different from anti_adjust_stack_and_probe, in order to
1968	 prevent stack-clash attacks:
1969
1970	 1. It must assume no knowledge of the probing state; any allocation
1971 must probe.
1972
1973 Consider the case of a 1 byte alloca in a loop. If the sum of the
1974 allocations is large, then this could be used to jump the guard if
1975 probes were not emitted.
1976
1977 2. It never skips probes, whereas anti_adjust_stack_and_probe will
f569026a
EB
1978 skip the probe on the first PROBE_INTERVAL on the assumption it
1979 was already done in the prologue and in previous allocations.
8c1dd970
JL
1980
1981 3. It only allocates and probes SIZE bytes, it does not need to
1982 allocate/probe beyond that because this probing style does not
1983 guarantee signal handling capability if the guard is hit. */
1984
d3e5bae1 1985void
8c1dd970
JL
1986anti_adjust_stack_and_probe_stack_clash (rtx size)
1987{
1988 /* First ensure SIZE is Pmode. */
1989 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
1990 size = convert_to_mode (Pmode, size, 1);
1991
1992 /* We can get here with a constant size on some targets. */
1993 rtx rounded_size, last_addr, residual;
2c25083e
TC
1994 HOST_WIDE_INT probe_interval, probe_range;
1995 bool target_probe_range_p = false;
8c1dd970
JL
1996 compute_stack_clash_protection_loop_data (&rounded_size, &last_addr,
1997 &residual, &probe_interval, size);
1998
2c25083e
TC
1999 /* Get the back-end specific probe ranges. */
2000 probe_range = targetm.stack_clash_protection_alloca_probe_range ();
2001 target_probe_range_p = probe_range != 0;
2002 gcc_assert (probe_range >= 0);
2003
2004 /* If no back-end specific range defined, default to the top of the newly
2005 allocated range. */
2006 if (probe_range == 0)
2007 probe_range = probe_interval - GET_MODE_SIZE (word_mode);
2008
8c1dd970
JL
2009 if (rounded_size != CONST0_RTX (Pmode))
2010 {
94c23e39
JL
2011 if (CONST_INT_P (rounded_size)
2012 && INTVAL (rounded_size) <= 4 * probe_interval)
8c1dd970
JL
2013 {
2014 for (HOST_WIDE_INT i = 0;
2015 i < INTVAL (rounded_size);
2016 i += probe_interval)
2017 {
2018 anti_adjust_stack (GEN_INT (probe_interval));
8c1dd970
JL
2019 /* The prologue does not probe residuals. Thus the offset
2020		 here is chosen to probe just beyond what the prologue had already
2021 allocated. */
2022 emit_stack_probe (plus_constant (Pmode, stack_pointer_rtx,
2c25083e
TC
2023 probe_range));
2024
8c1dd970
JL
2025 emit_insn (gen_blockage ());
2026 }
2027 }
2028 else
2029 {
2030 rtx loop_lab, end_loop;
94c23e39 2031 bool rotate_loop = CONST_INT_P (rounded_size);
8c1dd970
JL
2032 emit_stack_clash_protection_probe_loop_start (&loop_lab, &end_loop,
2033 last_addr, rotate_loop);
2034
2035 anti_adjust_stack (GEN_INT (probe_interval));
2036
2037 /* The prologue does not probe residuals. Thus the offset here
2c25083e
TC
2038	 is chosen to probe just beyond what the prologue had already
2039 allocated. */
8c1dd970 2040 emit_stack_probe (plus_constant (Pmode, stack_pointer_rtx,
2c25083e 2041 probe_range));
8c1dd970
JL
2042
2043 emit_stack_clash_protection_probe_loop_end (loop_lab, end_loop,
2044 last_addr, rotate_loop);
2045 emit_insn (gen_blockage ());
2046 }
2047 }
2048
2049 if (residual != CONST0_RTX (Pmode))
2050 {
86aa0691
JL
2051 rtx label = NULL_RTX;
2052 /* RESIDUAL could be zero at runtime and in that case *sp could
2053 hold live data. Furthermore, we do not want to probe into the
2054 red zone.
2055
2c25083e
TC
2056 If TARGET_PROBE_RANGE_P then the target has promised it's safe to
2057	 probe at offset 0, in which case we no longer have to check for
2058	 RESIDUAL == 0.  However, we still need to probe at the right offset
2059 when RESIDUAL > PROBE_RANGE, in which case we probe at PROBE_RANGE.
2060
2061 If !TARGET_PROBE_RANGE_P then go ahead and just guard the probe at *sp
2062 on RESIDUAL != 0 at runtime if RESIDUAL is not a compile time constant.
2063 */
2064 anti_adjust_stack (residual);
2065
86aa0691
JL
2066 if (!CONST_INT_P (residual))
2067 {
2068 label = gen_label_rtx ();
2c25083e
TC
2069 rtx_code op = target_probe_range_p ? LT : EQ;
2070 rtx probe_cmp_value = target_probe_range_p
2071 ? gen_rtx_CONST_INT (GET_MODE (residual), probe_range)
2072 : CONST0_RTX (GET_MODE (residual));
86aa0691 2073
2c25083e
TC
2074 if (target_probe_range_p)
2075 emit_stack_probe (stack_pointer_rtx);
8c1dd970 2076
2c25083e
TC
2077 emit_cmp_and_jump_insns (residual, probe_cmp_value,
2078 op, NULL_RTX, Pmode, 1, label);
2079 }
2a6fc987 2080
2c25083e
TC
2081 rtx x = NULL_RTX;
2082
2083 /* If RESIDUAL isn't a constant and TARGET_PROBE_RANGE_P then we probe up
2084	 by the ABI-defined safe value. */
2085 if (!CONST_INT_P (residual) && target_probe_range_p)
2086 x = GEN_INT (probe_range);
2087	 /* If RESIDUAL is a constant but smaller than the ABI-defined safe value,
2088	    we still want to probe up, but the safest amount is a word. */
2089 else if (target_probe_range_p)
8c1dd970 2090 {
2c25083e
TC
2091 if (INTVAL (residual) <= probe_range)
2092 x = GEN_INT (GET_MODE_SIZE (word_mode));
2093 else
2094 x = GEN_INT (probe_range);
8c1dd970 2095 }
2c25083e
TC
2096 else
2097 /* If nothing else, probe at the top of the new allocation. */
2098 x = plus_constant (Pmode, residual, -GET_MODE_SIZE (word_mode));
2099
2100 emit_stack_probe (gen_rtx_PLUS (Pmode, stack_pointer_rtx, x));
86aa0691 2101
86aa0691 2102 emit_insn (gen_blockage ());
2c25083e
TC
2103 if (!CONST_INT_P (residual))
2104 emit_label (label);
8c1dd970
JL
2105 }
2106}
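Continuing the worked example above (assumed: 4096-byte interval, SIZE = 10000, 8-byte words, no target-specific probe range): after the two-interval loop, the 1808-byte residual is allocated by anti_adjust_stack, and because it is a compile-time constant no runtime RESIDUAL != 0 check is emitted; the single residual probe lands at stack_pointer_rtx + (1808 - 8), i.e. the topmost word of the newly allocated residual.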
2107
2108
c35af30f
EB
2109/* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
2110 while probing it. This pushes when SIZE is positive. SIZE need not
2111 be constant. If ADJUST_BACK is true, adjust back the stack pointer
2112 by plus SIZE at the end. */
d809253a 2113
c35af30f
EB
2114void
2115anti_adjust_stack_and_probe (rtx size, bool adjust_back)
d809253a 2116{
c35af30f
EB
2117 /* We skip the probe for the first interval + a small dope of 4 words and
2118 probe that many bytes past the specified size to maintain a protection
2119	 area at the bottom of the stack. */
d809253a
EB
2120 const int dope = 4 * UNITS_PER_WORD;
2121
2122 /* First ensure SIZE is Pmode. */
2123 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
2124 size = convert_to_mode (Pmode, size, 1);
2125
2126 /* If we have a constant small number of probes to generate, that's the
2127 easy case. */
32990d5b 2128 if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
d809253a
EB
2129 {
2130 HOST_WIDE_INT isize = INTVAL (size), i;
2131 bool first_probe = true;
2132
260c8ba3 2133 /* Adjust SP and probe at PROBE_INTERVAL + N * PROBE_INTERVAL for
d809253a
EB
2134 values of N from 1 until it exceeds SIZE. If only one probe is
2135 needed, this will not generate any code. Then adjust and probe
2136 to PROBE_INTERVAL + SIZE. */
2137 for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
2138 {
2139 if (first_probe)
2140 {
2141 anti_adjust_stack (GEN_INT (2 * PROBE_INTERVAL + dope));
2142 first_probe = false;
2143 }
2144 else
2145 anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
2146 emit_stack_probe (stack_pointer_rtx);
2147 }
2148
2149 if (first_probe)
0a81f074 2150 anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
d809253a 2151 else
0a81f074 2152 anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL - i));
d809253a
EB
2153 emit_stack_probe (stack_pointer_rtx);
2154 }
2155
2156 /* In the variable case, do the same as above, but in a loop. Note that we
2157 must be extra careful with variables wrapping around because we might be
2158 at the very top (or the very bottom) of the address space and we have to
2159 be able to handle this case properly; in particular, we use an equality
2160 test for the loop condition. */
2161 else
2162 {
2163 rtx rounded_size, rounded_size_op, last_addr, temp;
528a80c1
DM
2164 rtx_code_label *loop_lab = gen_label_rtx ();
2165 rtx_code_label *end_lab = gen_label_rtx ();
d809253a
EB
2166
2167
2168 /* Step 1: round SIZE to the previous multiple of the interval. */
2169
2170 /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL */
2171 rounded_size
69a59f0f
RS
2172 = simplify_gen_binary (AND, Pmode, size,
2173 gen_int_mode (-PROBE_INTERVAL, Pmode));
d809253a
EB
2174 rounded_size_op = force_operand (rounded_size, NULL_RTX);
2175
2176
2177 /* Step 2: compute initial and final value of the loop counter. */
2178
2179 /* SP = SP_0 + PROBE_INTERVAL. */
2180 anti_adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
2181
2182 /* LAST_ADDR = SP_0 + PROBE_INTERVAL + ROUNDED_SIZE. */
2183 last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
2184 stack_pointer_rtx,
2185 rounded_size_op), NULL_RTX);
2186
2187
2188 /* Step 3: the loop
2189
260c8ba3
EB
2190 while (SP != LAST_ADDR)
2191 {
2192 SP = SP + PROBE_INTERVAL
2193 probe at SP
2194 }
d809253a 2195
260c8ba3 2196 adjusts SP and probes at PROBE_INTERVAL + N * PROBE_INTERVAL for
d809253a
EB
2197 values of N from 1 until it is equal to ROUNDED_SIZE. */
2198
2199 emit_label (loop_lab);
2200
2201 /* Jump to END_LAB if SP == LAST_ADDR. */
2202 emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
2203 Pmode, 1, end_lab);
2204
2205 /* SP = SP + PROBE_INTERVAL and probe at SP. */
2206 anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
2207 emit_stack_probe (stack_pointer_rtx);
2208
2209 emit_jump (loop_lab);
2210
2211 emit_label (end_lab);
2212
2213
260c8ba3 2214 /* Step 4: adjust SP and probe at PROBE_INTERVAL + SIZE if we cannot
d809253a
EB
2215 assert at compile-time that SIZE is equal to ROUNDED_SIZE. */
2216
2217 /* TEMP = SIZE - ROUNDED_SIZE. */
2218 temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
2219 if (temp != const0_rtx)
2220 {
2221 /* Manual CSE if the difference is not known at compile-time. */
2222 if (GET_CODE (temp) != CONST_INT)
2223 temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
2224 anti_adjust_stack (temp);
2225 emit_stack_probe (stack_pointer_rtx);
2226 }
2227 }
2228
c35af30f
EB
2229 /* Adjust back and account for the additional first interval. */
2230 if (adjust_back)
0a81f074 2231 adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
c35af30f
EB
2232 else
2233 adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
d809253a
EB
2234}
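For a rough worked example of the constant path (assumed: 4096-byte PROBE_INTERVAL, 8-byte words so dope = 32, SIZE = 10000): the first iteration adjusts the stack by 2*4096 + 32 = 8224 bytes and probes, the second adjusts by 4096 and probes, the loop then stops and a final adjustment of SIZE + PROBE_INTERVAL - 12288 = 1808 bytes is made before the last probe; the closing adjust_stack then gives back SIZE + 4096 + 32 bytes when ADJUST_BACK is set, or just 4096 + 32 otherwise.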
2235
18ca7dab
RK
2236/* Return an rtx representing the register or memory location
2237 in which a scalar value of data type VALTYPE
2238 was returned by a function call to function FUNC.
1d636cc6
RG
2239 FUNC is a FUNCTION_DECL, FNTYPE a FUNCTION_TYPE node if the precise
2240 function is known, otherwise 0.
4dc07bd7
JJ
2241 OUTGOING is 1 if on a machine with register windows this function
2242 should return the register in which the function will put its result
30f7a378 2243 and 0 otherwise. */
18ca7dab
RK
2244
2245rtx
586de218 2246hard_function_value (const_tree valtype, const_tree func, const_tree fntype,
502b8322 2247 int outgoing ATTRIBUTE_UNUSED)
18ca7dab 2248{
4dc07bd7 2249 rtx val;
770ae6cc 2250
1d636cc6 2251 val = targetm.calls.function_value (valtype, func ? func : fntype, outgoing);
770ae6cc 2252
f8cfc6aa 2253 if (REG_P (val)
e1a4071f
JL
2254 && GET_MODE (val) == BLKmode)
2255 {
974aedcc 2256 unsigned HOST_WIDE_INT bytes = arg_int_size_in_bytes (valtype);
59b51186 2257 opt_scalar_int_mode tmpmode;
770ae6cc 2258
d9b3eb63 2259 /* int_size_in_bytes can return -1. We don't need a check here
535a42b1
NS
2260 since the value of bytes will then be large enough that no
2261 mode will match anyway. */
d9b3eb63 2262
c94843d2 2263 FOR_EACH_MODE_IN_CLASS (tmpmode, MODE_INT)
0fb7aeda
KH
2264 {
2265 /* Have we found a large enough mode? */
59b51186 2266 if (GET_MODE_SIZE (tmpmode.require ()) >= bytes)
0fb7aeda
KH
2267 break;
2268 }
e1a4071f 2269
59b51186 2270 PUT_MODE (val, tmpmode.require ());
d9b3eb63 2271 }
e1a4071f 2272 return val;
18ca7dab
RK
2273}
2274
2275/* Return an rtx representing the register or memory location
2276 in which a scalar value of mode MODE was returned by a library call. */
2277
2278rtx
ef4bddc2 2279hard_libcall_value (machine_mode mode, rtx fun)
18ca7dab 2280{
390b17c2 2281 return targetm.calls.libcall_value (mode, fun);
18ca7dab 2282}
0c5e217d
RS
2283
2284/* Look up the tree code for a given rtx code
5c88ea94 2285 to provide the arithmetic operation for real_arithmetic.
0c5e217d
RS
2286 The function returns an int because the caller may not know
2287 what `enum tree_code' means. */
2288
2289int
502b8322 2290rtx_to_tree_code (enum rtx_code code)
0c5e217d
RS
2291{
2292 enum tree_code tcode;
2293
2294 switch (code)
2295 {
2296 case PLUS:
2297 tcode = PLUS_EXPR;
2298 break;
2299 case MINUS:
2300 tcode = MINUS_EXPR;
2301 break;
2302 case MULT:
2303 tcode = MULT_EXPR;
2304 break;
2305 case DIV:
2306 tcode = RDIV_EXPR;
2307 break;
2308 case SMIN:
2309 tcode = MIN_EXPR;
2310 break;
2311 case SMAX:
2312 tcode = MAX_EXPR;
2313 break;
2314 default:
2315 tcode = LAST_AND_UNUSED_TREE_CODE;
2316 break;
2317 }
2318 return ((int) tcode);
2319}
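For example, rtx_to_tree_code (MULT) returns (int) MULT_EXPR, while a code not handled above, such as ASHIFT, maps to LAST_AND_UNUSED_TREE_CODE.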
e2500fed
GK
2320
2321#include "gt-explow.h"