/* Subroutines for manipulating rtx's in semantically interesting ways.
   Copyright (C) 1987-2019 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "target.h"
#include "function.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "tm_p.h"
#include "expmed.h"
#include "profile-count.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "stor-layout.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "expr.h"
#include "common/common-target.h"
#include "output.h"
#include "params.h"

static rtx break_out_memory_refs (rtx);
static void anti_adjust_stack_and_probe_stack_clash (rtx);


/* Truncate and perhaps sign-extend C as appropriate for MODE.  */

HOST_WIDE_INT
trunc_int_for_mode (HOST_WIDE_INT c, machine_mode mode)
{
  /* Not scalar_int_mode because we also allow pointer bound modes.  */
  scalar_mode smode = as_a <scalar_mode> (mode);
  int width = GET_MODE_PRECISION (smode);

  /* You want to truncate to a _what_?  */
  gcc_assert (SCALAR_INT_MODE_P (mode));

  /* Canonicalize BImode to 0 and STORE_FLAG_VALUE.  */
  if (smode == BImode)
    return c & 1 ? STORE_FLAG_VALUE : 0;

  /* Sign-extend for the requested mode.  */

  if (width < HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT sign = 1;
      sign <<= width - 1;
      c &= (sign << 1) - 1;
      c ^= sign;
      c -= sign;
    }

  return c;
}
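
/* A worked example of the masking trick above (illustrative values, not
   from the original sources): truncating C = 0xff to QImode, where
   WIDTH is 8 on a 64-bit HOST_WIDE_INT host:

     sign = 1 << 7            ->  0x80
     c &= (sign << 1) - 1     ->  c = 0xff  (mask to the low 8 bits)
     c ^= sign                ->  c = 0x7f  (flip the sign bit)
     c -= sign                ->  c = -1    (re-bias)

   so 0xff comes back as the sign-extended 8-bit value -1.  */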

/* Likewise for polynomial values, using the sign-extended representation
   for each individual coefficient.  */

poly_int64
trunc_int_for_mode (poly_int64 x, machine_mode mode)
{
  for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
    x.coeffs[i] = trunc_int_for_mode (x.coeffs[i], mode);
  return x;
}

/* Return an rtx for the sum of X and the integer C, given that X has
   mode MODE.  INPLACE is true if X can be modified in place or false
   if it must be treated as immutable.  */

rtx
plus_constant (machine_mode mode, rtx x, poly_int64 c, bool inplace)
{
  RTX_CODE code;
  rtx y;
  rtx tem;
  int all_constant = 0;

  gcc_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);

  if (known_eq (c, 0))
    return x;

 restart:

  code = GET_CODE (x);
  y = x;

  switch (code)
    {
    CASE_CONST_SCALAR_INT:
      return immed_wide_int_const (wi::add (rtx_mode_t (x, mode), c), mode);
    case MEM:
      /* If this is a reference to the constant pool, try replacing it with
	 a reference to a new constant.  If the resulting address isn't
	 valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
	{
	  rtx cst = get_pool_constant (XEXP (x, 0));

	  if (GET_CODE (cst) == CONST_VECTOR
	      && GET_MODE_INNER (GET_MODE (cst)) == mode)
	    {
	      cst = gen_lowpart (mode, cst);
	      gcc_assert (cst);
	    }
	  if (GET_MODE (cst) == VOIDmode || GET_MODE (cst) == mode)
	    {
	      tem = plus_constant (mode, cst, c);
	      tem = force_const_mem (GET_MODE (x), tem);
	      /* Targets may disallow some constants in the constant pool, thus
		 force_const_mem may return NULL_RTX.  */
	      if (tem && memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
		return tem;
	    }
	}
      break;

    case CONST:
      /* If adding to something entirely constant, set a flag
	 so that we can add a CONST around the result.  */
      if (inplace && shared_const_p (x))
	inplace = false;
      x = XEXP (x, 0);
      all_constant = 1;
      goto restart;

    case SYMBOL_REF:
    case LABEL_REF:
      all_constant = 1;
      break;

    case PLUS:
      /* The interesting case is adding the integer to a sum.  Look
	 for a constant term in the sum and combine it with C.  For an
	 integer constant term or a constant term that is not an
	 explicit integer, we combine or group them together anyway.

	 We may not immediately return from the recursive call here, lest
	 all_constant gets lost.  */

      if (CONSTANT_P (XEXP (x, 1)))
	{
	  rtx term = plus_constant (mode, XEXP (x, 1), c, inplace);
	  if (term == const0_rtx)
	    x = XEXP (x, 0);
	  else if (inplace)
	    XEXP (x, 1) = term;
	  else
	    x = gen_rtx_PLUS (mode, XEXP (x, 0), term);
	  c = 0;
	}
      else if (rtx *const_loc = find_constant_term_loc (&y))
	{
	  if (!inplace)
	    {
	      /* We need to be careful since X may be shared and we can't
		 modify it in place.  */
	      x = copy_rtx (x);
	      const_loc = find_constant_term_loc (&x);
	    }
	  *const_loc = plus_constant (mode, *const_loc, c, true);
	  c = 0;
	}
      break;

    default:
      if (CONST_POLY_INT_P (x))
	return immed_wide_int_const (const_poly_int_value (x) + c, mode);
      break;
    }

  if (maybe_ne (c, 0))
    x = gen_rtx_PLUS (mode, x, gen_int_mode (c, mode));

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    return gen_rtx_CONST (mode, x);
  else
    return x;
}
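
/* Illustrative uses (register numbers assumed): with MODE == SImode,
   plus_constant on (plus (reg 100) (const_int 8)) and C = 4 folds the
   constants and yields (plus (reg 100) (const_int 12)), while the same
   call on (symbol_ref "x") yields
   (const (plus (symbol_ref "x") (const_int 4))), thanks to the
   all_constant handling above.  */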

/* If X is a sum, return a new sum like X but lacking any constant terms.
   Add all the removed constant terms into *CONSTPTR.
   X itself is not altered.  The result != X if and only if
   it is not isomorphic to X.  */

rtx
eliminate_constant_term (rtx x, rtx *constptr)
{
  rtx x0, x1;
  rtx tem;

  if (GET_CODE (x) != PLUS)
    return x;

  /* First handle constants appearing at this level explicitly.  */
  if (CONST_INT_P (XEXP (x, 1))
      && (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
					   XEXP (x, 1))) != 0
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return eliminate_constant_term (XEXP (x, 0), constptr);
    }

  tem = const0_rtx;
  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
      && (tem = simplify_binary_operation (PLUS, GET_MODE (x),
					   *constptr, tem)) != 0
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return gen_rtx_PLUS (GET_MODE (x), x0, x1);
    }

  return x;
}
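
/* Illustrative example (register numbers assumed): starting with
   *CONSTPTR == const0_rtx, a call on
   (plus (plus (reg 100) (const_int 8)) (const_int 4)) peels off both
   constants level by level, returning (reg 100) and leaving
   (const_int 12) in *CONSTPTR.  */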

/* Return a copy of X in which all memory references
   and all constants that involve symbol refs
   have been replaced with new temporary registers.
   Also emit code to load the memory locations and constants
   into those registers.

   If X contains no such constants or memory references,
   X itself (not a copy) is returned.

   If a constant is found in the address that is not a legitimate constant
   in an insn, it is left alone in the hope that it might be valid in the
   address.

   X may contain no arithmetic except addition, subtraction and
   multiplication.  Values returned by expand_expr with 1 for sum_ok fit
   this constraint.  */

static rtx
break_out_memory_refs (rtx x)
{
  if (MEM_P (x)
      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
	  && GET_MODE (x) != VOIDmode))
    x = force_reg (GET_MODE (x), x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
	   || GET_CODE (x) == MULT)
    {
      rtx op0 = break_out_memory_refs (XEXP (x, 0));
      rtx op1 = break_out_memory_refs (XEXP (x, 1));

      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
	x = simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
    }

  return x;
}
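
/* Illustrative example (register numbers assumed): given
   (plus (mem (reg 100)) (const_int 4)), the MEM operand is loaded into
   a fresh pseudo by force_reg, giving (plus (reg 101) (const_int 4));
   the CONST_INT is left alone because it has VOIDmode.  */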

/* Given X, a memory address in address space AS' pointer mode, convert it to
   an address in the address space's address mode, or vice versa (TO_MODE says
   which way).  We take advantage of the fact that pointers are not allowed to
   overflow by commuting arithmetic operations over conversions so that address
   arithmetic insns can be used.  IN_CONST is true if this conversion is inside
   a CONST.  NO_EMIT is true if no insns should be emitted, and instead
   it should return NULL if it can't be simplified without emitting insns.  */

rtx
convert_memory_address_addr_space_1 (scalar_int_mode to_mode ATTRIBUTE_UNUSED,
				     rtx x, addr_space_t as ATTRIBUTE_UNUSED,
				     bool in_const ATTRIBUTE_UNUSED,
				     bool no_emit ATTRIBUTE_UNUSED)
{
#ifndef POINTERS_EXTEND_UNSIGNED
  gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
  return x;
#else /* defined(POINTERS_EXTEND_UNSIGNED) */
  scalar_int_mode pointer_mode, address_mode, from_mode;
  rtx temp;
  enum rtx_code code;

  /* If X already has the right mode, just return it.  */
  if (GET_MODE (x) == to_mode)
    return x;

  pointer_mode = targetm.addr_space.pointer_mode (as);
  address_mode = targetm.addr_space.address_mode (as);
  from_mode = to_mode == pointer_mode ? address_mode : pointer_mode;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    {
    CASE_CONST_SCALAR_INT:
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
	code = TRUNCATE;
      else if (POINTERS_EXTEND_UNSIGNED < 0)
	break;
      else if (POINTERS_EXTEND_UNSIGNED > 0)
	code = ZERO_EXTEND;
      else
	code = SIGN_EXTEND;
      temp = simplify_unary_operation (code, to_mode, x, from_mode);
      if (temp)
	return temp;
      break;

    case SUBREG:
      if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
	  && GET_MODE (SUBREG_REG (x)) == to_mode)
	return SUBREG_REG (x);
      break;

    case LABEL_REF:
      temp = gen_rtx_LABEL_REF (to_mode, label_ref_label (x));
      LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
      return temp;

    case SYMBOL_REF:
      temp = shallow_copy_rtx (x);
      PUT_MODE (temp, to_mode);
      return temp;

    case CONST:
      temp = convert_memory_address_addr_space_1 (to_mode, XEXP (x, 0), as,
						  true, no_emit);
      return temp ? gen_rtx_CONST (to_mode, temp) : temp;

    case PLUS:
    case MULT:
      /* For addition we can safely permute the conversion and addition
	 operation if one operand is a constant and converting the constant
	 does not change it or if one operand is a constant and we are
	 using a ptr_extend instruction (POINTERS_EXTEND_UNSIGNED < 0).
	 We can always safely permute them if we are making the address
	 narrower.  Inside a CONST RTL, this is safe for both pointers
	 zero or sign extended as pointers cannot wrap.  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
	  || (GET_CODE (x) == PLUS
	      && CONST_INT_P (XEXP (x, 1))
	      && ((in_const && POINTERS_EXTEND_UNSIGNED != 0)
		  || XEXP (x, 1) == convert_memory_address_addr_space_1
				      (to_mode, XEXP (x, 1), as, in_const,
				       no_emit)
		  || POINTERS_EXTEND_UNSIGNED < 0)))
	{
	  temp = convert_memory_address_addr_space_1 (to_mode, XEXP (x, 0),
						      as, in_const, no_emit);
	  return (temp ? gen_rtx_fmt_ee (GET_CODE (x), to_mode,
					 temp, XEXP (x, 1))
		       : temp);
	}
      break;

    default:
      break;
    }

  if (no_emit)
    return NULL_RTX;

  return convert_modes (to_mode, from_mode,
			x, POINTERS_EXTEND_UNSIGNED);
#endif /* defined(POINTERS_EXTEND_UNSIGNED) */
}
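
/* Illustrative behavior (target parameters assumed, purely for
   exposition): on a target with SImode pointers, DImode addresses and
   POINTERS_EXTEND_UNSIGNED == 1, a CONST_INT converts by folding a
   ZERO_EXTEND, a SYMBOL_REF is shallow-copied with its mode switched
   to TO_MODE, and only when no special case applies does the code fall
   through to convert_modes, which may emit a real extension insn.  */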

/* Given X, a memory address in address space AS' pointer mode, convert it to
   an address in the address space's address mode, or vice versa (TO_MODE says
   which way).  We take advantage of the fact that pointers are not allowed to
   overflow by commuting arithmetic operations over conversions so that address
   arithmetic insns can be used.  */

rtx
convert_memory_address_addr_space (scalar_int_mode to_mode, rtx x,
				   addr_space_t as)
{
  return convert_memory_address_addr_space_1 (to_mode, x, as, false, false);
}

/* Return something equivalent to X but valid as a memory address for something
   of mode MODE in the named address space AS.  When X is not itself valid,
   this works by copying X or subexpressions of it into registers.  */

rtx
memory_address_addr_space (machine_mode mode, rtx x, addr_space_t as)
{
  rtx oldx = x;
  scalar_int_mode address_mode = targetm.addr_space.address_mode (as);

  x = convert_memory_address_addr_space (address_mode, x, as);

  /* By passing constant addresses through registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (address_mode, x);

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && !REG_P (x))
	x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.  */
      if (memory_address_addr_space_p (mode, x, as))
	goto done;

      /* If it was valid before but breaking out memory refs invalidated it,
	 use it the old way.  */
      if (memory_address_addr_space_p (mode, oldx, as))
	{
	  x = oldx;
	  goto done;
	}

      /* Perform machine-dependent transformations on X
	 in certain cases.  This is not necessary since the code
	 below can handle all possible cases, but machine-dependent
	 transformations can make better code.  */
      {
	rtx orig_x = x;
	x = targetm.addr_space.legitimize_address (x, oldx, mode, as);
	if (orig_x != x && memory_address_addr_space_p (mode, x, as))
	  goto done;
      }

      /* PLUS and MULT can appear in special ways
	 as the result of attempts to make an address usable for indexing.
	 Usually they are dealt with by calling force_operand, below.
	 But a sum containing constant terms is special
	 if removing them makes the sum a valid address:
	 then we generate that address in a register
	 and index off of it.  We do this because it often makes
	 shorter code, and because the addresses thus generated
	 in registers often become common subexpressions.  */
      if (GET_CODE (x) == PLUS)
	{
	  rtx constant_term = const0_rtx;
	  rtx y = eliminate_constant_term (x, &constant_term);
	  if (constant_term == const0_rtx
	      || ! memory_address_addr_space_p (mode, y, as))
	    x = force_operand (x, NULL_RTX);
	  else
	    {
	      y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
	      if (! memory_address_addr_space_p (mode, y, as))
		x = force_operand (x, NULL_RTX);
	      else
		x = y;
	    }
	}

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
	x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
	 it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (REG_P (x))
	x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
	 the register is a valid address.  */
      else
	x = force_reg (address_mode, x);
    }

 done:

  gcc_assert (memory_address_addr_space_p (mode, x, as));
  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (REG_P (x))
    mark_reg_pointer (x, BITS_PER_UNIT);
  else if (GET_CODE (x) == PLUS
	   && REG_P (XEXP (x, 0))
	   && CONST_INT_P (XEXP (x, 1)))
    mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}
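
/* Illustrative example (register numbers assumed): if X is
   (plus (plus (reg 100) (reg 101)) (const_int 4)) and is not a valid
   address but (plus (reg 100) (reg 101)) is, the PLUS handling above
   computes the reg+reg part into a fresh pseudo and, if the target
   accepts it, returns (plus (reg 102) (const_int 4)), so the register
   part can be shared by neighboring accesses at other offsets.  */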

/* Convert a mem ref into one with a valid memory address.
   Pass through anything else unchanged.  */

rtx
validize_mem (rtx ref)
{
  if (!MEM_P (ref))
    return ref;
  ref = use_anchored_address (ref);
  if (memory_address_addr_space_p (GET_MODE (ref), XEXP (ref, 0),
				   MEM_ADDR_SPACE (ref)))
    return ref;

  /* Don't alter REF itself, since that is probably a stack slot.  */
  return replace_equiv_address (ref, XEXP (ref, 0));
}

/* If X is a memory reference to a member of an object block, try rewriting
   it to use an anchor instead.  Return the new memory reference on success
   and the old one on failure.  */

rtx
use_anchored_address (rtx x)
{
  rtx base;
  HOST_WIDE_INT offset;
  machine_mode mode;

  if (!flag_section_anchors)
    return x;

  if (!MEM_P (x))
    return x;

  /* Split the address into a base and offset.  */
  base = XEXP (x, 0);
  offset = 0;
  if (GET_CODE (base) == CONST
      && GET_CODE (XEXP (base, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (base, 0), 1)))
    {
      offset += INTVAL (XEXP (XEXP (base, 0), 1));
      base = XEXP (XEXP (base, 0), 0);
    }

  /* Check whether BASE is suitable for anchors.  */
  if (GET_CODE (base) != SYMBOL_REF
      || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
      || SYMBOL_REF_ANCHOR_P (base)
      || SYMBOL_REF_BLOCK (base) == NULL
      || !targetm.use_anchors_for_symbol_p (base))
    return x;

  /* Decide where BASE is going to be.  */
  place_block_symbol (base);

  /* Get the anchor we need to use.  */
  offset += SYMBOL_REF_BLOCK_OFFSET (base);
  base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
			     SYMBOL_REF_TLS_MODEL (base));

  /* Work out the offset from the anchor.  */
  offset -= SYMBOL_REF_BLOCK_OFFSET (base);

  /* If we're going to run a CSE pass, force the anchor into a register.
     We will then be able to reuse registers for several accesses, if the
     target costs say that that's worthwhile.  */
  mode = GET_MODE (base);
  if (!cse_not_expected)
    base = force_reg (mode, base);

  return replace_equiv_address (x, plus_constant (mode, base, offset));
}
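
/* Illustrative arithmetic (block layout values assumed): if X is
   (mem (const (plus (symbol_ref "s") (const_int 8)))), "s" lies at
   block offset 48 and the chosen anchor at block offset 32, OFFSET
   evolves 0 -> 8 -> 56 -> 24, and the returned reference addresses
   the anchor plus 24 bytes.  */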

/* Copy the value or contents of X to a new temp reg and return that reg.  */

rtx
copy_to_reg (rtx x)
{
  rtx temp = gen_reg_rtx (GET_MODE (x));

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (x != temp)
    emit_move_insn (temp, x);

  return temp;
}

/* Like copy_to_reg but always give the new register mode Pmode
   in case X is a constant.  */

rtx
copy_addr_to_reg (rtx x)
{
  return copy_to_mode_reg (Pmode, x);
}

/* Like copy_to_reg but always give the new register mode MODE
   in case X is a constant.  */

rtx
copy_to_mode_reg (machine_mode mode, rtx x)
{
  rtx temp = gen_reg_rtx (mode);

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
  if (x != temp)
    emit_move_insn (temp, x);
  return temp;
}

/* Load X into a register if it is not already one.
   Use mode MODE for the register.
   X should be valid for mode MODE, but it may be a constant which
   is valid for all integer modes; that's why caller must specify MODE.

   The caller must not alter the value in the register we return,
   since we mark it as a "constant" register.  */

rtx
force_reg (machine_mode mode, rtx x)
{
  rtx temp, set;
  rtx_insn *insn;

  if (REG_P (x))
    return x;

  if (general_operand (x, mode))
    {
      temp = gen_reg_rtx (mode);
      insn = emit_move_insn (temp, x);
    }
  else
    {
      temp = force_operand (x, NULL_RTX);
      if (REG_P (temp))
	insn = get_last_insn ();
      else
	{
	  rtx temp2 = gen_reg_rtx (mode);
	  insn = emit_move_insn (temp2, temp);
	  temp = temp2;
	}
    }

  /* Let optimizers know that TEMP's value never changes
     and that X can be substituted for it.  Don't get confused
     if INSN set something else (such as a SUBREG of TEMP).  */
  if (CONSTANT_P (x)
      && (set = single_set (insn)) != 0
      && SET_DEST (set) == temp
      && ! rtx_equal_p (x, SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, x);

  /* Let optimizers know that TEMP is a pointer, and if so, the
     known alignment of that pointer.  */
  {
    unsigned align = 0;
    if (GET_CODE (x) == SYMBOL_REF)
      {
	align = BITS_PER_UNIT;
	if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
	  align = DECL_ALIGN (SYMBOL_REF_DECL (x));
      }
    else if (GET_CODE (x) == LABEL_REF)
      align = BITS_PER_UNIT;
    else if (GET_CODE (x) == CONST
	     && GET_CODE (XEXP (x, 0)) == PLUS
	     && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
	     && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
      {
	rtx s = XEXP (XEXP (x, 0), 0);
	rtx c = XEXP (XEXP (x, 0), 1);
	unsigned sa, ca;

	sa = BITS_PER_UNIT;
	if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
	  sa = DECL_ALIGN (SYMBOL_REF_DECL (s));

	if (INTVAL (c) == 0)
	  align = sa;
	else
	  {
	    ca = ctz_hwi (INTVAL (c)) * BITS_PER_UNIT;
	    align = MIN (sa, ca);
	  }
      }

    if (align || (MEM_P (x) && MEM_POINTER (x)))
      mark_reg_pointer (temp, align);
  }

  return temp;
}
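
/* Usage sketch (illustrative): force_reg (SImode, GEN_INT (42)) emits
   (set (reg 100) (const_int 42)) and returns the new pseudo; the
   REG_EQUAL note is added only when the move's source ended up
   different from X, e.g. after force_operand rewrote a constant
   expression.  */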

/* If X is a memory ref, copy its contents to a new temp reg and return
   that reg.  Otherwise, return X.  */

rtx
force_not_mem (rtx x)
{
  rtx temp;

  if (!MEM_P (x) || GET_MODE (x) == BLKmode)
    return x;

  temp = gen_reg_rtx (GET_MODE (x));

  if (MEM_POINTER (x))
    REG_POINTER (temp) = 1;

  emit_move_insn (temp, x);
  return temp;
}

/* Copy X to TARGET (if it's nonzero and a reg)
   or to a new temp reg and return that reg.
   MODE is the mode to use for X in case it is a constant.  */

rtx
copy_to_suggested_reg (rtx x, rtx target, machine_mode mode)
{
  rtx temp;

  if (target && REG_P (target))
    temp = target;
  else
    temp = gen_reg_rtx (mode);

  emit_move_insn (temp, x);
  return temp;
}

/* Return the mode to use to pass or return a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_RETURN is nonzero if the caller is promoting the return value
   of FNDECL, else it is for promoting args.  */

machine_mode
promote_function_mode (const_tree type, machine_mode mode, int *punsignedp,
		       const_tree funtype, int for_return)
{
  /* Called without a type node for a libcall.  */
  if (type == NULL_TREE)
    {
      if (INTEGRAL_MODE_P (mode))
	return targetm.calls.promote_function_mode (NULL_TREE, mode,
						    punsignedp, funtype,
						    for_return);
      else
	return mode;
    }

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
    case POINTER_TYPE:   case REFERENCE_TYPE:
      return targetm.calls.promote_function_mode (type, mode, punsignedp,
						  funtype, for_return);

    default:
      return mode;
    }
}
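
/* Illustrative example (target behavior assumed): on targets whose
   promote_function_mode hook widens sub-word values, a signed-char
   argument of QImode typically comes back as SImode with *PUNSIGNEDP
   still 0, telling the caller to sign-extend the incoming value.  */
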
/* Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.  */

machine_mode
promote_mode (const_tree type ATTRIBUTE_UNUSED, machine_mode mode,
	      int *punsignedp ATTRIBUTE_UNUSED)
{
#ifdef PROMOTE_MODE
  enum tree_code code;
  int unsignedp;
  scalar_mode smode;
#endif

  /* For libcalls this is invoked without TYPE from the backends'
     TARGET_PROMOTE_FUNCTION_MODE hooks.  Don't do anything in that
     case.  */
  if (type == NULL_TREE)
    return mode;

  /* FIXME: this is the same logic that was there until GCC 4.4, but we
     probably want to test POINTERS_EXTEND_UNSIGNED even if PROMOTE_MODE
     is not defined.  The affected targets are M32C, S390, SPARC.  */
#ifdef PROMOTE_MODE
  code = TREE_CODE (type);
  unsignedp = *punsignedp;

  switch (code)
    {
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
      /* Values of these types always have scalar mode.  */
      smode = as_a <scalar_mode> (mode);
      PROMOTE_MODE (smode, unsignedp, type);
      *punsignedp = unsignedp;
      return smode;

#ifdef POINTERS_EXTEND_UNSIGNED
    case REFERENCE_TYPE:
    case POINTER_TYPE:
      *punsignedp = POINTERS_EXTEND_UNSIGNED;
      return targetm.addr_space.address_mode
	       (TYPE_ADDR_SPACE (TREE_TYPE (type)));
#endif

    default:
      return mode;
    }
#else
  return mode;
#endif
}


/* Use one of promote_mode or promote_function_mode to find the promoted
   mode of DECL.  If PUNSIGNEDP is not NULL, store there the unsignedness
   of DECL after promotion.  */

machine_mode
promote_decl_mode (const_tree decl, int *punsignedp)
{
  tree type = TREE_TYPE (decl);
  int unsignedp = TYPE_UNSIGNED (type);
  machine_mode mode = DECL_MODE (decl);
  machine_mode pmode;

  if (TREE_CODE (decl) == RESULT_DECL && !DECL_BY_REFERENCE (decl))
    pmode = promote_function_mode (type, mode, &unsignedp,
				   TREE_TYPE (current_function_decl), 1);
  else if (TREE_CODE (decl) == RESULT_DECL || TREE_CODE (decl) == PARM_DECL)
    pmode = promote_function_mode (type, mode, &unsignedp,
				   TREE_TYPE (current_function_decl), 2);
  else
    pmode = promote_mode (type, mode, &unsignedp);

  if (punsignedp)
    *punsignedp = unsignedp;
  return pmode;
}

/* Return the promoted mode for NAME.  If it is a named SSA_NAME, it
   is the same as promote_decl_mode.  Otherwise, it is the promoted
   mode of a temp decl of the same type as the SSA_NAME, if we had
   created one.  */

machine_mode
promote_ssa_mode (const_tree name, int *punsignedp)
{
  gcc_assert (TREE_CODE (name) == SSA_NAME);

  /* Partitions holding parms and results must be promoted as expected
     by function.c.  */
  if (SSA_NAME_VAR (name)
      && (TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
	  || TREE_CODE (SSA_NAME_VAR (name)) == RESULT_DECL))
    {
      machine_mode mode = promote_decl_mode (SSA_NAME_VAR (name), punsignedp);
      if (mode != BLKmode)
	return mode;
    }

  tree type = TREE_TYPE (name);
  int unsignedp = TYPE_UNSIGNED (type);
  machine_mode mode = TYPE_MODE (type);

  /* Bypass TYPE_MODE when it maps vector modes to BLKmode.  */
  if (mode == BLKmode)
    {
      gcc_assert (VECTOR_TYPE_P (type));
      mode = type->type_common.mode;
    }

  machine_mode pmode = promote_mode (type, mode, &unsignedp);
  if (punsignedp)
    *punsignedp = unsignedp;

  return pmode;
}


/* Controls the behavior of {anti_,}adjust_stack.  */
static bool suppress_reg_args_size;

/* A helper for adjust_stack and anti_adjust_stack.  */

static void
adjust_stack_1 (rtx adjust, bool anti_p)
{
  rtx temp;
  rtx_insn *insn;

  /* Hereafter anti_p means subtract_p.  */
  if (!STACK_GROWS_DOWNWARD)
    anti_p = !anti_p;

  temp = expand_binop (Pmode,
		       anti_p ? sub_optab : add_optab,
		       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
		       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    insn = emit_move_insn (stack_pointer_rtx, temp);
  else
    {
      insn = get_last_insn ();
      temp = single_set (insn);
      gcc_assert (temp != NULL && SET_DEST (temp) == stack_pointer_rtx);
    }

  if (!suppress_reg_args_size)
    add_args_size_note (insn, stack_pointer_delta);
}
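
/* For example, on a STACK_GROWS_DOWNWARD target,
   adjust_stack_1 (GEN_INT (16), true) decreases the stack pointer by
   16 bytes via sub_optab, and a REG_ARGS_SIZE note is attached to the
   resulting insn unless suppress_reg_args_size is set.  */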

/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
   This pops when ADJUST is positive.  ADJUST need not be constant.  */

void
adjust_stack (rtx adjust)
{
  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be a multiple of
     PREFERRED_STACK_BOUNDARY.  */
  poly_int64 const_adjust;
  if (poly_int_rtx_p (adjust, &const_adjust))
    stack_pointer_delta -= const_adjust;

  adjust_stack_1 (adjust, false);
}

/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
   This pushes when ADJUST is positive.  ADJUST need not be constant.  */

void
anti_adjust_stack (rtx adjust)
{
  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be a multiple of
     PREFERRED_STACK_BOUNDARY.  */
  poly_int64 const_adjust;
  if (poly_int_rtx_p (adjust, &const_adjust))
    stack_pointer_delta += const_adjust;

  adjust_stack_1 (adjust, true);
}

/* Round the size of a block to be pushed up to the boundary required
   by this machine.  SIZE is the desired size, which need not be constant.  */

static rtx
round_push (rtx size)
{
  rtx align_rtx, alignm1_rtx;

  if (!SUPPORTS_STACK_ALIGNMENT
      || crtl->preferred_stack_boundary == MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;

      if (align == 1)
	return size;

      if (CONST_INT_P (size))
	{
	  HOST_WIDE_INT new_size = (INTVAL (size) + align - 1) / align * align;

	  if (INTVAL (size) != new_size)
	    size = GEN_INT (new_size);
	  return size;
	}

      align_rtx = GEN_INT (align);
      alignm1_rtx = GEN_INT (align - 1);
    }
  else
    {
      /* If crtl->preferred_stack_boundary might still grow, use
	 virtual_preferred_stack_boundary_rtx instead.  This will be
	 substituted by the right value in the vregs pass and optimized
	 during combine.  */
      align_rtx = virtual_preferred_stack_boundary_rtx;
      alignm1_rtx = force_operand (plus_constant (Pmode, align_rtx, -1),
				   NULL_RTX);
    }

  /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
     but we know it can't.  So add ourselves and then do
     TRUNC_DIV_EXPR.  */
  size = expand_binop (Pmode, add_optab, size, alignm1_rtx,
		       NULL_RTX, 1, OPTAB_LIB_WIDEN);
  size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, align_rtx,
			NULL_RTX, 1);
  size = expand_mult (Pmode, size, align_rtx, NULL_RTX, 1);

  return size;
}
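
/* Worked example: with a preferred stack boundary of 128 bits, ALIGN
   is 16, so a constant SIZE of 37 rounds up to (37 + 15) / 16 * 16
   == 48.  The non-constant path computes the same value at run time
   with an add, a truncating division and a multiplication.  */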

/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
   to a previously-created save area.  If no save area has been allocated,
   this function will allocate one.  If a save area is specified, it
   must be of the proper mode.  */

void
emit_stack_save (enum save_level save_level, rtx *psave)
{
  rtx sa = *psave;
  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx_insn *(*fcn) (rtx, rtx) = gen_move_insn;
  machine_mode mode = STACK_SAVEAREA_MODE (save_level);

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
    case SAVE_BLOCK:
      if (targetm.have_save_stack_block ())
	fcn = targetm.gen_save_stack_block;
      break;
    case SAVE_FUNCTION:
      if (targetm.have_save_stack_function ())
	fcn = targetm.gen_save_stack_function;
      break;
    case SAVE_NONLOCAL:
      if (targetm.have_save_stack_nonlocal ())
	fcn = targetm.gen_save_stack_nonlocal;
      break;
    default:
      break;
    }

  /* If there is no save area and we have to allocate one, do so.  Otherwise
     verify the save area is the proper mode.  */

  if (sa == 0)
    {
      if (mode != VOIDmode)
	{
	  if (save_level == SAVE_NONLOCAL)
	    *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
	  else
	    *psave = sa = gen_reg_rtx (mode);
	}
    }

  do_pending_stack_adjust ();
  if (sa != 0)
    sa = validize_mem (sa);
  emit_insn (fcn (sa, stack_pointer_rtx));
}

/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
   area made by emit_stack_save.  If it is zero, we have nothing to do.  */

void
emit_stack_restore (enum save_level save_level, rtx sa)
{
  /* The default is that we use a move insn.  */
  rtx_insn *(*fcn) (rtx, rtx) = gen_move_insn;

  /* If stack_realign_drap, the x86 backend emits a prologue that aligns both
     STACK_POINTER and HARD_FRAME_POINTER.
     If stack_realign_fp, the x86 backend emits a prologue that aligns only
     STACK_POINTER.  This renders the HARD_FRAME_POINTER unusable for accessing
     aligned variables, which is reflected in ix86_can_eliminate.
     We normally still have the realigned STACK_POINTER that we can use.
     But if there is a stack restore still present at reload, it can trigger
     mark_not_eliminable for the STACK_POINTER, leaving no way to eliminate
     FRAME_POINTER into a hard reg.
     To prevent this situation, we force need_drap if we emit a stack
     restore.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
    case SAVE_BLOCK:
      if (targetm.have_restore_stack_block ())
	fcn = targetm.gen_restore_stack_block;
      break;
    case SAVE_FUNCTION:
      if (targetm.have_restore_stack_function ())
	fcn = targetm.gen_restore_stack_function;
      break;
    case SAVE_NONLOCAL:
      if (targetm.have_restore_stack_nonlocal ())
	fcn = targetm.gen_restore_stack_nonlocal;
      break;
    default:
      break;
    }

  if (sa != 0)
    {
      sa = validize_mem (sa);
      /* These clobbers prevent the scheduler from moving
	 references to variable arrays below the code
	 that deletes (pops) the arrays.  */
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, stack_pointer_rtx));
    }

  discard_pending_stack_adjust ();

  emit_insn (fcn (stack_pointer_rtx, sa));
}

/* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
   function.  This should be called whenever we allocate or deallocate
   dynamic stack space.  */

void
update_nonlocal_goto_save_area (void)
{
  tree t_save;
  rtx r_save;

  /* The nonlocal_goto_save_area object is an array of N pointers.  The
     first one is used for the frame pointer save; the rest are sized by
     STACK_SAVEAREA_MODE.  Create a reference to array index 1, the first
     of the stack save area slots.  */
  t_save = build4 (ARRAY_REF,
		   TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
		   cfun->nonlocal_goto_save_area,
		   integer_one_node, NULL_TREE, NULL_TREE);
  r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);

  emit_stack_save (SAVE_NONLOCAL, &r_save);
}

/* Record a new stack level for the current function.  This should be called
   whenever we allocate or deallocate dynamic stack space.  */

void
record_new_stack_level (void)
{
  /* Record the new stack level for nonlocal gotos.  */
  if (cfun->nonlocal_goto_save_area)
    update_nonlocal_goto_save_area ();

  /* Record the new stack level for SJLJ exceptions.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    update_sjlj_context ();
}

/* Return an rtx doing runtime alignment to REQUIRED_ALIGN on TARGET.  */

rtx
align_dynamic_address (rtx target, unsigned required_align)
{
  /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
     but we know it can't.  So add ourselves and then do
     TRUNC_DIV_EXPR.  */
  target = expand_binop (Pmode, add_optab, target,
			 gen_int_mode (required_align / BITS_PER_UNIT - 1,
				       Pmode),
			 NULL_RTX, 1, OPTAB_LIB_WIDEN);
  target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
			  gen_int_mode (required_align / BITS_PER_UNIT,
					Pmode),
			  NULL_RTX, 1);
  target = expand_mult (Pmode, target,
			gen_int_mode (required_align / BITS_PER_UNIT,
				      Pmode),
			NULL_RTX, 1);

  return target;
}
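
/* Worked example: for REQUIRED_ALIGN == 64 (bits), the three
   operations above compute ((target + 7) / 8) * 8 at run time,
   rounding TARGET up to the next multiple of 8 bytes.  */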

/* Return an rtx through *PSIZE, representing the size of an area of memory to
   be dynamically pushed on the stack.

   *PSIZE is an rtx representing the size of the area.

   SIZE_ALIGN is the alignment (in bits) that we know SIZE has.  This
   parameter may be zero.  If so, a proper value will be extracted
   from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.

   REQUIRED_ALIGN is the alignment (in bits) required for the region
   of memory.

   If PSTACK_USAGE_SIZE is not NULL it points to a value that is increased for
   the additional size returned.  */

void
get_dynamic_stack_size (rtx *psize, unsigned size_align,
			unsigned required_align,
			HOST_WIDE_INT *pstack_usage_size)
{
  rtx size = *psize;

  /* Ensure the size is in the proper mode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  if (CONST_INT_P (size))
    {
      unsigned HOST_WIDE_INT lsb;

      lsb = INTVAL (size);
      lsb &= -lsb;

      /* Watch out for overflow truncating to "unsigned".  */
      if (lsb > UINT_MAX / BITS_PER_UNIT)
	size_align = 1u << (HOST_BITS_PER_INT - 1);
      else
	size_align = (unsigned)lsb * BITS_PER_UNIT;
    }
  else if (size_align < BITS_PER_UNIT)
    size_align = BITS_PER_UNIT;

  /* We can't attempt to minimize alignment necessary, because we don't
     know the final value of preferred_stack_boundary yet while executing
     this code.  */
  if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
    crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;

  /* We will need to ensure that the address we return is aligned to
     REQUIRED_ALIGN.  At this point in the compilation, we don't always
     know the final value of the STACK_DYNAMIC_OFFSET used in function.c
     (it might depend on the size of the outgoing parameter lists, for
     example), so we must preventively align the value.  We leave space
     in SIZE for the hole that might result from the alignment operation.  */

  unsigned known_align = REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM);
  if (known_align == 0)
    known_align = BITS_PER_UNIT;
  if (required_align > known_align)
    {
      unsigned extra = (required_align - known_align) / BITS_PER_UNIT;
      size = plus_constant (Pmode, size, extra);
      size = force_operand (size, NULL_RTX);
      if (size_align > known_align)
	size_align = known_align;

      if (flag_stack_usage_info && pstack_usage_size)
	*pstack_usage_size += extra;
    }

  /* Round the size to a multiple of the required stack alignment.
     Since the stack is presumed to be rounded before this allocation,
     this will maintain the required alignment.

     If the stack grows downward, we could save an insn by subtracting
     SIZE from the stack pointer and then aligning the stack pointer.
     The problem with this is that the stack pointer may be unaligned
     between the execution of the subtraction and alignment insns and
     some machines do not allow this.  Even on those that do, some
     signal handlers malfunction if a signal should occur between those
     insns.  Since this is an extremely rare event, we have no reliable
     way of knowing which systems have this problem.  So we avoid even
     momentarily mis-aligning the stack.  */
  if (size_align % MAX_SUPPORTED_STACK_ALIGNMENT != 0)
    {
      size = round_push (size);

      if (flag_stack_usage_info && pstack_usage_size)
	{
	  int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
	  *pstack_usage_size =
	    (*pstack_usage_size + align - 1) / align * align;
	}
    }

  *psize = size;
}
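
/* A note on the "lsb &= -lsb" idiom above: it isolates the lowest set
   bit of a constant SIZE, which bounds the alignment the size can
   guarantee.  E.g. SIZE == 48 gives lsb == 16 and hence
   SIZE_ALIGN == 128 bits.  */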

/* Return the number of bytes to "protect" on the stack for -fstack-check.

   "protect" in the context of -fstack-check means how many bytes we should
   always ensure are available on the stack; more importantly, this is how
   many bytes are skipped when probing the stack.

   On some targets we want to reuse the -fstack-check prologue support
   to give a degree of protection against stack clashing style attacks.

   In that scenario we do not want to skip bytes before probing as that
   would render the stack clash protections useless.

   So we never use STACK_CHECK_PROTECT directly.  Instead we indirect
   through this helper, which allows us to provide different values for
   -fstack-check and -fstack-clash-protection.  */

HOST_WIDE_INT
get_stack_check_protect (void)
{
  if (flag_stack_clash_protection)
    return 0;
  return STACK_CHECK_PROTECT;
}

/* Return an rtx representing the address of an area of memory dynamically
   pushed on the stack.

   Any required stack pointer alignment is preserved.

   SIZE is an rtx representing the size of the area.

   SIZE_ALIGN is the alignment (in bits) that we know SIZE has.  This
   parameter may be zero.  If so, a proper value will be extracted
   from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.

   REQUIRED_ALIGN is the alignment (in bits) required for the region
   of memory.

   MAX_SIZE is an upper bound for SIZE, if SIZE is not constant, or -1 if
   no such upper bound is known.

   If CANNOT_ACCUMULATE is set to TRUE, the caller guarantees that the
   stack space allocated by the generated code cannot be added with itself
   in the course of the execution of the function.  It is always safe to
   pass FALSE here and the following criterion is sufficient in order to
   pass TRUE: every path in the CFG that starts at the allocation point and
   loops to it executes the associated deallocation code.  */

rtx
allocate_dynamic_stack_space (rtx size, unsigned size_align,
			      unsigned required_align,
			      HOST_WIDE_INT max_size,
			      bool cannot_accumulate)
{
  HOST_WIDE_INT stack_usage_size = -1;
  rtx_code_label *final_label;
  rtx final_target, target;

  /* If we're asking for zero bytes, it doesn't matter what we point
     to since we can't dereference it.  But return a reasonable
     address anyway.  */
  if (size == const0_rtx)
    return virtual_stack_dynamic_rtx;

  /* Otherwise, show we're calling alloca or equivalent.  */
  cfun->calls_alloca = 1;

  /* If stack usage info is requested, look into the size we are passed.
     We need to do so this early to avoid the obfuscation that may be
     introduced later by the various alignment operations.  */
  if (flag_stack_usage_info)
    {
      if (CONST_INT_P (size))
	stack_usage_size = INTVAL (size);
      else if (REG_P (size))
	{
	  /* Look into the last emitted insn and see if we can deduce
	     something for the register.  */
	  rtx_insn *insn;
	  rtx set, note;
	  insn = get_last_insn ();
	  if ((set = single_set (insn)) && rtx_equal_p (SET_DEST (set), size))
	    {
	      if (CONST_INT_P (SET_SRC (set)))
		stack_usage_size = INTVAL (SET_SRC (set));
	      else if ((note = find_reg_equal_equiv_note (insn))
		       && CONST_INT_P (XEXP (note, 0)))
		stack_usage_size = INTVAL (XEXP (note, 0));
	    }
	}

      /* If the size is not constant, try the maximum size.  */
      if (stack_usage_size < 0)
	stack_usage_size = max_size;

      /* If the size is still not constant, we can't say anything.  */
      if (stack_usage_size < 0)
	{
	  current_function_has_unbounded_dynamic_stack_size = 1;
	  stack_usage_size = 0;
	}
    }

  get_dynamic_stack_size (&size, size_align, required_align, &stack_usage_size);

  target = gen_reg_rtx (Pmode);

  /* The size is supposed to be fully adjusted at this point so record it
     if stack usage info is requested.  */
  if (flag_stack_usage_info)
    {
      current_function_dynamic_stack_size += stack_usage_size;

      /* ??? This is gross but the only safe stance in the absence
	 of stack usage oriented flow analysis.  */
      if (!cannot_accumulate)
	current_function_has_unbounded_dynamic_stack_size = 1;
    }

  do_pending_stack_adjust ();

  final_label = NULL;
  final_target = NULL_RTX;

  /* If we are splitting the stack, we need to ask the backend whether
     there is enough room on the current stack.  If there isn't, or if
     the backend doesn't know how to tell us, then we need to call a
     function to allocate memory in some other way.  This memory will
     be released when we release the current stack segment.  The
     effect is that stack allocation becomes less efficient, but at
     least it doesn't cause a stack overflow.  */
  if (flag_split_stack)
    {
      rtx_code_label *available_label;
      rtx ask, space, func;

      available_label = NULL;

      if (targetm.have_split_stack_space_check ())
	{
	  available_label = gen_label_rtx ();

	  /* This instruction will branch to AVAILABLE_LABEL if there
	     are SIZE bytes available on the stack.  */
	  emit_insn (targetm.gen_split_stack_space_check
		     (size, available_label));
	}

      /* The __morestack_allocate_stack_space function will allocate
	 memory using malloc.  If the alignment of the memory returned
	 by malloc does not meet REQUIRED_ALIGN, we increase SIZE to
	 make sure we allocate enough space.  */
      if (MALLOC_ABI_ALIGNMENT >= required_align)
	ask = size;
      else
	ask = expand_binop (Pmode, add_optab, size,
			    gen_int_mode (required_align / BITS_PER_UNIT - 1,
					  Pmode),
			    NULL_RTX, 1, OPTAB_LIB_WIDEN);

      func = init_one_libfunc ("__morestack_allocate_stack_space");

      space = emit_library_call_value (func, target, LCT_NORMAL, Pmode,
				       ask, Pmode);

      if (available_label == NULL_RTX)
	return space;

      final_target = gen_reg_rtx (Pmode);

      emit_move_insn (final_target, space);

      final_label = gen_label_rtx ();
      emit_jump (final_label);

      emit_label (available_label);
    }

  /* We ought always to be called at the top level, and the stack ought
     to be aligned properly at this point.  */
  gcc_assert (multiple_p (stack_pointer_delta,
			  PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT));

  /* If needed, check that we have the required amount of stack.  Take into
     account what has already been checked.  */
  if (STACK_CHECK_MOVING_SP)
    ;
  else if (flag_stack_check == GENERIC_STACK_CHECK)
    probe_stack_range (STACK_OLD_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE,
		       size);
  else if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK)
    probe_stack_range (get_stack_check_protect (), size);

  /* Don't let anti_adjust_stack emit notes.  */
  suppress_reg_args_size = true;

  /* Perform the required allocation from the stack.  Some systems do
     this differently than simply incrementing/decrementing from the
     stack pointer, such as acquiring the space by calling malloc().  */
  if (targetm.have_allocate_stack ())
    {
      struct expand_operand ops[2];
      /* We don't have to check against the predicate for operand 0 since
	 TARGET is known to be a pseudo of the proper mode, which must
	 be valid for the operand.  */
      create_fixed_operand (&ops[0], target);
      create_convert_operand_to (&ops[1], size, STACK_SIZE_MODE, true);
      expand_insn (targetm.code_for_allocate_stack, 2, ops);
    }
  else
    {
      poly_int64 saved_stack_pointer_delta;

      if (!STACK_GROWS_DOWNWARD)
	emit_move_insn (target, virtual_stack_dynamic_rtx);

      /* Check stack bounds if necessary.  */
      if (crtl->limit_stack)
	{
	  rtx available;
	  rtx_code_label *space_available = gen_label_rtx ();
	  if (STACK_GROWS_DOWNWARD)
	    available = expand_binop (Pmode, sub_optab,
				      stack_pointer_rtx, stack_limit_rtx,
				      NULL_RTX, 1, OPTAB_WIDEN);
	  else
	    available = expand_binop (Pmode, sub_optab,
				      stack_limit_rtx, stack_pointer_rtx,
				      NULL_RTX, 1, OPTAB_WIDEN);

	  emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
				   space_available);
	  if (targetm.have_trap ())
	    emit_insn (targetm.gen_trap ());
	  else
	    error ("stack limits not supported on this target");
	  emit_barrier ();
	  emit_label (space_available);
	}

      saved_stack_pointer_delta = stack_pointer_delta;

      if (flag_stack_check && STACK_CHECK_MOVING_SP)
	anti_adjust_stack_and_probe (size, false);
      else if (flag_stack_clash_protection)
	anti_adjust_stack_and_probe_stack_clash (size);
      else
	anti_adjust_stack (size);

      /* Even if size is constant, don't modify stack_pointer_delta.
	 The constant size alloca should preserve
	 crtl->preferred_stack_boundary alignment.  */
      stack_pointer_delta = saved_stack_pointer_delta;

      if (STACK_GROWS_DOWNWARD)
	emit_move_insn (target, virtual_stack_dynamic_rtx);
    }

  suppress_reg_args_size = false;

  /* Finish up the split stack handling.  */
  if (final_label != NULL_RTX)
    {
      gcc_assert (flag_split_stack);
      emit_move_insn (final_target, target);
      emit_label (final_label);
      target = final_target;
    }

  target = align_dynamic_address (target, required_align);

  /* Now that we've committed to a return value, mark its alignment.  */
  mark_reg_pointer (target, required_align);

  /* Record the new stack level.  */
  record_new_stack_level ();

  return target;
}

/* Return an rtx representing the address of an area of memory already
   statically pushed onto the stack in the virtual stack vars area.  (It is
   assumed that the area is allocated in the function prologue.)

   Any required stack pointer alignment is preserved.

   OFFSET is the offset of the area into the virtual stack vars area.

   REQUIRED_ALIGN is the alignment (in bits) required for the region
   of memory.  */

rtx
get_dynamic_stack_base (poly_int64 offset, unsigned required_align)
{
  rtx target;

  if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
    crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;

  target = gen_reg_rtx (Pmode);
  emit_move_insn (target, virtual_stack_vars_rtx);
  target = expand_binop (Pmode, add_optab, target,
			 gen_int_mode (offset, Pmode),
			 NULL_RTX, 1, OPTAB_LIB_WIDEN);
  target = align_dynamic_address (target, required_align);

  /* Now that we've committed to a return value, mark its alignment.  */
  mark_reg_pointer (target, required_align);

  return target;
}
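
/* An illustrative use (the variable names are hypothetical): a pass that
   reserved an area at offset OFF into the virtual stack vars area during
   prologue generation can rematerialize a 16-byte-aligned pointer to it:

     rtx base = get_dynamic_stack_base (off, 128);

   where 128 is REQUIRED_ALIGN in bits; the result is a fresh Pmode pseudo
   usable as a MEM address.  */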

/* A front end may want to override GCC's stack checking by providing a
   run-time routine to call to check the stack, so provide a mechanism for
   calling that routine.  */

static GTY(()) rtx stack_check_libfunc;

void
set_stack_check_libfunc (const char *libfunc_name)
{
  gcc_assert (stack_check_libfunc == NULL_RTX);
  stack_check_libfunc = gen_rtx_SYMBOL_REF (Pmode, libfunc_name);
}
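
/* An illustrative usage sketch (the routine name is an example, not taken
   from this file): a front end registers its checker once during
   initialization, e.g.

     set_stack_check_libfunc ("_gnat_stack_check");

   A second call would trip the gcc_assert above; the libfunc can only be
   registered once.  */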

/* Emit one stack probe at ADDRESS, an address within the stack.  */

void
emit_stack_probe (rtx address)
{
  if (targetm.have_probe_stack_address ())
    {
      struct expand_operand ops[1];
      insn_code icode = targetm.code_for_probe_stack_address;
      create_address_operand (ops, address);
      maybe_legitimize_operands (icode, 0, 1, ops);
      expand_insn (icode, 1, ops);
    }
  else
    {
      rtx memref = gen_rtx_MEM (word_mode, address);

      MEM_VOLATILE_P (memref) = 1;
      memref = validize_mem (memref);

      /* See if we have an insn to probe the stack.  */
      if (targetm.have_probe_stack ())
	emit_insn (targetm.gen_probe_stack (memref));
      else
	emit_move_insn (memref, const0_rtx);
    }
}
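
/* On targets with neither a probe_stack_address nor a probe_stack pattern,
   the fallback above emits a volatile store of zero, i.e. RTL along the
   lines of (word_mode rendered as SI for illustration):

     (set (mem/v:SI (reg:SI sp)) (const_int 0))

   The volatile flag keeps later passes from deleting the otherwise dead
   store.  */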

/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
   FIRST is a constant and SIZE is a Pmode RTX.  These are offsets from
   the current stack pointer.  STACK_GROWS_DOWNWARD says whether to add
   or subtract them from the stack pointer.  */

#define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP)

#if STACK_GROWS_DOWNWARD
#define STACK_GROW_OP MINUS
#define STACK_GROW_OPTAB sub_optab
#define STACK_GROW_OFF(off) -(off)
#else
#define STACK_GROW_OP PLUS
#define STACK_GROW_OPTAB add_optab
#define STACK_GROW_OFF(off) (off)
#endif
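
/* A worked illustration of the macros above, assuming a downward-growing
   stack: STACK_GROW_OP is MINUS, growing the stack uses sub_optab, and
   STACK_GROW_OFF (4096) is -4096, so a probe 4096 bytes beyond the stack
   pointer is addressed as (plus sp -4096).  On an upward-growing stack
   the offsets and optab are used unchanged.  */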

void
probe_stack_range (HOST_WIDE_INT first, rtx size)
{
  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* Next see if we have a function to check the stack.  */
  if (stack_check_libfunc)
    {
      rtx addr = memory_address (Pmode,
				 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
						 stack_pointer_rtx,
						 plus_constant (Pmode,
								size, first)));
      emit_library_call (stack_check_libfunc, LCT_THROW, VOIDmode,
			 addr, Pmode);
    }

  /* Next see if we have an insn to check the stack.  */
  else if (targetm.have_check_stack ())
    {
      struct expand_operand ops[1];
      rtx addr = memory_address (Pmode,
				 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
						 stack_pointer_rtx,
						 plus_constant (Pmode,
								size, first)));
      bool success;
      create_input_operand (&ops[0], addr, Pmode);
      success = maybe_expand_insn (targetm.code_for_check_stack, 1, ops);
      gcc_assert (success);
    }

  /* Otherwise we have to generate explicit probes.  If we have a constant
     small number of them to generate, that's the easy case.  */
  else if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
    {
      HOST_WIDE_INT isize = INTVAL (size), i;
      rtx addr;

      /* Probe at FIRST + N * PROBE_INTERVAL for values of N from 1 until
	 it exceeds SIZE.  If only one probe is needed, this will not
	 generate any code.  Then probe at FIRST + SIZE.  */
      for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
	{
	  addr = memory_address (Pmode,
				 plus_constant (Pmode, stack_pointer_rtx,
						STACK_GROW_OFF (first + i)));
	  emit_stack_probe (addr);
	}

      addr = memory_address (Pmode,
			     plus_constant (Pmode, stack_pointer_rtx,
					    STACK_GROW_OFF (first + isize)));
      emit_stack_probe (addr);
    }

  /* In the variable case, do the same as above, but in a loop.  Note that we
     must be extra careful with variables wrapping around because we might be
     at the very top (or the very bottom) of the address space and we have to
     be able to handle this case properly; in particular, we use an equality
     test for the loop condition.  */
  else
    {
      rtx rounded_size, rounded_size_op, test_addr, last_addr, temp;
      rtx_code_label *loop_lab = gen_label_rtx ();
      rtx_code_label *end_lab = gen_label_rtx ();

      /* Step 1: round SIZE to the previous multiple of the interval.  */

      /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
      rounded_size
	= simplify_gen_binary (AND, Pmode, size,
			       gen_int_mode (-PROBE_INTERVAL, Pmode));
      rounded_size_op = force_operand (rounded_size, NULL_RTX);


      /* Step 2: compute initial and final value of the loop counter.  */

      /* TEST_ADDR = SP + FIRST.  */
      test_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
						 stack_pointer_rtx,
						 gen_int_mode (first, Pmode)),
				 NULL_RTX);

      /* LAST_ADDR = SP + FIRST + ROUNDED_SIZE.  */
      last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
						 test_addr,
						 rounded_size_op), NULL_RTX);


      /* Step 3: the loop

	 while (TEST_ADDR != LAST_ADDR)
	   {
	     TEST_ADDR = TEST_ADDR + PROBE_INTERVAL
	     probe at TEST_ADDR
	   }

	 probes at FIRST + N * PROBE_INTERVAL for values of N from 1
	 until it is equal to ROUNDED_SIZE.  */

      emit_label (loop_lab);

      /* Jump to END_LAB if TEST_ADDR == LAST_ADDR.  */
      emit_cmp_and_jump_insns (test_addr, last_addr, EQ, NULL_RTX, Pmode, 1,
			       end_lab);

      /* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL.  */
      temp = expand_binop (Pmode, STACK_GROW_OPTAB, test_addr,
			   gen_int_mode (PROBE_INTERVAL, Pmode), test_addr,
			   1, OPTAB_WIDEN);

      gcc_assert (temp == test_addr);

      /* Probe at TEST_ADDR.  */
      emit_stack_probe (test_addr);

      emit_jump (loop_lab);

      emit_label (end_lab);


      /* Step 4: probe at FIRST + SIZE if we cannot assert at compile-time
	 that SIZE is equal to ROUNDED_SIZE.  */

      /* TEMP = SIZE - ROUNDED_SIZE.  */
      temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
      if (temp != const0_rtx)
	{
	  rtx addr;

	  if (CONST_INT_P (temp))
	    {
	      /* Use [base + disp] addressing mode if supported.  */
	      HOST_WIDE_INT offset = INTVAL (temp);
	      addr = memory_address (Pmode,
				     plus_constant (Pmode, last_addr,
						    STACK_GROW_OFF (offset)));
	    }
	  else
	    {
	      /* Manual CSE if the difference is not known at compile-time.  */
	      temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
	      addr = memory_address (Pmode,
				     gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
						     last_addr, temp));
	    }

	  emit_stack_probe (addr);
	}
    }

  /* Make sure nothing is scheduled before we are done.  */
  emit_insn (gen_blockage ());
}
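
/* A worked example of the constant case above, assuming
   PROBE_INTERVAL == 4096 (i.e. STACK_CHECK_PROBE_INTERVAL_EXP == 12) and
   a downward-growing stack:

     probe_stack_range (0, GEN_INT (12288))

   emits probes at sp - 4096 and sp - 8192 from the loop, then the final
   probe at sp - 12288.  For a size of 4096 or less the loop contributes
   nothing and only the final probe is emitted.  */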

/* Compute parameters for stack clash probing a dynamic stack
   allocation of SIZE bytes.

   We compute ROUNDED_SIZE, LAST_ADDR, RESIDUAL and PROBE_INTERVAL.

   Additionally we conditionally dump the type of probing that will
   be needed given the values computed.  */

void
compute_stack_clash_protection_loop_data (rtx *rounded_size, rtx *last_addr,
					  rtx *residual,
					  HOST_WIDE_INT *probe_interval,
					  rtx size)
{
  /* Round SIZE down to a multiple of
     STACK_CLASH_PROTECTION_PROBE_INTERVAL.  */
  *probe_interval
    = 1 << PARAM_VALUE (PARAM_STACK_CLASH_PROTECTION_PROBE_INTERVAL);
  *rounded_size = simplify_gen_binary (AND, Pmode, size,
				       GEN_INT (-*probe_interval));

  /* Compute the value of the stack pointer for the last iteration.
     It's just SP + ROUNDED_SIZE.  */
  rtx rounded_size_op = force_operand (*rounded_size, NULL_RTX);
  *last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
					      stack_pointer_rtx,
					      rounded_size_op),
			      NULL_RTX);

  /* Compute any residuals not allocated by the loop above.  Residuals
     are just SIZE - ROUNDED_SIZE.  */
  *residual = simplify_gen_binary (MINUS, Pmode, size, *rounded_size);

  /* Dump key information to make writing tests easy.  */
  if (dump_file)
    {
      if (*rounded_size == CONST0_RTX (Pmode))
	fprintf (dump_file,
		 "Stack clash skipped dynamic allocation and probing loop.\n");
      else if (CONST_INT_P (*rounded_size)
	       && INTVAL (*rounded_size) <= 4 * *probe_interval)
	fprintf (dump_file,
		 "Stack clash dynamic allocation and probing inline.\n");
      else if (CONST_INT_P (*rounded_size))
	fprintf (dump_file,
		 "Stack clash dynamic allocation and probing in "
		 "rotated loop.\n");
      else
	fprintf (dump_file,
		 "Stack clash dynamic allocation and probing in loop.\n");

      if (*residual != CONST0_RTX (Pmode))
	fprintf (dump_file,
		 "Stack clash dynamic allocation and probing residuals.\n");
      else
	fprintf (dump_file,
		 "Stack clash skipped dynamic allocation and "
		 "probing residuals.\n");
    }
}
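
/* A worked example, assuming the probe interval parameter is 12, i.e. a
   4096-byte interval, and a downward-growing stack:

     SIZE = 9000:  *rounded_size = 8192, *residual = 808,
		   *last_addr = sp - 8192
     SIZE = 4096:  *rounded_size = 4096, *residual = 0
     SIZE = 100:   *rounded_size = 0 (the probing loop is skipped),
		   *residual = 100  */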

/* Emit the start of an allocate/probe loop for stack
   clash protection.

   LOOP_LAB and END_LAB are returned for use when we emit the
   end of the loop.

   LAST_ADDR is the value for SP which stops the loop.  */

void
emit_stack_clash_protection_probe_loop_start (rtx *loop_lab,
					      rtx *end_lab,
					      rtx last_addr,
					      bool rotated)
{
  /* Essentially we want to emit any setup code, the top of loop
     label and the comparison at the top of the loop.  */
  *loop_lab = gen_label_rtx ();
  *end_lab = gen_label_rtx ();

  emit_label (*loop_lab);
  if (!rotated)
    emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
			     Pmode, 1, *end_lab);
}

/* Emit the end of a stack clash probing loop.

   This consists of just the jump back to LOOP_LAB and
   emitting END_LOOP after the loop.  */

void
emit_stack_clash_protection_probe_loop_end (rtx loop_lab, rtx end_loop,
					    rtx last_addr, bool rotated)
{
  if (rotated)
    emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, NE, NULL_RTX,
			     Pmode, 1, loop_lab);
  else
    emit_jump (loop_lab);

  emit_label (end_loop);
}

/* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
   while probing it.  This pushes when SIZE is positive.  SIZE need not
   be constant.

   This is subtly different from anti_adjust_stack_and_probe to try and
   prevent stack-clash attacks:

   1. It must assume no knowledge of the probing state, any allocation
      must probe.

      Consider the case of a 1 byte alloca in a loop.  If the sum of the
      allocations is large, then this could be used to jump the guard if
      probes were not emitted.

   2. It never skips probes, whereas anti_adjust_stack_and_probe will
      skip probes on the first couple PROBE_INTERVALs on the assumption
      they're done elsewhere.

   3. It only allocates and probes SIZE bytes, it does not need to
      allocate/probe beyond that because this probing style does not
      guarantee signal handling capability if the guard is hit.  */

static void
anti_adjust_stack_and_probe_stack_clash (rtx size)
{
  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* We can get here with a constant size on some targets.  */
  rtx rounded_size, last_addr, residual;
  HOST_WIDE_INT probe_interval, probe_range;
  bool target_probe_range_p = false;
  compute_stack_clash_protection_loop_data (&rounded_size, &last_addr,
					    &residual, &probe_interval, size);

  /* Get the back-end specific probe ranges.  */
  probe_range = targetm.stack_clash_protection_alloca_probe_range ();
  target_probe_range_p = probe_range != 0;
  gcc_assert (probe_range >= 0);

  /* If no back-end specific range defined, default to the top of the newly
     allocated range.  */
  if (probe_range == 0)
    probe_range = probe_interval - GET_MODE_SIZE (word_mode);

  if (rounded_size != CONST0_RTX (Pmode))
    {
      if (CONST_INT_P (rounded_size)
	  && INTVAL (rounded_size) <= 4 * probe_interval)
	{
	  for (HOST_WIDE_INT i = 0;
	       i < INTVAL (rounded_size);
	       i += probe_interval)
	    {
	      anti_adjust_stack (GEN_INT (probe_interval));

	      /* The prologue does not probe residuals.  Thus the offset
		 here is to probe just beyond what the prologue had already
		 allocated.  */
	      emit_stack_probe (plus_constant (Pmode, stack_pointer_rtx,
					       probe_range));

	      emit_insn (gen_blockage ());
	    }
	}
      else
	{
	  rtx loop_lab, end_loop;
	  bool rotate_loop = CONST_INT_P (rounded_size);
	  emit_stack_clash_protection_probe_loop_start (&loop_lab, &end_loop,
							last_addr, rotate_loop);

	  anti_adjust_stack (GEN_INT (probe_interval));

	  /* The prologue does not probe residuals.  Thus the offset here
	     is to probe just beyond what the prologue had already
	     allocated.  */
	  emit_stack_probe (plus_constant (Pmode, stack_pointer_rtx,
					   probe_range));

	  emit_stack_clash_protection_probe_loop_end (loop_lab, end_loop,
						      last_addr, rotate_loop);
	  emit_insn (gen_blockage ());
	}
    }

  if (residual != CONST0_RTX (Pmode))
    {
      rtx label = NULL_RTX;
      /* RESIDUAL could be zero at runtime and in that case *sp could
	 hold live data.  Furthermore, we do not want to probe into the
	 red zone.

	 If TARGET_PROBE_RANGE_P then the target has promised it's safe to
	 probe at offset 0, in which case we no longer have to check for
	 RESIDUAL == 0.  However we still need to probe at the right offset
	 when RESIDUAL > PROBE_RANGE, in which case we probe at
	 PROBE_RANGE.

	 If !TARGET_PROBE_RANGE_P then go ahead and just guard the probe
	 at *sp on RESIDUAL != 0 at runtime if RESIDUAL is not a compile
	 time constant.  */
      anti_adjust_stack (residual);

      if (!CONST_INT_P (residual))
	{
	  label = gen_label_rtx ();
	  rtx_code op = target_probe_range_p ? LT : EQ;
	  rtx probe_cmp_value = target_probe_range_p
	    ? gen_rtx_CONST_INT (GET_MODE (residual), probe_range)
	    : CONST0_RTX (GET_MODE (residual));

	  if (target_probe_range_p)
	    emit_stack_probe (stack_pointer_rtx);

	  emit_cmp_and_jump_insns (residual, probe_cmp_value,
				   op, NULL_RTX, Pmode, 1, label);
	}

      rtx x = NULL_RTX;

      /* If RESIDUAL isn't a constant and TARGET_PROBE_RANGE_P then we probe up
	 by the ABI defined safe value.  */
      if (!CONST_INT_P (residual) && target_probe_range_p)
	x = GEN_INT (probe_range);
      /* If RESIDUAL is a constant but smaller than the ABI defined safe value,
	 we still want to probe up, but the safest amount is a word.  */
      else if (target_probe_range_p)
	{
	  if (INTVAL (residual) <= probe_range)
	    x = GEN_INT (GET_MODE_SIZE (word_mode));
	  else
	    x = GEN_INT (probe_range);
	}
      else
	/* If nothing else, probe at the top of the new allocation.  */
	x = plus_constant (Pmode, residual, -GET_MODE_SIZE (word_mode));

      emit_stack_probe (gen_rtx_PLUS (Pmode, stack_pointer_rtx, x));

      emit_insn (gen_blockage ());
      if (!CONST_INT_P (residual))
	emit_label (label);
    }
}
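
/* A worked example of the residual handling above, assuming a 4096-byte
   probe interval, an 8-byte word_mode and no target-defined probe range:
   for a compile-time SIZE of 9000, ROUNDED_SIZE is 8192 and RESIDUAL is
   the constant 808, so after the 808-byte adjustment the code probes at
   sp + 800, the highest word of the residual allocation, with no runtime
   check.  For a variable SIZE the same probe is guarded by the
   RESIDUAL == 0 branch above so *sp is never touched when nothing was
   allocated.  */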


/* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
   while probing it.  This pushes when SIZE is positive.  SIZE need not
   be constant.  If ADJUST_BACK is true, adjust back the stack pointer
   by plus SIZE at the end.  */

void
anti_adjust_stack_and_probe (rtx size, bool adjust_back)
{
  /* We skip the probe for the first interval + a small dope of 4 words and
     probe that many bytes past the specified size to maintain a protection
     area at the bottom of the stack.  */
  const int dope = 4 * UNITS_PER_WORD;

  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* If we have a constant small number of probes to generate, that's the
     easy case.  */
  if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
    {
      HOST_WIDE_INT isize = INTVAL (size), i;
      bool first_probe = true;

      /* Adjust SP and probe at PROBE_INTERVAL + N * PROBE_INTERVAL for
	 values of N from 1 until it exceeds SIZE.  If only one probe is
	 needed, this will not generate any code.  Then adjust and probe
	 to PROBE_INTERVAL + SIZE.  */
      for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
	{
	  if (first_probe)
	    {
	      anti_adjust_stack (GEN_INT (2 * PROBE_INTERVAL + dope));
	      first_probe = false;
	    }
	  else
	    anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
	  emit_stack_probe (stack_pointer_rtx);
	}

      if (first_probe)
	anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
      else
	anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL - i));
      emit_stack_probe (stack_pointer_rtx);
    }

  /* In the variable case, do the same as above, but in a loop.  Note that we
     must be extra careful with variables wrapping around because we might be
     at the very top (or the very bottom) of the address space and we have to
     be able to handle this case properly; in particular, we use an equality
     test for the loop condition.  */
  else
    {
      rtx rounded_size, rounded_size_op, last_addr, temp;
      rtx_code_label *loop_lab = gen_label_rtx ();
      rtx_code_label *end_lab = gen_label_rtx ();


      /* Step 1: round SIZE to the previous multiple of the interval.  */

      /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
      rounded_size
	= simplify_gen_binary (AND, Pmode, size,
			       gen_int_mode (-PROBE_INTERVAL, Pmode));
      rounded_size_op = force_operand (rounded_size, NULL_RTX);


      /* Step 2: compute initial and final value of the loop counter.  */

      /* SP = SP_0 + PROBE_INTERVAL.  */
      anti_adjust_stack (GEN_INT (PROBE_INTERVAL + dope));

      /* LAST_ADDR = SP_0 + PROBE_INTERVAL + ROUNDED_SIZE.  */
      last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
						 stack_pointer_rtx,
						 rounded_size_op), NULL_RTX);


      /* Step 3: the loop

	 while (SP != LAST_ADDR)
	   {
	     SP = SP + PROBE_INTERVAL
	     probe at SP
	   }

	 adjusts SP and probes at PROBE_INTERVAL + N * PROBE_INTERVAL for
	 values of N from 1 until it is equal to ROUNDED_SIZE.  */

      emit_label (loop_lab);

      /* Jump to END_LAB if SP == LAST_ADDR.  */
      emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
			       Pmode, 1, end_lab);

      /* SP = SP + PROBE_INTERVAL and probe at SP.  */
      anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
      emit_stack_probe (stack_pointer_rtx);

      emit_jump (loop_lab);

      emit_label (end_lab);


      /* Step 4: adjust SP and probe at PROBE_INTERVAL + SIZE if we cannot
	 assert at compile-time that SIZE is equal to ROUNDED_SIZE.  */

      /* TEMP = SIZE - ROUNDED_SIZE.  */
      temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
      if (temp != const0_rtx)
	{
	  /* Manual CSE if the difference is not known at compile-time.  */
	  if (GET_CODE (temp) != CONST_INT)
	    temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
	  anti_adjust_stack (temp);
	  emit_stack_probe (stack_pointer_rtx);
	}
    }

  /* Adjust back and account for the additional first interval.  */
  if (adjust_back)
    adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
  else
    adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
}
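
/* A worked example, assuming PROBE_INTERVAL == 4096 and
   UNITS_PER_WORD == 8 (so dope == 32): for a constant SIZE of 16384 the
   code above adjusts the stack by 2*4096 + 32 bytes with a probe, then
   twice by 4096 with a probe each, then by the remaining 4096 with a
   final probe, a total of SIZE + PROBE_INTERVAL + dope bytes.  With
   ADJUST_BACK false the trailing adjust_stack pops PROBE_INTERVAL + dope,
   leaving exactly SIZE bytes allocated.  */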

/* Return an rtx representing the register or memory location
   in which a scalar value of data type VALTYPE
   was returned by a function call to function FUNC.
   FUNC is a FUNCTION_DECL, FNTYPE a FUNCTION_TYPE node if the precise
   function is known, otherwise 0.
   OUTGOING is 1 if on a machine with register windows this function
   should return the register in which the function will put its result
   and 0 otherwise.  */

rtx
hard_function_value (const_tree valtype, const_tree func, const_tree fntype,
		     int outgoing ATTRIBUTE_UNUSED)
{
  rtx val;

  val = targetm.calls.function_value (valtype, func ? func : fntype, outgoing);

  if (REG_P (val)
      && GET_MODE (val) == BLKmode)
    {
      unsigned HOST_WIDE_INT bytes = arg_int_size_in_bytes (valtype);
      opt_scalar_int_mode tmpmode;

      /* int_size_in_bytes can return -1.  We don't need a check here
	 since the value of bytes will then be large enough that no
	 mode will match anyway.  */

      FOR_EACH_MODE_IN_CLASS (tmpmode, MODE_INT)
	{
	  /* Have we found a large enough mode?  */
	  if (GET_MODE_SIZE (tmpmode.require ()) >= bytes)
	    break;
	}

      PUT_MODE (val, tmpmode.require ());
    }
  return val;
}
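
/* An illustrative case for the BLKmode fix-up above: a 6-byte aggregate
   returned in a register gives bytes == 6, and FOR_EACH_MODE_IN_CLASS
   stops at the first integer mode of at least that size, DImode on a
   typical target where DImode is 8 bytes, so the returned REG is
   retagged from BLKmode to DImode.  */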

/* Return an rtx representing the register or memory location
   in which a scalar value of mode MODE was returned by a library call.  */

rtx
hard_libcall_value (machine_mode mode, rtx fun)
{
  return targetm.calls.libcall_value (mode, fun);
}

/* Look up the tree code for a given rtx code
   to provide the arithmetic operation for real_arithmetic.
   The function returns an int because the caller may not know
   what `enum tree_code' means.  */

int
rtx_to_tree_code (enum rtx_code code)
{
  enum tree_code tcode;

  switch (code)
    {
    case PLUS:
      tcode = PLUS_EXPR;
      break;
    case MINUS:
      tcode = MINUS_EXPR;
      break;
    case MULT:
      tcode = MULT_EXPR;
      break;
    case DIV:
      tcode = RDIV_EXPR;
      break;
    case SMIN:
      tcode = MIN_EXPR;
      break;
    case SMAX:
      tcode = MAX_EXPR;
      break;
    default:
      tcode = LAST_AND_UNUSED_TREE_CODE;
      break;
    }
  return ((int) tcode);
}

#include "gt-explow.h"