/* Subroutines for manipulating rtx's in semantically interesting ways.
   Copyright (C) 1987-2016 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */


21#include "config.h"
670ee920 22#include "system.h"
4977bab6 23#include "coretypes.h"
957060b5
AM
24#include "target.h"
25#include "function.h"
18ca7dab
RK
26#include "rtl.h"
27#include "tree.h"
6baf1cc8 28#include "tm_p.h"
957060b5
AM
29#include "expmed.h"
30#include "optabs.h"
31#include "emit-rtl.h"
32#include "recog.h"
33#include "diagnostic-core.h"
957060b5 34#include "stor-layout.h"
b38f3813 35#include "except.h"
36566b39
PK
36#include "dojump.h"
37#include "explow.h"
18ca7dab 38#include "expr.h"
677f3fa8 39#include "common/common-target.h"
aacd3885 40#include "output.h"
18ca7dab 41
static rtx break_out_memory_refs (rtx);


/* Truncate and perhaps sign-extend C as appropriate for MODE.  */

HOST_WIDE_INT
trunc_int_for_mode (HOST_WIDE_INT c, machine_mode mode)
{
  int width = GET_MODE_PRECISION (mode);

  /* You want to truncate to a _what_?  */
  gcc_assert (SCALAR_INT_MODE_P (mode)
              || POINTER_BOUNDS_MODE_P (mode));

  /* Canonicalize BImode to 0 and STORE_FLAG_VALUE.  */
  if (mode == BImode)
    return c & 1 ? STORE_FLAG_VALUE : 0;

  /* Sign-extend for the requested mode.  */

  if (width < HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT sign = 1;
      sign <<= width - 1;
      c &= (sign << 1) - 1;
      c ^= sign;
      c -= sign;
    }

  return c;
}

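/* Illustrative worked example (editor's note, not part of the original
   source): the mask/xor/subtract sequence above is the classic branch-free
   two's-complement sign extension.  For an 8-bit mode and c = 0xff:

     sign = 1 << 7          =  0x80
     c &= (sign << 1) - 1   ->  0xff   (keep the low 8 bits)
     c ^= sign              ->  0x7f   (flip the sign bit down)
     c -= sign              ->  -1     (recenter into [-128, 127])

   so trunc_int_for_mode (0xff, QImode) yields -1, and 0x80 yields -128.  */
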
/* Return an rtx for the sum of X and the integer C, given that X has
   mode MODE.  INPLACE is true if X can be modified in place or false
   if it must be treated as immutable.  */

rtx
plus_constant (machine_mode mode, rtx x, HOST_WIDE_INT c,
               bool inplace)
{
  RTX_CODE code;
  rtx y;
  rtx tem;
  int all_constant = 0;

  gcc_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);

  if (c == 0)
    return x;

 restart:

  code = GET_CODE (x);
  y = x;

  switch (code)
    {
    CASE_CONST_SCALAR_INT:
      return immed_wide_int_const (wi::add (std::make_pair (x, mode), c),
                                   mode);
    case MEM:
      /* If this is a reference to the constant pool, try replacing it with
         a reference to a new constant.  If the resulting address isn't
         valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
          && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
        {
          tem = plus_constant (mode, get_pool_constant (XEXP (x, 0)), c);
          tem = force_const_mem (GET_MODE (x), tem);
          /* Targets may disallow some constants in the constant pool, thus
             force_const_mem may return NULL_RTX.  */
          if (tem && memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
            return tem;
        }
      break;

    case CONST:
      /* If adding to something entirely constant, set a flag
         so that we can add a CONST around the result.  */
      if (inplace && shared_const_p (x))
        inplace = false;
      x = XEXP (x, 0);
      all_constant = 1;
      goto restart;

    case SYMBOL_REF:
    case LABEL_REF:
      all_constant = 1;
      break;

    case PLUS:
      /* The interesting case is adding the integer to a sum.  Look
         for constant term in the sum and combine with C.  For an
         integer constant term or a constant term that is not an
         explicit integer, we combine or group them together anyway.

         We may not immediately return from the recursive call here, lest
         all_constant gets lost.  */

      if (CONSTANT_P (XEXP (x, 1)))
        {
          rtx term = plus_constant (mode, XEXP (x, 1), c, inplace);
          if (term == const0_rtx)
            x = XEXP (x, 0);
          else if (inplace)
            XEXP (x, 1) = term;
          else
            x = gen_rtx_PLUS (mode, XEXP (x, 0), term);
          c = 0;
        }
      else if (rtx *const_loc = find_constant_term_loc (&y))
        {
          if (!inplace)
            {
              /* We need to be careful since X may be shared and we can't
                 modify it in place.  */
              x = copy_rtx (x);
              const_loc = find_constant_term_loc (&x);
            }
          *const_loc = plus_constant (mode, *const_loc, c, true);
          c = 0;
        }
      break;

    default:
      break;
    }

  if (c != 0)
    x = gen_rtx_PLUS (mode, x, gen_int_mode (c, mode));

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    return gen_rtx_CONST (mode, x);
  else
    return x;
}
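
/* Usage sketch (editor's note, not part of the original source): this is
   the standard way to build BASE + OFFSET rtl without useless nodes, e.g.

     rtx addr = plus_constant (Pmode, stack_pointer_rtx, 16);

   produces (plus (reg sp) (const_int 16)), while an offset of zero simply
   returns the original rtx and a CONST_INT operand is folded outright.  */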
\f
/* If X is a sum, return a new sum like X but lacking any constant terms.
   Add all the removed constant terms into *CONSTPTR.
   X itself is not altered.  The result != X if and only if
   it is not isomorphic to X.  */

rtx
eliminate_constant_term (rtx x, rtx *constptr)
{
  rtx x0, x1;
  rtx tem;

  if (GET_CODE (x) != PLUS)
    return x;

  /* First handle constants appearing at this level explicitly.  */
  if (CONST_INT_P (XEXP (x, 1))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
                                                XEXP (x, 1)))
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return eliminate_constant_term (XEXP (x, 0), constptr);
    }

  tem = const0_rtx;
  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
                                                *constptr, tem))
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return gen_rtx_PLUS (GET_MODE (x), x0, x1);
    }

  return x;
}
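
/* Illustrative example (editor's note, not part of the original source):
   on (plus (plus (reg R) (const_int 4)) (const_int 8)) with *CONSTPTR
   initially const0_rtx, both constants fold into *CONSTPTR and the
   function returns (reg R) with *CONSTPTR == (const_int 12).  */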

\f
/* Return a copy of X in which all memory references
   and all constants that involve symbol refs
   have been replaced with new temporary registers.
   Also emit code to load the memory locations and constants
   into those registers.

   If X contains no such constants or memory references,
   X itself (not a copy) is returned.

   If a constant is found in the address that is not a legitimate constant
   in an insn, it is left alone in the hope that it might be valid in the
   address.

   X may contain no arithmetic except addition, subtraction and multiplication.
   Values returned by expand_expr with 1 for sum_ok fit this constraint.  */

static rtx
break_out_memory_refs (rtx x)
{
  if (MEM_P (x)
      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
          && GET_MODE (x) != VOIDmode))
    x = force_reg (GET_MODE (x), x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
           || GET_CODE (x) == MULT)
    {
      rtx op0 = break_out_memory_refs (XEXP (x, 0));
      rtx op1 = break_out_memory_refs (XEXP (x, 1));

      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        x = simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
    }

  return x;
}
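
/* Illustrative example (editor's note, not part of the original source):
   given (plus (mem (symbol_ref "a")) (const_int 4)), the MEM is copied
   into a fresh pseudo by force_reg (emitting the load) and the address
   is rebuilt as (plus (reg TMP) (const_int 4)).  */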
/* Given X, a memory address in address space AS' pointer mode, convert it to
   an address in the address space's address mode, or vice versa (TO_MODE says
   which way).  We take advantage of the fact that pointers are not allowed to
   overflow by commuting arithmetic operations over conversions so that address
   arithmetic insns can be used.  IN_CONST is true if this conversion is inside
   a CONST.  NO_EMIT is true if no insns should be emitted, and instead
   it should return NULL if it can't be simplified without emitting insns.  */

rtx
convert_memory_address_addr_space_1 (machine_mode to_mode ATTRIBUTE_UNUSED,
                                     rtx x, addr_space_t as ATTRIBUTE_UNUSED,
                                     bool in_const ATTRIBUTE_UNUSED,
                                     bool no_emit ATTRIBUTE_UNUSED)
{
#ifndef POINTERS_EXTEND_UNSIGNED
  gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
  return x;
#else /* defined(POINTERS_EXTEND_UNSIGNED) */
  machine_mode pointer_mode, address_mode, from_mode;
  rtx temp;
  enum rtx_code code;

  /* If X already has the right mode, just return it.  */
  if (GET_MODE (x) == to_mode)
    return x;

  pointer_mode = targetm.addr_space.pointer_mode (as);
  address_mode = targetm.addr_space.address_mode (as);
  from_mode = to_mode == pointer_mode ? address_mode : pointer_mode;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    {
    CASE_CONST_SCALAR_INT:
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
        code = TRUNCATE;
      else if (POINTERS_EXTEND_UNSIGNED < 0)
        break;
      else if (POINTERS_EXTEND_UNSIGNED > 0)
        code = ZERO_EXTEND;
      else
        code = SIGN_EXTEND;
      temp = simplify_unary_operation (code, to_mode, x, from_mode);
      if (temp)
        return temp;
      break;

    case SUBREG:
      if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
          && GET_MODE (SUBREG_REG (x)) == to_mode)
        return SUBREG_REG (x);
      break;

    case LABEL_REF:
      temp = gen_rtx_LABEL_REF (to_mode, LABEL_REF_LABEL (x));
      LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
      return temp;

    case SYMBOL_REF:
      temp = shallow_copy_rtx (x);
      PUT_MODE (temp, to_mode);
      return temp;

    case CONST:
      temp = convert_memory_address_addr_space_1 (to_mode, XEXP (x, 0), as,
                                                  true, no_emit);
      return temp ? gen_rtx_CONST (to_mode, temp) : temp;

    case PLUS:
    case MULT:
      /* For addition we can safely permute the conversion and addition
         operation if one operand is a constant and converting the constant
         does not change it or if one operand is a constant and we are
         using a ptr_extend instruction (POINTERS_EXTEND_UNSIGNED < 0).
         We can always safely permute them if we are making the address
         narrower.  Inside a CONST RTL, this is safe for both pointers
         zero or sign extended as pointers cannot wrap.  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
          || (GET_CODE (x) == PLUS
              && CONST_INT_P (XEXP (x, 1))
              && ((in_const && POINTERS_EXTEND_UNSIGNED != 0)
                  || XEXP (x, 1) == convert_memory_address_addr_space_1
                                      (to_mode, XEXP (x, 1), as, in_const,
                                       no_emit)
                  || POINTERS_EXTEND_UNSIGNED < 0)))
        {
          temp = convert_memory_address_addr_space_1 (to_mode, XEXP (x, 0),
                                                      as, in_const, no_emit);
          return (temp ? gen_rtx_fmt_ee (GET_CODE (x), to_mode,
                                         temp, XEXP (x, 1))
                  : temp);
        }
      break;

    default:
      break;
    }

  if (no_emit)
    return NULL_RTX;

  return convert_modes (to_mode, from_mode,
                        x, POINTERS_EXTEND_UNSIGNED);
#endif /* defined(POINTERS_EXTEND_UNSIGNED) */
}

/* Given X, a memory address in address space AS' pointer mode, convert it to
   an address in the address space's address mode, or vice versa (TO_MODE says
   which way).  We take advantage of the fact that pointers are not allowed to
   overflow by commuting arithmetic operations over conversions so that address
   arithmetic insns can be used.  */

rtx
convert_memory_address_addr_space (machine_mode to_mode, rtx x, addr_space_t as)
{
  return convert_memory_address_addr_space_1 (to_mode, x, as, false, false);
}
\f

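/* Illustrative example (editor's note, not part of the original source):
   on a target where the pointer mode is SImode but the address mode is
   DImode (e.g. an ILP32 ABI on a 64-bit machine), converting
   (symbol_ref:SI "x") to DImode just returns a DImode copy of the
   SYMBOL_REF, while (plus:SI (reg) (const_int 4)) may be rewritten as a
   DImode PLUS so that 64-bit address arithmetic insns can be used.  */
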
/* Return something equivalent to X but valid as a memory address for something
   of mode MODE in the named address space AS.  When X is not itself valid,
   this works by copying X or subexpressions of it into registers.  */

rtx
memory_address_addr_space (machine_mode mode, rtx x, addr_space_t as)
{
  rtx oldx = x;
  machine_mode address_mode = targetm.addr_space.address_mode (as);

  x = convert_memory_address_addr_space (address_mode, x, as);

  /* By passing constant addresses through registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (address_mode, x);

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && !REG_P (x))
        x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.  */
      if (memory_address_addr_space_p (mode, x, as))
        goto done;

      /* If it was valid before but breaking out memory refs invalidated it,
         use it the old way.  */
      if (memory_address_addr_space_p (mode, oldx, as))
        {
          x = oldx;
          goto done;
        }

      /* Perform machine-dependent transformations on X
         in certain cases.  This is not necessary since the code
         below can handle all possible cases, but machine-dependent
         transformations can make better code.  */
      {
        rtx orig_x = x;
        x = targetm.addr_space.legitimize_address (x, oldx, mode, as);
        if (orig_x != x && memory_address_addr_space_p (mode, x, as))
          goto done;
      }

      /* PLUS and MULT can appear in special ways
         as the result of attempts to make an address usable for indexing.
         Usually they are dealt with by calling force_operand, below.
         But a sum containing constant terms is special
         if removing them makes the sum a valid address:
         then we generate that address in a register
         and index off of it.  We do this because it often makes
         shorter code, and because the addresses thus generated
         in registers often become common subexpressions.  */
      if (GET_CODE (x) == PLUS)
        {
          rtx constant_term = const0_rtx;
          rtx y = eliminate_constant_term (x, &constant_term);
          if (constant_term == const0_rtx
              || ! memory_address_addr_space_p (mode, y, as))
            x = force_operand (x, NULL_RTX);
          else
            {
              y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
              if (! memory_address_addr_space_p (mode, y, as))
                x = force_operand (x, NULL_RTX);
              else
                x = y;
            }
        }

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
        x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
         it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (REG_P (x))
        x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
         the register is a valid address.  */
      else
        x = force_reg (address_mode, x);
    }

 done:

  gcc_assert (memory_address_addr_space_p (mode, x, as));
  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (REG_P (x))
    mark_reg_pointer (x, BITS_PER_UNIT);
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
    mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}

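/* Usage note (editor's sketch, not part of the original source): most
   callers go through the memory_address macro, which expands to this
   function with ADDR_SPACE_GENERIC, e.g.

     rtx mem = gen_rtx_MEM (SImode, memory_address (SImode, addr));

   guaranteeing the resulting address is valid for an SImode MEM on the
   target, possibly at the cost of emitting address arithmetic insns.  */
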
/* If REF is a MEM with an invalid address, change it into a valid address.
   Pass through anything else unchanged.  REF must be an unshared rtx and
   the function may modify it in-place.  */

rtx
validize_mem (rtx ref)
{
  if (!MEM_P (ref))
    return ref;
  ref = use_anchored_address (ref);
  if (memory_address_addr_space_p (GET_MODE (ref), XEXP (ref, 0),
                                   MEM_ADDR_SPACE (ref)))
    return ref;

  return replace_equiv_address (ref, XEXP (ref, 0), true);
}

/* If X is a memory reference to a member of an object block, try rewriting
   it to use an anchor instead.  Return the new memory reference on success
   and the old one on failure.  */

rtx
use_anchored_address (rtx x)
{
  rtx base;
  HOST_WIDE_INT offset;
  machine_mode mode;

  if (!flag_section_anchors)
    return x;

  if (!MEM_P (x))
    return x;

  /* Split the address into a base and offset.  */
  base = XEXP (x, 0);
  offset = 0;
  if (GET_CODE (base) == CONST
      && GET_CODE (XEXP (base, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (base, 0), 1)))
    {
      offset += INTVAL (XEXP (XEXP (base, 0), 1));
      base = XEXP (XEXP (base, 0), 0);
    }

  /* Check whether BASE is suitable for anchors.  */
  if (GET_CODE (base) != SYMBOL_REF
      || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
      || SYMBOL_REF_ANCHOR_P (base)
      || SYMBOL_REF_BLOCK (base) == NULL
      || !targetm.use_anchors_for_symbol_p (base))
    return x;

  /* Decide where BASE is going to be.  */
  place_block_symbol (base);

  /* Get the anchor we need to use.  */
  offset += SYMBOL_REF_BLOCK_OFFSET (base);
  base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
                             SYMBOL_REF_TLS_MODEL (base));

  /* Work out the offset from the anchor.  */
  offset -= SYMBOL_REF_BLOCK_OFFSET (base);

  /* If we're going to run a CSE pass, force the anchor into a register.
     We will then be able to reuse registers for several accesses, if the
     target costs say that that's worthwhile.  */
  mode = GET_MODE (base);
  if (!cse_not_expected)
    base = force_reg (mode, base);

  return replace_equiv_address (x, plus_constant (mode, base, offset));
}
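
/* Illustrative example (editor's note, not part of the original source):
   with -fsection-anchors, a reference such as (mem (symbol_ref "x")),
   where "x" lives in an object block, can be rewritten as

     (mem (plus (symbol_ref <anchor>) (const_int <offset of x>)))

   letting several nearby globals share one base register once the anchor
   has been forced into a register by a later CSE pass.  */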
\f
/* Copy the value or contents of X to a new temp reg and return that reg.  */

rtx
copy_to_reg (rtx x)
{
  rtx temp = gen_reg_rtx (GET_MODE (x));

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (x != temp)
    emit_move_insn (temp, x);

  return temp;
}

/* Like copy_to_reg but always give the new register mode Pmode
   in case X is a constant.  */

rtx
copy_addr_to_reg (rtx x)
{
  return copy_to_mode_reg (Pmode, x);
}

/* Like copy_to_reg but always give the new register mode MODE
   in case X is a constant.  */

rtx
copy_to_mode_reg (machine_mode mode, rtx x)
{
  rtx temp = gen_reg_rtx (mode);

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
  if (x != temp)
    emit_move_insn (temp, x);
  return temp;
}

/* Load X into a register if it is not already one.
   Use mode MODE for the register.
   X should be valid for mode MODE, but it may be a constant which
   is valid for all integer modes; that's why caller must specify MODE.

   The caller must not alter the value in the register we return,
   since we mark it as a "constant" register.  */

rtx
force_reg (machine_mode mode, rtx x)
{
  rtx temp, set;
  rtx_insn *insn;

  if (REG_P (x))
    return x;

  if (general_operand (x, mode))
    {
      temp = gen_reg_rtx (mode);
      insn = emit_move_insn (temp, x);
    }
  else
    {
      temp = force_operand (x, NULL_RTX);
      if (REG_P (temp))
        insn = get_last_insn ();
      else
        {
          rtx temp2 = gen_reg_rtx (mode);
          insn = emit_move_insn (temp2, temp);
          temp = temp2;
        }
    }

  /* Let optimizers know that TEMP's value never changes
     and that X can be substituted for it.  Don't get confused
     if INSN set something else (such as a SUBREG of TEMP).  */
  if (CONSTANT_P (x)
      && (set = single_set (insn)) != 0
      && SET_DEST (set) == temp
      && ! rtx_equal_p (x, SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, x);

  /* Let optimizers know that TEMP is a pointer, and if so, the
     known alignment of that pointer.  */
  {
    unsigned align = 0;
    if (GET_CODE (x) == SYMBOL_REF)
      {
        align = BITS_PER_UNIT;
        if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
          align = DECL_ALIGN (SYMBOL_REF_DECL (x));
      }
    else if (GET_CODE (x) == LABEL_REF)
      align = BITS_PER_UNIT;
    else if (GET_CODE (x) == CONST
             && GET_CODE (XEXP (x, 0)) == PLUS
             && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
             && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
      {
        rtx s = XEXP (XEXP (x, 0), 0);
        rtx c = XEXP (XEXP (x, 0), 1);
        unsigned sa, ca;

        sa = BITS_PER_UNIT;
        if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
          sa = DECL_ALIGN (SYMBOL_REF_DECL (s));

        if (INTVAL (c) == 0)
          align = sa;
        else
          {
            ca = ctz_hwi (INTVAL (c)) * BITS_PER_UNIT;
            align = MIN (sa, ca);
          }
      }

    if (align || (MEM_P (x) && MEM_POINTER (x)))
      mark_reg_pointer (temp, align);
  }

  return temp;
}
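
/* Illustrative example (editor's note, not part of the original source):
   forcing (const (plus (symbol_ref "s") (const_int 4))) into a register,
   where "s" is known to be 64-bit aligned, marks the new pseudo as a
   pointer with alignment MIN (64, ctz (4) * BITS_PER_UNIT) = 16 bits and
   attaches a REG_EQUAL note recording the constant value.  */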

/* If X is a memory ref, copy its contents to a new temp reg and return
   that reg.  Otherwise, return X.  */

rtx
force_not_mem (rtx x)
{
  rtx temp;

  if (!MEM_P (x) || GET_MODE (x) == BLKmode)
    return x;

  temp = gen_reg_rtx (GET_MODE (x));

  if (MEM_POINTER (x))
    REG_POINTER (temp) = 1;

  emit_move_insn (temp, x);
  return temp;
}

/* Copy X to TARGET (if it's nonzero and a reg)
   or to a new temp reg and return that reg.
   MODE is the mode to use for X in case it is a constant.  */

rtx
copy_to_suggested_reg (rtx x, rtx target, machine_mode mode)
{
  rtx temp;

  if (target && REG_P (target))
    temp = target;
  else
    temp = gen_reg_rtx (mode);

  emit_move_insn (temp, x);
  return temp;
}
\f
/* Return the mode to use to pass or return a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_RETURN is nonzero if the caller is promoting the return value
   of FNDECL, else it is for promoting args.  */

machine_mode
promote_function_mode (const_tree type, machine_mode mode, int *punsignedp,
                       const_tree funtype, int for_return)
{
  /* Called without a type node for a libcall.  */
  if (type == NULL_TREE)
    {
      if (INTEGRAL_MODE_P (mode))
        return targetm.calls.promote_function_mode (NULL_TREE, mode,
                                                    punsignedp, funtype,
                                                    for_return);
      else
        return mode;
    }

  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
    case POINTER_TYPE:   case REFERENCE_TYPE:
      return targetm.calls.promote_function_mode (type, mode, punsignedp,
                                                  funtype, for_return);

    default:
      return mode;
    }
}
/* Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.  */

machine_mode
promote_mode (const_tree type ATTRIBUTE_UNUSED, machine_mode mode,
              int *punsignedp ATTRIBUTE_UNUSED)
{
#ifdef PROMOTE_MODE
  enum tree_code code;
  int unsignedp;
#endif

  /* For libcalls this is invoked without TYPE from the backends
     TARGET_PROMOTE_FUNCTION_MODE hooks.  Don't do anything in that
     case.  */
  if (type == NULL_TREE)
    return mode;

  /* FIXME: this is the same logic that was there until GCC 4.4, but we
     probably want to test POINTERS_EXTEND_UNSIGNED even if PROMOTE_MODE
     is not defined.  The affected targets are M32C, S390, SPARC.  */
#ifdef PROMOTE_MODE
  code = TREE_CODE (type);
  unsignedp = *punsignedp;

  switch (code)
    {
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
      PROMOTE_MODE (mode, unsignedp, type);
      *punsignedp = unsignedp;
      return mode;
      break;

#ifdef POINTERS_EXTEND_UNSIGNED
    case REFERENCE_TYPE:
    case POINTER_TYPE:
      *punsignedp = POINTERS_EXTEND_UNSIGNED;
      return targetm.addr_space.address_mode
               (TYPE_ADDR_SPACE (TREE_TYPE (type)));
      break;
#endif

    default:
      return mode;
    }
#else
  return mode;
#endif
}
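
/* Illustrative example (editor's note, not part of the original source;
   the PROMOTE_MODE body below is a hypothetical port definition): on a
   target whose PROMOTE_MODE widens sub-word integers, e.g.

     #define PROMOTE_MODE(MODE, UNSIGNEDP, TYPE)      \
       if (GET_MODE_CLASS (MODE) == MODE_INT          \
           && GET_MODE_SIZE (MODE) < UNITS_PER_WORD)  \
         (MODE) = word_mode;

   promote_mode on a 'short' yields word_mode, so the value is kept
   sign- or zero-extended in its register between operations.  */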


/* Use one of promote_mode or promote_function_mode to find the promoted
   mode of DECL.  If PUNSIGNEDP is not NULL, store there the unsignedness
   of DECL after promotion.  */

machine_mode
promote_decl_mode (const_tree decl, int *punsignedp)
{
  tree type = TREE_TYPE (decl);
  int unsignedp = TYPE_UNSIGNED (type);
  machine_mode mode = DECL_MODE (decl);
  machine_mode pmode;

  if (TREE_CODE (decl) == RESULT_DECL && !DECL_BY_REFERENCE (decl))
    pmode = promote_function_mode (type, mode, &unsignedp,
                                   TREE_TYPE (current_function_decl), 1);
  else if (TREE_CODE (decl) == RESULT_DECL || TREE_CODE (decl) == PARM_DECL)
    pmode = promote_function_mode (type, mode, &unsignedp,
                                   TREE_TYPE (current_function_decl), 2);
  else
    pmode = promote_mode (type, mode, &unsignedp);

  if (punsignedp)
    *punsignedp = unsignedp;
  return pmode;
}

/* Return the promoted mode for name.  If it is a named SSA_NAME, it
   is the same as promote_decl_mode.  Otherwise, it is the promoted
   mode of a temp decl of same type as the SSA_NAME, if we had created
   one.  */

machine_mode
promote_ssa_mode (const_tree name, int *punsignedp)
{
  gcc_assert (TREE_CODE (name) == SSA_NAME);

  /* Partitions holding parms and results must be promoted as expected
     by function.c.  */
  if (SSA_NAME_VAR (name)
      && (TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
          || TREE_CODE (SSA_NAME_VAR (name)) == RESULT_DECL))
    {
      machine_mode mode = promote_decl_mode (SSA_NAME_VAR (name), punsignedp);
      if (mode != BLKmode)
        return mode;
    }

  tree type = TREE_TYPE (name);
  int unsignedp = TYPE_UNSIGNED (type);
  machine_mode mode = TYPE_MODE (type);

  /* Bypass TYPE_MODE when it maps vector modes to BLKmode.  */
  if (mode == BLKmode)
    {
      gcc_assert (VECTOR_TYPE_P (type));
      mode = type->type_common.mode;
    }

  machine_mode pmode = promote_mode (type, mode, &unsignedp);
  if (punsignedp)
    *punsignedp = unsignedp;

  return pmode;
}


\f
/* Controls the behavior of {anti_,}adjust_stack.  */
static bool suppress_reg_args_size;

/* A helper for adjust_stack and anti_adjust_stack.  */

static void
adjust_stack_1 (rtx adjust, bool anti_p)
{
  rtx temp;
  rtx_insn *insn;

  /* Hereafter anti_p means subtract_p.  */
  if (!STACK_GROWS_DOWNWARD)
    anti_p = !anti_p;

  temp = expand_binop (Pmode,
                       anti_p ? sub_optab : add_optab,
                       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
                       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    insn = emit_move_insn (stack_pointer_rtx, temp);
  else
    {
      insn = get_last_insn ();
      temp = single_set (insn);
      gcc_assert (temp != NULL && SET_DEST (temp) == stack_pointer_rtx);
    }

  if (!suppress_reg_args_size)
    add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
}

/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
   This pops when ADJUST is positive.  ADJUST need not be constant.  */

void
adjust_stack (rtx adjust)
{
  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (CONST_INT_P (adjust))
    stack_pointer_delta -= INTVAL (adjust);

  adjust_stack_1 (adjust, false);
}

/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
   This pushes when ADJUST is positive.  ADJUST need not be constant.  */

void
anti_adjust_stack (rtx adjust)
{
  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (CONST_INT_P (adjust))
    stack_pointer_delta += INTVAL (adjust);

  adjust_stack_1 (adjust, true);
}

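/* Usage note (editor's sketch, not part of the original source):

     anti_adjust_stack (GEN_INT (32));

   emits sp := sp - 32 on a STACK_GROWS_DOWNWARD target (sp + 32
   otherwise) and bumps stack_pointer_delta by 32, so later code can
   check that pushes and pops balance out.  */
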
/* Round the size of a block to be pushed up to the boundary required
   by this machine.  SIZE is the desired size, which need not be constant.  */

static rtx
round_push (rtx size)
{
  rtx align_rtx, alignm1_rtx;

  if (!SUPPORTS_STACK_ALIGNMENT
      || crtl->preferred_stack_boundary == MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;

      if (align == 1)
        return size;

      if (CONST_INT_P (size))
        {
          HOST_WIDE_INT new_size = (INTVAL (size) + align - 1) / align * align;

          if (INTVAL (size) != new_size)
            size = GEN_INT (new_size);
          return size;
        }

      align_rtx = GEN_INT (align);
      alignm1_rtx = GEN_INT (align - 1);
    }
  else
    {
      /* If crtl->preferred_stack_boundary might still grow, use
         virtual_preferred_stack_boundary_rtx instead.  This will be
         substituted by the right value in vregs pass and optimized
         during combine.  */
      align_rtx = virtual_preferred_stack_boundary_rtx;
      alignm1_rtx = force_operand (plus_constant (Pmode, align_rtx, -1),
                                   NULL_RTX);
    }

  /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
     but we know it can't.  So add ourselves and then do
     TRUNC_DIV_EXPR.  */
  size = expand_binop (Pmode, add_optab, size, alignm1_rtx,
                       NULL_RTX, 1, OPTAB_LIB_WIDEN);
  size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, align_rtx,
                        NULL_RTX, 1);
  size = expand_mult (Pmode, size, align_rtx, NULL_RTX, 1);

  return size;
}
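
/* Illustrative example (editor's note, not part of the original source):
   with a fixed preferred stack boundary of 128 bits (align = 16 bytes),
   a constant size of 40 is rounded up to (40 + 15) / 16 * 16 = 48, and a
   size that is already a multiple of 16 is returned unchanged.  */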
\f
/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
   to a previously-created save area.  If no save area has been allocated,
   this function will allocate one.  If a save area is specified, it
   must be of the proper mode.  */

void
emit_stack_save (enum save_level save_level, rtx *psave)
{
  rtx sa = *psave;
  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx_insn *(*fcn) (rtx, rtx) = gen_move_insn;
  machine_mode mode = STACK_SAVEAREA_MODE (save_level);

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
    case SAVE_BLOCK:
      if (targetm.have_save_stack_block ())
        fcn = targetm.gen_save_stack_block;
      break;
    case SAVE_FUNCTION:
      if (targetm.have_save_stack_function ())
        fcn = targetm.gen_save_stack_function;
      break;
    case SAVE_NONLOCAL:
      if (targetm.have_save_stack_nonlocal ())
        fcn = targetm.gen_save_stack_nonlocal;
      break;
    default:
      break;
    }

  /* If there is no save area and we have to allocate one, do so.  Otherwise
     verify the save area is the proper mode.  */

  if (sa == 0)
    {
      if (mode != VOIDmode)
        {
          if (save_level == SAVE_NONLOCAL)
            *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
          else
            *psave = sa = gen_reg_rtx (mode);
        }
    }

  do_pending_stack_adjust ();
  if (sa != 0)
    sa = validize_mem (sa);
  emit_insn (fcn (sa, stack_pointer_rtx));
}

/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
   area made by emit_stack_save.  If it is zero, we have nothing to do.  */

void
emit_stack_restore (enum save_level save_level, rtx sa)
{
  /* The default is that we use a move insn.  */
  rtx_insn *(*fcn) (rtx, rtx) = gen_move_insn;

  /* If stack_realign_drap, the x86 backend emits a prologue that aligns both
     STACK_POINTER and HARD_FRAME_POINTER.
     If stack_realign_fp, the x86 backend emits a prologue that aligns only
     STACK_POINTER.  This renders the HARD_FRAME_POINTER unusable for accessing
     aligned variables, which is reflected in ix86_can_eliminate.
     We normally still have the realigned STACK_POINTER that we can use.
     But if there is a stack restore still present at reload, it can trigger
     mark_not_eliminable for the STACK_POINTER, leaving no way to eliminate
     FRAME_POINTER into a hard reg.
     To prevent this situation, we force need_drap if we emit a stack
     restore.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    crtl->need_drap = true;

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
    case SAVE_BLOCK:
      if (targetm.have_restore_stack_block ())
        fcn = targetm.gen_restore_stack_block;
      break;
    case SAVE_FUNCTION:
      if (targetm.have_restore_stack_function ())
        fcn = targetm.gen_restore_stack_function;
      break;
    case SAVE_NONLOCAL:
      if (targetm.have_restore_stack_nonlocal ())
        fcn = targetm.gen_restore_stack_nonlocal;
      break;
    default:
      break;
    }

  if (sa != 0)
    {
      sa = validize_mem (sa);
      /* These clobbers prevent the scheduler from moving
         references to variable arrays below the code
         that deletes (pops) the arrays.  */
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, stack_pointer_rtx));
    }

  discard_pending_stack_adjust ();

  emit_insn (fcn (stack_pointer_rtx, sa));
}
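
/* Usage sketch (editor's note, not part of the original source): the two
   functions are used in pairs around code that moves the stack pointer,
   for instance when expanding a statement expression containing a VLA:

     rtx save_area = NULL_RTX;
     emit_stack_save (SAVE_BLOCK, &save_area);
     ... emit code that calls allocate_dynamic_stack_space ...
     emit_stack_restore (SAVE_BLOCK, save_area);

   The first call allocates the save area itself, in the mode the target
   chose via STACK_SAVEAREA_MODE.  */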

/* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
   function.  This should be called whenever we allocate or deallocate
   dynamic stack space.  */

void
update_nonlocal_goto_save_area (void)
{
  tree t_save;
  rtx r_save;

  /* The nonlocal_goto_save_area object is an array of N pointers.  The
     first one is used for the frame pointer save; the rest are sized by
     STACK_SAVEAREA_MODE.  Create a reference to array index 1, the first
     of the stack save area slots.  */
  t_save = build4 (ARRAY_REF,
                   TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
                   cfun->nonlocal_goto_save_area,
                   integer_one_node, NULL_TREE, NULL_TREE);
  r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);

  emit_stack_save (SAVE_NONLOCAL, &r_save);
}

/* Record a new stack level for the current function.  This should be called
   whenever we allocate or deallocate dynamic stack space.  */

void
record_new_stack_level (void)
{
  /* Record the new stack level for nonlocal gotos.  */
  if (cfun->nonlocal_goto_save_area)
    update_nonlocal_goto_save_area ();

  /* Record the new stack level for SJLJ exceptions.  */
  if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
    update_sjlj_context ();
}
\f
/* Return an rtx representing the address of an area of memory dynamically
   pushed on the stack.

   Any required stack pointer alignment is preserved.

   SIZE is an rtx representing the size of the area.

   SIZE_ALIGN is the alignment (in bits) that we know SIZE has.  This
   parameter may be zero.  If so, a proper value will be extracted
   from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.

   REQUIRED_ALIGN is the alignment (in bits) required for the region
   of memory.

   If CANNOT_ACCUMULATE is set to TRUE, the caller guarantees that the
   stack space allocated by the generated code cannot be added with itself
   in the course of the execution of the function.  It is always safe to
   pass FALSE here and the following criterion is sufficient in order to
   pass TRUE: every path in the CFG that starts at the allocation point and
   loops to it executes the associated deallocation code.  */

rtx
allocate_dynamic_stack_space (rtx size, unsigned size_align,
                              unsigned required_align, bool cannot_accumulate)
{
  HOST_WIDE_INT stack_usage_size = -1;
  rtx_code_label *final_label;
  rtx final_target, target;
  unsigned extra;

  /* If we're asking for zero bytes, it doesn't matter what we point
     to since we can't dereference it.  But return a reasonable
     address anyway.  */
  if (size == const0_rtx)
    return virtual_stack_dynamic_rtx;

  /* Otherwise, show we're calling alloca or equivalent.  */
  cfun->calls_alloca = 1;

  /* If stack usage info is requested, look into the size we are passed.
     We need to do so this early to avoid the obfuscation that may be
     introduced later by the various alignment operations.  */
  if (flag_stack_usage_info)
    {
      if (CONST_INT_P (size))
        stack_usage_size = INTVAL (size);
      else if (REG_P (size))
        {
          /* Look into the last emitted insn and see if we can deduce
             something for the register.  */
          rtx_insn *insn;
          rtx set, note;
          insn = get_last_insn ();
          if ((set = single_set (insn)) && rtx_equal_p (SET_DEST (set), size))
            {
              if (CONST_INT_P (SET_SRC (set)))
                stack_usage_size = INTVAL (SET_SRC (set));
              else if ((note = find_reg_equal_equiv_note (insn))
                       && CONST_INT_P (XEXP (note, 0)))
                stack_usage_size = INTVAL (XEXP (note, 0));
            }
        }

      /* If the size is not constant, we can't say anything.  */
      if (stack_usage_size == -1)
        {
          current_function_has_unbounded_dynamic_stack_size = 1;
          stack_usage_size = 0;
        }
    }

  /* Ensure the size is in the proper mode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* Adjust SIZE_ALIGN, if needed.  */
  if (CONST_INT_P (size))
    {
      unsigned HOST_WIDE_INT lsb;

      lsb = INTVAL (size);
      lsb &= -lsb;

      /* Watch out for overflow truncating to "unsigned".  */
      if (lsb > UINT_MAX / BITS_PER_UNIT)
        size_align = 1u << (HOST_BITS_PER_INT - 1);
      else
        size_align = (unsigned)lsb * BITS_PER_UNIT;
    }
  else if (size_align < BITS_PER_UNIT)
    size_align = BITS_PER_UNIT;

  /* We can't attempt to minimize alignment necessary, because we don't
     know the final value of preferred_stack_boundary yet while executing
     this code.  */
  if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
    crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;

  /* We will need to ensure that the address we return is aligned to
     REQUIRED_ALIGN.  At this point in the compilation, we don't always
     know the final value of the STACK_DYNAMIC_OFFSET used in function.c
     (it might depend on the size of the outgoing parameter lists, for
     example), so we must preventively align the value.  We leave space
     in SIZE for the hole that might result from the alignment operation.  */

  extra = (required_align - BITS_PER_UNIT) / BITS_PER_UNIT;
  size = plus_constant (Pmode, size, extra);
  size = force_operand (size, NULL_RTX);

  if (flag_stack_usage_info)
    stack_usage_size += extra;

  if (extra && size_align > BITS_PER_UNIT)
    size_align = BITS_PER_UNIT;

  /* Round the size to a multiple of the required stack alignment.
     Since the stack is presumed to be rounded before this allocation,
     this will maintain the required alignment.

     If the stack grows downward, we could save an insn by subtracting
     SIZE from the stack pointer and then aligning the stack pointer.
     The problem with this is that the stack pointer may be unaligned
     between the execution of the subtraction and alignment insns and
     some machines do not allow this.  Even on those that do, some
     signal handlers malfunction if a signal should occur between those
     insns.  Since this is an extremely rare event, we have no reliable
     way of knowing which systems have this problem.  So we avoid even
     momentarily mis-aligning the stack.  */
  if (size_align % MAX_SUPPORTED_STACK_ALIGNMENT != 0)
    {
      size = round_push (size);

      if (flag_stack_usage_info)
        {
          int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
          stack_usage_size = (stack_usage_size + align - 1) / align * align;
        }
    }

  target = gen_reg_rtx (Pmode);

  /* The size is supposed to be fully adjusted at this point so record it
     if stack usage info is requested.  */
  if (flag_stack_usage_info)
    {
      current_function_dynamic_stack_size += stack_usage_size;

      /* ??? This is gross but the only safe stance in the absence
         of stack usage oriented flow analysis.  */
      if (!cannot_accumulate)
        current_function_has_unbounded_dynamic_stack_size = 1;
    }

  final_label = NULL;
  final_target = NULL_RTX;

  /* If we are splitting the stack, we need to ask the backend whether
     there is enough room on the current stack.  If there isn't, or if
     the backend doesn't know how to tell us, then we need to call a
     function to allocate memory in some other way.  This memory will
     be released when we release the current stack segment.  The
     effect is that stack allocation becomes less efficient, but at
     least it doesn't cause a stack overflow.  */
  if (flag_split_stack)
    {
      rtx_code_label *available_label;
      rtx ask, space, func;

      available_label = NULL;

      if (targetm.have_split_stack_space_check ())
        {
          available_label = gen_label_rtx ();

          /* This instruction will branch to AVAILABLE_LABEL if there
             are SIZE bytes available on the stack.  */
          emit_insn (targetm.gen_split_stack_space_check
                     (size, available_label));
        }

      /* The __morestack_allocate_stack_space function will allocate
         memory using malloc.  If the alignment of the memory returned
         by malloc does not meet REQUIRED_ALIGN, we increase SIZE to
         make sure we allocate enough space.  */
      if (MALLOC_ABI_ALIGNMENT >= required_align)
        ask = size;
      else
        ask = expand_binop (Pmode, add_optab, size,
                            gen_int_mode (required_align / BITS_PER_UNIT - 1,
                                          Pmode),
                            NULL_RTX, 1, OPTAB_LIB_WIDEN);

      func = init_one_libfunc ("__morestack_allocate_stack_space");

      space = emit_library_call_value (func, target, LCT_NORMAL, Pmode,
                                       1, ask, Pmode);

      if (available_label == NULL_RTX)
        return space;

      final_target = gen_reg_rtx (Pmode);

      emit_move_insn (final_target, space);

      final_label = gen_label_rtx ();
      emit_jump (final_label);

      emit_label (available_label);
    }

  do_pending_stack_adjust ();

  /* We ought to be called always on the toplevel and stack ought to be aligned
     properly.  */
  gcc_assert (!(stack_pointer_delta
                % (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)));

  /* If needed, check that we have the required amount of stack.  Take into
     account what has already been checked.  */
  if (STACK_CHECK_MOVING_SP)
    ;
  else if (flag_stack_check == GENERIC_STACK_CHECK)
    probe_stack_range (STACK_OLD_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE,
                       size);
  else if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK)
    probe_stack_range (STACK_CHECK_PROTECT, size);

  /* Don't let anti_adjust_stack emit notes.  */
  suppress_reg_args_size = true;

  /* Perform the required allocation from the stack.  Some systems do
     this differently than simply incrementing/decrementing from the
     stack pointer, such as acquiring the space by calling malloc().  */
  if (targetm.have_allocate_stack ())
    {
      struct expand_operand ops[2];
      /* We don't have to check against the predicate for operand 0 since
         TARGET is known to be a pseudo of the proper mode, which must
         be valid for the operand.  */
      create_fixed_operand (&ops[0], target);
      create_convert_operand_to (&ops[1], size, STACK_SIZE_MODE, true);
      expand_insn (targetm.code_for_allocate_stack, 2, ops);
    }
  else
    {
      int saved_stack_pointer_delta;

      if (!STACK_GROWS_DOWNWARD)
        emit_move_insn (target, virtual_stack_dynamic_rtx);

      /* Check stack bounds if necessary.  */
      if (crtl->limit_stack)
        {
          rtx available;
          rtx_code_label *space_available = gen_label_rtx ();
          if (STACK_GROWS_DOWNWARD)
            available = expand_binop (Pmode, sub_optab,
                                      stack_pointer_rtx, stack_limit_rtx,
                                      NULL_RTX, 1, OPTAB_WIDEN);
          else
            available = expand_binop (Pmode, sub_optab,
                                      stack_limit_rtx, stack_pointer_rtx,
                                      NULL_RTX, 1, OPTAB_WIDEN);

          emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
                                   space_available);
          if (targetm.have_trap ())
            emit_insn (targetm.gen_trap ());
          else
            error ("stack limits not supported on this target");
          emit_barrier ();
          emit_label (space_available);
        }

      saved_stack_pointer_delta = stack_pointer_delta;

      if (flag_stack_check && STACK_CHECK_MOVING_SP)
        anti_adjust_stack_and_probe (size, false);
      else
        anti_adjust_stack (size);

      /* Even if size is constant, don't modify stack_pointer_delta.
         The constant size alloca should preserve
         crtl->preferred_stack_boundary alignment.  */
      stack_pointer_delta = saved_stack_pointer_delta;

      if (STACK_GROWS_DOWNWARD)
        emit_move_insn (target, virtual_stack_dynamic_rtx);
    }

  suppress_reg_args_size = false;

  /* Finish up the split stack handling.  */
  if (final_label != NULL_RTX)
    {
      gcc_assert (flag_split_stack);
      emit_move_insn (final_target, target);
      emit_label (final_label);
      target = final_target;
    }

  /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
     but we know it can't.  So add ourselves and then do
     TRUNC_DIV_EXPR.  */
  target = expand_binop (Pmode, add_optab, target,
                         gen_int_mode (required_align / BITS_PER_UNIT - 1,
                                       Pmode),
                         NULL_RTX, 1, OPTAB_LIB_WIDEN);
  target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
                          gen_int_mode (required_align / BITS_PER_UNIT, Pmode),
                          NULL_RTX, 1);
  target = expand_mult (Pmode, target,
                        gen_int_mode (required_align / BITS_PER_UNIT, Pmode),
                        NULL_RTX, 1);

  /* Now that we've committed to a return value, mark its alignment.  */
  mark_reg_pointer (target, required_align);

  /* Record the new stack level.  */
  record_new_stack_level ();

  return target;
}
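
/* Illustrative example (editor's note, not part of the original source):
   the closing add/divide/multiply sequence rounds TARGET up to
   REQUIRED_ALIGN.  With REQUIRED_ALIGN = 128 bits (16 bytes) and a raw
   TARGET of 0x1004, it computes

     (0x1004 + 15) / 16 * 16 = 0x1010

   the lowest 16-byte-aligned address inside the area that was
   over-allocated by EXTRA = 15 bytes above.  */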
\f
/* A front end may want to override GCC's stack checking by providing a
   run-time routine to call to check the stack, so provide a mechanism for
   calling that routine.  */

static GTY(()) rtx stack_check_libfunc;

void
set_stack_check_libfunc (const char *libfunc_name)
{
  gcc_assert (stack_check_libfunc == NULL_RTX);
  stack_check_libfunc = gen_rtx_SYMBOL_REF (Pmode, libfunc_name);
}
\f
/* Emit one stack probe at ADDRESS, an address within the stack.  */

void
emit_stack_probe (rtx address)
{
  if (targetm.have_probe_stack_address ())
    emit_insn (targetm.gen_probe_stack_address (address));
  else
    {
      rtx memref = gen_rtx_MEM (word_mode, address);

      MEM_VOLATILE_P (memref) = 1;

      /* See if we have an insn to probe the stack.  */
      if (targetm.have_probe_stack ())
        emit_insn (targetm.gen_probe_stack (memref));
      else
        emit_move_insn (memref, const0_rtx);
    }
}

d9b3eb63 1507/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
d809253a
EB
1508 FIRST is a constant and size is a Pmode RTX. These are offsets from
1509 the current stack pointer. STACK_GROWS_DOWNWARD says whether to add
1510 or subtract them from the stack pointer. */
1511
1512#define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP)
edff2491 1513
62f9f30b 1514#if STACK_GROWS_DOWNWARD
edff2491 1515#define STACK_GROW_OP MINUS
d809253a
EB
1516#define STACK_GROW_OPTAB sub_optab
1517#define STACK_GROW_OFF(off) -(off)
edff2491
RK
1518#else
1519#define STACK_GROW_OP PLUS
d809253a
EB
1520#define STACK_GROW_OPTAB add_optab
1521#define STACK_GROW_OFF(off) (off)
edff2491
RK
1522#endif
1523
1524void
502b8322 1525probe_stack_range (HOST_WIDE_INT first, rtx size)
edff2491 1526{
4b6c1672
RK
1527 /* First ensure SIZE is Pmode. */
1528 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
1529 size = convert_to_mode (Pmode, size, 1);
1530
d809253a
EB
1531 /* Next see if we have a function to check the stack. */
1532 if (stack_check_libfunc)
f5f5363f 1533 {
d809253a 1534 rtx addr = memory_address (Pmode,
2b3aadfc
RH
1535 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1536 stack_pointer_rtx,
0a81f074
RS
1537 plus_constant (Pmode,
1538 size, first)));
0529235d 1539 emit_library_call (stack_check_libfunc, LCT_THROW, VOIDmode, 1, addr,
949fa04c 1540 Pmode);
f5f5363f 1541 }
14a774a9 1542
d809253a 1543 /* Next see if we have an insn to check the stack. */
10169a8b 1544 else if (targetm.have_check_stack ())
edff2491 1545 {
a5c7d693 1546 struct expand_operand ops[1];
d809253a
EB
1547 rtx addr = memory_address (Pmode,
1548 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1549 stack_pointer_rtx,
0a81f074
RS
1550 plus_constant (Pmode,
1551 size, first)));
d6a6a07a 1552 bool success;
a5c7d693 1553 create_input_operand (&ops[0], addr, Pmode);
10169a8b 1554 success = maybe_expand_insn (targetm.code_for_check_stack, 1, ops);
d6a6a07a 1555 gcc_assert (success);
edff2491 1556 }
edff2491 1557
d809253a
EB
1558 /* Otherwise we have to generate explicit probes. If we have a constant
1559 small number of them to generate, that's the easy case. */
1560 else if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
edff2491 1561 {
d809253a
EB
1562 HOST_WIDE_INT isize = INTVAL (size), i;
1563 rtx addr;
1564
1565 /* Probe at FIRST + N * PROBE_INTERVAL for values of N from 1 until
1566 it exceeds SIZE. If only one probe is needed, this will not
1567 generate any code. Then probe at FIRST + SIZE. */
1568 for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
1569 {
1570 addr = memory_address (Pmode,
0a81f074 1571 plus_constant (Pmode, stack_pointer_rtx,
d809253a
EB
1572 STACK_GROW_OFF (first + i)));
1573 emit_stack_probe (addr);
1574 }
1575
1576 addr = memory_address (Pmode,
0a81f074 1577 plus_constant (Pmode, stack_pointer_rtx,
d809253a
EB
1578 STACK_GROW_OFF (first + isize)));
1579 emit_stack_probe (addr);
edff2491
RK
1580 }

  /* In the variable case, do the same as above, but in a loop.  Note that we
     must be extra careful with variables wrapping around because we might be
     at the very top (or the very bottom) of the address space and we have to
     be able to handle this case properly; in particular, we use an equality
     test for the loop condition.  */
  else
    {
      rtx rounded_size, rounded_size_op, test_addr, last_addr, temp;
      rtx_code_label *loop_lab = gen_label_rtx ();
      rtx_code_label *end_lab = gen_label_rtx ();

      /* Step 1: round SIZE to the previous multiple of the interval.  */

      /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
      rounded_size
        = simplify_gen_binary (AND, Pmode, size,
                               gen_int_mode (-PROBE_INTERVAL, Pmode));
      rounded_size_op = force_operand (rounded_size, NULL_RTX);


      /* Step 2: compute initial and final value of the loop counter.  */

      /* TEST_ADDR = SP + FIRST.  */
      test_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 gen_int_mode (first, Pmode)),
                                 NULL_RTX);

      /* LAST_ADDR = SP + FIRST + ROUNDED_SIZE.  */
      last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 test_addr,
                                                 rounded_size_op), NULL_RTX);


      /* Step 3: the loop

         while (TEST_ADDR != LAST_ADDR)
           {
             TEST_ADDR = TEST_ADDR + PROBE_INTERVAL
             probe at TEST_ADDR
           }

         probes at FIRST + N * PROBE_INTERVAL for values of N from 1
         until it is equal to ROUNDED_SIZE.  */
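      /* Worked example (an addition; assumes PROBE_INTERVAL == 4096,
         FIRST == 0 and a run-time SIZE of 10000): ROUNDED_SIZE is 8192,
         so the loop probes at offsets 4096 and 8192, and the residual
         1808 bytes are covered by the Step 4 probe at offset 10000.  */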

      emit_label (loop_lab);

      /* Jump to END_LAB if TEST_ADDR == LAST_ADDR.  */
      emit_cmp_and_jump_insns (test_addr, last_addr, EQ, NULL_RTX, Pmode, 1,
                               end_lab);

      /* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL.  */
      temp = expand_binop (Pmode, STACK_GROW_OPTAB, test_addr,
                           gen_int_mode (PROBE_INTERVAL, Pmode), test_addr,
                           1, OPTAB_WIDEN);

      gcc_assert (temp == test_addr);

      /* Probe at TEST_ADDR.  */
      emit_stack_probe (test_addr);

      emit_jump (loop_lab);

      emit_label (end_lab);


      /* Step 4: probe at FIRST + SIZE if we cannot assert at compile-time
         that SIZE is equal to ROUNDED_SIZE.  */

      /* TEMP = SIZE - ROUNDED_SIZE.  */
      temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
      if (temp != const0_rtx)
        {
          rtx addr;

          if (CONST_INT_P (temp))
            {
              /* Use [base + disp] addressing mode if supported.  */
              HOST_WIDE_INT offset = INTVAL (temp);
              addr = memory_address (Pmode,
                                     plus_constant (Pmode, last_addr,
                                                    STACK_GROW_OFF (offset)));
            }
          else
            {
              /* Manual CSE if the difference is not known at compile-time.  */
              temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
              addr = memory_address (Pmode,
                                     gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                     last_addr, temp));
            }

          emit_stack_probe (addr);
        }
    }

  /* Make sure nothing is scheduled before we are done.  */
  emit_insn (gen_blockage ());
}
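/* Sketch (an addition, not part of the original source) of what the
   variable case above expands to, in C-like pseudo-code, with "+"
   standing for movement in the direction of stack growth:

     rounded_size = size & -PROBE_INTERVAL;
     test_addr = sp + first;
     last_addr = test_addr + rounded_size;
     while (test_addr != last_addr)
       {
         test_addr += PROBE_INTERVAL;
         probe (test_addr);
       }
     if (size != rounded_size)
       probe (last_addr + (size - rounded_size));  */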

/* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
   while probing it.  This pushes when SIZE is positive.  SIZE need not
   be constant.  If ADJUST_BACK is true, adjust back the stack pointer
   by plus SIZE at the end.  */

void
anti_adjust_stack_and_probe (rtx size, bool adjust_back)
{
  /* We skip the probe for the first interval + a small dope of 4 words and
     probe that many bytes past the specified size to maintain a protection
     area at the bottom of the stack.  */
  const int dope = 4 * UNITS_PER_WORD;

  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* If we have a constant small number of probes to generate, that's the
     easy case.  */
  if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
    {
      HOST_WIDE_INT isize = INTVAL (size), i;
      bool first_probe = true;

      /* Adjust SP and probe at PROBE_INTERVAL + N * PROBE_INTERVAL for
         values of N from 1 until it exceeds SIZE.  If only one probe is
         needed, this will not generate any code.  Then adjust and probe
         to PROBE_INTERVAL + SIZE.  */
      for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
        {
          if (first_probe)
            {
              anti_adjust_stack (GEN_INT (2 * PROBE_INTERVAL + dope));
              first_probe = false;
            }
          else
            anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
          emit_stack_probe (stack_pointer_rtx);
        }

      if (first_probe)
        anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
      else
        anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL - i));
      emit_stack_probe (stack_pointer_rtx);
    }
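  /* Worked example (an addition; assumes PROBE_INTERVAL == 4096,
     dope == 32 and SIZE == 8192): the first iteration moves SP by
     2 * 4096 + 32 == 8224 bytes and probes; the loop then exits with
     i == 8192 and the tail moves SP by 8192 + 4096 - 8192 == 4096
     bytes and probes again, for a total adjustment of
     SIZE + PROBE_INTERVAL + dope == 12320 bytes.  */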

  /* In the variable case, do the same as above, but in a loop.  Note that we
     must be extra careful with variables wrapping around because we might be
     at the very top (or the very bottom) of the address space and we have to
     be able to handle this case properly; in particular, we use an equality
     test for the loop condition.  */
  else
    {
      rtx rounded_size, rounded_size_op, last_addr, temp;
      rtx_code_label *loop_lab = gen_label_rtx ();
      rtx_code_label *end_lab = gen_label_rtx ();


      /* Step 1: round SIZE to the previous multiple of the interval.  */

      /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
      rounded_size
        = simplify_gen_binary (AND, Pmode, size,
                               gen_int_mode (-PROBE_INTERVAL, Pmode));
      rounded_size_op = force_operand (rounded_size, NULL_RTX);


      /* Step 2: compute initial and final value of the loop counter.  */

      /* SP = SP_0 + PROBE_INTERVAL.  */
      anti_adjust_stack (GEN_INT (PROBE_INTERVAL + dope));

      /* LAST_ADDR = SP_0 + PROBE_INTERVAL + ROUNDED_SIZE.  */
      last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 rounded_size_op), NULL_RTX);


      /* Step 3: the loop

         while (SP != LAST_ADDR)
           {
             SP = SP + PROBE_INTERVAL
             probe at SP
           }

         adjusts SP and probes at PROBE_INTERVAL + N * PROBE_INTERVAL for
         values of N from 1 until it is equal to ROUNDED_SIZE.  */

      emit_label (loop_lab);

      /* Jump to END_LAB if SP == LAST_ADDR.  */
      emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
                               Pmode, 1, end_lab);

      /* SP = SP + PROBE_INTERVAL and probe at SP.  */
      anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
      emit_stack_probe (stack_pointer_rtx);

      emit_jump (loop_lab);

      emit_label (end_lab);


      /* Step 4: adjust SP and probe at PROBE_INTERVAL + SIZE if we cannot
         assert at compile-time that SIZE is equal to ROUNDED_SIZE.  */

      /* TEMP = SIZE - ROUNDED_SIZE.  */
      temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
      if (temp != const0_rtx)
        {
          /* Manual CSE if the difference is not known at compile-time.  */
          if (GET_CODE (temp) != CONST_INT)
            temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
          anti_adjust_stack (temp);
          emit_stack_probe (stack_pointer_rtx);
        }
    }

  /* Adjust back and account for the additional first interval.  */
  if (adjust_back)
    adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
  else
    adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
}
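/* Usage note (an addition, not part of the original source): with
   ADJUST_BACK false, the epilogue above returns only the extra
   PROBE_INTERVAL + dope bytes, leaving a net adjustment of SIZE;
   with ADJUST_BACK true, it returns SIZE as well, restoring the
   stack pointer to its initial value.  */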

/* Return an rtx representing the register or memory location
   in which a scalar value of data type VALTYPE
   was returned by a function call to function FUNC.
   FUNC is a FUNCTION_DECL, FNTYPE a FUNCTION_TYPE node if the precise
   function is known, otherwise 0.
   OUTGOING is 1 if on a machine with register windows this function
   should return the register in which the function will put its result
   and 0 otherwise.  */

rtx
hard_function_value (const_tree valtype, const_tree func, const_tree fntype,
                     int outgoing ATTRIBUTE_UNUSED)
{
  rtx val;

  val = targetm.calls.function_value (valtype, func ? func : fntype, outgoing);

  if (REG_P (val)
      && GET_MODE (val) == BLKmode)
    {
      unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
      machine_mode tmpmode;

      /* int_size_in_bytes can return -1.  We don't need a check here
         since the value of bytes will then be large enough that no
         mode will match anyway.  */

      for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmpmode != VOIDmode;
           tmpmode = GET_MODE_WIDER_MODE (tmpmode))
        {
          /* Have we found a large enough mode?  */
          if (GET_MODE_SIZE (tmpmode) >= bytes)
            break;
        }

      /* No suitable mode found.  */
      gcc_assert (tmpmode != VOIDmode);

      PUT_MODE (val, tmpmode);
    }
  return val;
}
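/* Illustration (an addition; assumes a target whose integer modes are
   QI/HI/SI/DImode of 1/2/4/8 bytes): for a BLKmode return value of
   6 bytes, the widening walk above stops at DImode, the narrowest
   integer mode of at least 6 bytes, and stamps it onto VAL.  */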

/* Return an rtx representing the register or memory location
   in which a scalar value of mode MODE was returned by a library call.  */

rtx
hard_libcall_value (machine_mode mode, rtx fun)
{
  return targetm.calls.libcall_value (mode, fun);
}

/* Look up the tree code for a given rtx code
   to provide the arithmetic operation for real_arithmetic.
   The function returns an int because the caller may not know
   what `enum tree_code' means.  */

int
rtx_to_tree_code (enum rtx_code code)
{
  enum tree_code tcode;

  switch (code)
    {
    case PLUS:
      tcode = PLUS_EXPR;
      break;
    case MINUS:
      tcode = MINUS_EXPR;
      break;
    case MULT:
      tcode = MULT_EXPR;
      break;
    case DIV:
      tcode = RDIV_EXPR;
      break;
    case SMIN:
      tcode = MIN_EXPR;
      break;
    case SMAX:
      tcode = MAX_EXPR;
      break;
    default:
      tcode = LAST_AND_UNUSED_TREE_CODE;
      break;
    }
  return ((int) tcode);
}
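/* Usage sketch (an addition): rtx_to_tree_code (MULT) yields
   (int) MULT_EXPR, while a code without a listed equivalent, such as
   UDIV, falls through to LAST_AND_UNUSED_TREE_CODE, which callers
   can test for to mean "no tree counterpart".  */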

#include "gt-explow.h"