/* Subroutines for manipulating rtx's in semantically interesting ways.
   Copyright (C) 1987, 1991, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "diagnostic-core.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "except.h"
#include "function.h"
#include "expr.h"
#include "optabs.h"
#include "libfuncs.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "ggc.h"
#include "recog.h"
#include "langhooks.h"
#include "target.h"
#include "output.h"

static rtx break_out_memory_refs (rtx);

/* Truncate and perhaps sign-extend C as appropriate for MODE.  */

HOST_WIDE_INT
trunc_int_for_mode (HOST_WIDE_INT c, enum machine_mode mode)
{
  int width = GET_MODE_BITSIZE (mode);

  /* You want to truncate to a _what_?  */
  gcc_assert (SCALAR_INT_MODE_P (mode));

  /* Canonicalize BImode to 0 and STORE_FLAG_VALUE.  */
  if (mode == BImode)
    return c & 1 ? STORE_FLAG_VALUE : 0;

  /* Sign-extend for the requested mode.  */

  if (width < HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT sign = 1;
      sign <<= width - 1;
      c &= (sign << 1) - 1;
      c ^= sign;
      c -= sign;
    }

  return c;
}
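
/* Worked example (illustration only, not part of the original source):
   for a mode of width 8 and C = 0xff, SIGN is 0x80; the AND leaves
   C = 0xff, the XOR gives 0x7f and the subtraction gives -1, which is
   0xff read back as a signed 8-bit value.  The XOR/subtract pair is
   thus a branch-free sign extension from bit WIDTH-1.  */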

/* Return an rtx for the sum of X and the integer C.  */

rtx
plus_constant (rtx x, HOST_WIDE_INT c)
{
  RTX_CODE code;
  rtx y;
  enum machine_mode mode;
  rtx tem;
  int all_constant = 0;

  if (c == 0)
    return x;

 restart:

  code = GET_CODE (x);
  mode = GET_MODE (x);
  y = x;

  switch (code)
    {
    case CONST_INT:
      return GEN_INT (INTVAL (x) + c);

    case CONST_DOUBLE:
      {
        unsigned HOST_WIDE_INT l1 = CONST_DOUBLE_LOW (x);
        HOST_WIDE_INT h1 = CONST_DOUBLE_HIGH (x);
        unsigned HOST_WIDE_INT l2 = c;
        HOST_WIDE_INT h2 = c < 0 ? ~0 : 0;
        unsigned HOST_WIDE_INT lv;
        HOST_WIDE_INT hv;

        add_double (l1, h1, l2, h2, &lv, &hv);

        return immed_double_const (lv, hv, VOIDmode);
      }

    case MEM:
      /* If this is a reference to the constant pool, try replacing it with
         a reference to a new constant.  If the resulting address isn't
         valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
          && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
        {
          tem
            = force_const_mem (GET_MODE (x),
                               plus_constant (get_pool_constant (XEXP (x, 0)),
                                              c));
          if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
            return tem;
        }
      break;

    case CONST:
      /* If adding to something entirely constant, set a flag
         so that we can add a CONST around the result.  */
      x = XEXP (x, 0);
      all_constant = 1;
      goto restart;

    case SYMBOL_REF:
    case LABEL_REF:
      all_constant = 1;
      break;

    case PLUS:
      /* The interesting case is adding the integer to a sum.  Look for
         a constant term in the sum and combine it with C.  For an integer
         constant term, we make a combined integer.  For a constant term
         that is not an explicit integer, we cannot really combine, but
         group them together anyway.

         Restart or use a recursive call in case the remaining operand is
         something that we handle specially, such as a SYMBOL_REF.

         We may not immediately return from the recursive call here, lest
         all_constant gets lost.  */

      if (CONST_INT_P (XEXP (x, 1)))
        {
          c += INTVAL (XEXP (x, 1));

          if (GET_MODE (x) != VOIDmode)
            c = trunc_int_for_mode (c, GET_MODE (x));

          x = XEXP (x, 0);
          goto restart;
        }
      else if (CONSTANT_P (XEXP (x, 1)))
        {
          x = gen_rtx_PLUS (mode, XEXP (x, 0), plus_constant (XEXP (x, 1), c));
          c = 0;
        }
      else if (find_constant_term_loc (&y))
        {
          /* We need to be careful since X may be shared and we can't
             modify it in place.  */
          rtx copy = copy_rtx (x);
          rtx *const_loc = find_constant_term_loc (&copy);

          *const_loc = plus_constant (*const_loc, c);
          x = copy;
          c = 0;
        }
      break;

    default:
      break;
    }

  if (c != 0)
    x = gen_rtx_PLUS (mode, x, GEN_INT (c));

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    return gen_rtx_CONST (mode, x);
  else
    return x;
}
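
/* Usage sketch (hypothetical caller, not from this file):

     rtx addr = plus_constant (stack_pointer_rtx, 16);

   builds (plus:P (reg sp) (const_int 16)) without emitting any insns,
   while plus_constant (GEN_INT (4), 8) folds directly to (const_int 12).
   The result is not guaranteed to be a valid operand or address; callers
   that need that must still use force_operand or memory_address.  */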

/* If X is a sum, return a new sum like X but lacking any constant terms.
   Add all the removed constant terms into *CONSTPTR.
   X itself is not altered.  The result != X if and only if
   it is not isomorphic to X.  */

rtx
eliminate_constant_term (rtx x, rtx *constptr)
{
  rtx x0, x1;
  rtx tem;

  if (GET_CODE (x) != PLUS)
    return x;

  /* First handle constants appearing at this level explicitly.  */
  if (CONST_INT_P (XEXP (x, 1))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
                                                XEXP (x, 1)))
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return eliminate_constant_term (XEXP (x, 0), constptr);
    }

  tem = const0_rtx;
  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
                                                *constptr, tem))
      && CONST_INT_P (tem))
    {
      *constptr = tem;
      return gen_rtx_PLUS (GET_MODE (x), x0, x1);
    }

  return x;
}
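
/* Worked example (illustration only, not part of the original source):
   applied to (plus:SI (plus:SI (reg) (const_int 8)) (const_int -3))
   with *CONSTPTR initially const0_rtx, this returns (reg) and leaves
   (const_int 5) in *CONSTPTR.  */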

/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size);
      gcc_assert (size == SUBSTITUTE_PLACEHOLDER_IN_EXPR (size, exp));
    }

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype), EXPAND_NORMAL);
}

/* Return a wide integer for the size in bytes of the value of EXP, or -1
   if the size can vary or is larger than an integer.  */

HOST_WIDE_INT
int_expr_size (tree exp)
{
  tree size;

  if (TREE_CODE (exp) == WITH_SIZE_EXPR)
    size = TREE_OPERAND (exp, 1);
  else
    {
      size = tree_expr_size (exp);
      gcc_assert (size);
    }

  if (size == 0 || !host_integerp (size, 0))
    return -1;

  return tree_low_cst (size, 0);
}

/* Return a copy of X in which all memory references
   and all constants that involve symbol refs
   have been replaced with new temporary registers.
   Also emit code to load the memory locations and constants
   into those registers.

   If X contains no such constants or memory references,
   X itself (not a copy) is returned.

   If a constant is found in the address that is not a legitimate constant
   in an insn, it is left alone in the hope that it might be valid in the
   address.

   X may contain no arithmetic except addition, subtraction and
   multiplication.  Values returned by expand_expr with 1 for sum_ok fit
   this constraint.  */

static rtx
break_out_memory_refs (rtx x)
{
  if (MEM_P (x)
      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
          && GET_MODE (x) != VOIDmode))
    x = force_reg (GET_MODE (x), x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
           || GET_CODE (x) == MULT)
    {
      rtx op0 = break_out_memory_refs (XEXP (x, 0));
      rtx op1 = break_out_memory_refs (XEXP (x, 1));

      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        x = simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
    }

  return x;
}

/* Given X, a memory address in address space AS' pointer mode, convert it to
   an address in the address space's address mode, or vice versa (TO_MODE says
   which way).  We take advantage of the fact that pointers are not allowed to
   overflow by commuting arithmetic operations over conversions so that address
   arithmetic insns can be used.  */

rtx
convert_memory_address_addr_space (enum machine_mode to_mode ATTRIBUTE_UNUSED,
                                   rtx x, addr_space_t as ATTRIBUTE_UNUSED)
{
#ifndef POINTERS_EXTEND_UNSIGNED
  gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
  return x;
#else /* defined(POINTERS_EXTEND_UNSIGNED) */
  enum machine_mode pointer_mode, address_mode, from_mode;
  rtx temp;
  enum rtx_code code;

  /* If X already has the right mode, just return it.  */
  if (GET_MODE (x) == to_mode)
    return x;

  pointer_mode = targetm.addr_space.pointer_mode (as);
  address_mode = targetm.addr_space.address_mode (as);
  from_mode = to_mode == pointer_mode ? address_mode : pointer_mode;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    {
    case CONST_INT:
    case CONST_DOUBLE:
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
        code = TRUNCATE;
      else if (POINTERS_EXTEND_UNSIGNED < 0)
        break;
      else if (POINTERS_EXTEND_UNSIGNED > 0)
        code = ZERO_EXTEND;
      else
        code = SIGN_EXTEND;
      temp = simplify_unary_operation (code, to_mode, x, from_mode);
      if (temp)
        return temp;
      break;

    case SUBREG:
      if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
          && GET_MODE (SUBREG_REG (x)) == to_mode)
        return SUBREG_REG (x);
      break;

    case LABEL_REF:
      temp = gen_rtx_LABEL_REF (to_mode, XEXP (x, 0));
      LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
      return temp;
      break;

    case SYMBOL_REF:
      temp = shallow_copy_rtx (x);
      PUT_MODE (temp, to_mode);
      return temp;
      break;

    case CONST:
      return gen_rtx_CONST (to_mode,
                            convert_memory_address_addr_space
                              (to_mode, XEXP (x, 0), as));
      break;

    case PLUS:
    case MULT:
      /* For addition we can safely permute the conversion and addition
         operation if one operand is a constant and converting the constant
         does not change it or if one operand is a constant and we are
         using a ptr_extend instruction (POINTERS_EXTEND_UNSIGNED < 0).
         We can always safely permute them if we are making the address
         narrower.  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
          || (GET_CODE (x) == PLUS
              && CONST_INT_P (XEXP (x, 1))
              && (XEXP (x, 1) == convert_memory_address_addr_space
                                   (to_mode, XEXP (x, 1), as)
                  || POINTERS_EXTEND_UNSIGNED < 0)))
        return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
                               convert_memory_address_addr_space
                                 (to_mode, XEXP (x, 0), as),
                               XEXP (x, 1));
      break;

    default:
      break;
    }

  return convert_modes (to_mode, from_mode,
                        x, POINTERS_EXTEND_UNSIGNED);
#endif /* defined(POINTERS_EXTEND_UNSIGNED) */
}
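
/* For illustration (not part of the original source): on a target with
   SImode pointers and DImode addresses where POINTERS_EXTEND_UNSIGNED > 0,
   the PLUS case above rewrites the conversion of (plus:SI P (const_int 4))
   into a DImode PLUS of the converted pointer and the unchanged constant,
   since zero-extending (const_int 4) does not change it.  This is what
   lets address arithmetic insns operate directly in the wider mode.  */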

/* Return something equivalent to X but valid as a memory address for
   something of mode MODE in the named address space AS.  When X is not
   itself valid, this works by copying X or subexpressions of it into
   registers.  */

rtx
memory_address_addr_space (enum machine_mode mode, rtx x, addr_space_t as)
{
  rtx oldx = x;
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);

  x = convert_memory_address_addr_space (address_mode, x, as);

  /* By passing constant addresses through registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (address_mode, x);

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && !REG_P (x))
        x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.  */
      if (memory_address_addr_space_p (mode, x, as))
        goto done;

      /* If it was valid before but breaking out memory refs invalidated it,
         use it the old way.  */
      if (memory_address_addr_space_p (mode, oldx, as))
        {
          x = oldx;
          goto done;
        }

      /* Perform machine-dependent transformations on X
         in certain cases.  This is not necessary since the code
         below can handle all possible cases, but machine-dependent
         transformations can make better code.  */
      {
        rtx orig_x = x;
        x = targetm.addr_space.legitimize_address (x, oldx, mode, as);
        if (orig_x != x && memory_address_addr_space_p (mode, x, as))
          goto done;
      }

      /* PLUS and MULT can appear in special ways
         as the result of attempts to make an address usable for indexing.
         Usually they are dealt with by calling force_operand, below.
         But a sum containing constant terms is special
         if removing them makes the sum a valid address:
         then we generate that address in a register
         and index off of it.  We do this because it often makes
         shorter code, and because the addresses thus generated
         in registers often become common subexpressions.  */
      if (GET_CODE (x) == PLUS)
        {
          rtx constant_term = const0_rtx;
          rtx y = eliminate_constant_term (x, &constant_term);
          if (constant_term == const0_rtx
              || ! memory_address_addr_space_p (mode, y, as))
            x = force_operand (x, NULL_RTX);
          else
            {
              y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
              if (! memory_address_addr_space_p (mode, y, as))
                x = force_operand (x, NULL_RTX);
              else
                x = y;
            }
        }

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
        x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
         it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (REG_P (x))
        x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
         the register is a valid address.  */
      else
        x = force_reg (address_mode, x);
    }

 done:

  gcc_assert (memory_address_addr_space_p (mode, x, as));
  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (REG_P (x))
    mark_reg_pointer (x, BITS_PER_UNIT);
  else if (GET_CODE (x) == PLUS
           && REG_P (XEXP (x, 0))
           && CONST_INT_P (XEXP (x, 1)))
    mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}
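
/* Usage sketch (hypothetical caller, not from this file):

     rtx addr = memory_address_addr_space (SImode, x, ADDR_SPACE_GENERIC);
     rtx mem = gen_rtx_MEM (SImode, addr);

   The memory_address macro is the ADDR_SPACE_GENERIC shorthand for this
   function.  */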

/* Convert a mem ref into one with a valid memory address.
   Pass through anything else unchanged.  */

rtx
validize_mem (rtx ref)
{
  if (!MEM_P (ref))
    return ref;
  ref = use_anchored_address (ref);
  if (memory_address_addr_space_p (GET_MODE (ref), XEXP (ref, 0),
                                   MEM_ADDR_SPACE (ref)))
    return ref;

  /* Don't alter REF itself, since that is probably a stack slot.  */
  return replace_equiv_address (ref, XEXP (ref, 0));
}

/* If X is a memory reference to a member of an object block, try rewriting
   it to use an anchor instead.  Return the new memory reference on success
   and the old one on failure.  */

rtx
use_anchored_address (rtx x)
{
  rtx base;
  HOST_WIDE_INT offset;

  if (!flag_section_anchors)
    return x;

  if (!MEM_P (x))
    return x;

  /* Split the address into a base and offset.  */
  base = XEXP (x, 0);
  offset = 0;
  if (GET_CODE (base) == CONST
      && GET_CODE (XEXP (base, 0)) == PLUS
      && CONST_INT_P (XEXP (XEXP (base, 0), 1)))
    {
      offset += INTVAL (XEXP (XEXP (base, 0), 1));
      base = XEXP (XEXP (base, 0), 0);
    }

  /* Check whether BASE is suitable for anchors.  */
  if (GET_CODE (base) != SYMBOL_REF
      || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
      || SYMBOL_REF_ANCHOR_P (base)
      || SYMBOL_REF_BLOCK (base) == NULL
      || !targetm.use_anchors_for_symbol_p (base))
    return x;

  /* Decide where BASE is going to be.  */
  place_block_symbol (base);

  /* Get the anchor we need to use.  */
  offset += SYMBOL_REF_BLOCK_OFFSET (base);
  base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
                             SYMBOL_REF_TLS_MODEL (base));

  /* Work out the offset from the anchor.  */
  offset -= SYMBOL_REF_BLOCK_OFFSET (base);

  /* If we're going to run a CSE pass, force the anchor into a register.
     We will then be able to reuse registers for several accesses, if the
     target costs say that that's worthwhile.  */
  if (!cse_not_expected)
    base = force_reg (GET_MODE (base), base);

  return replace_equiv_address (x, plus_constant (base, offset));
}
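
/* For illustration (not part of the original source): if X is
   (mem (symbol_ref "b")) and "b" is placed 12 bytes past the chosen
   section anchor, the rewritten reference has the form
   (mem (plus (reg A) (const_int 12))), where A holds the anchor
   address; accesses to nearby block members can then share A.  */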

/* Copy the value or contents of X to a new temp reg and return that reg.  */

rtx
copy_to_reg (rtx x)
{
  rtx temp = gen_reg_rtx (GET_MODE (x));

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (x != temp)
    emit_move_insn (temp, x);

  return temp;
}

/* Like copy_to_reg but always give the new register mode Pmode
   in case X is a constant.  */

rtx
copy_addr_to_reg (rtx x)
{
  return copy_to_mode_reg (Pmode, x);
}

/* Like copy_to_reg but always give the new register mode MODE
   in case X is a constant.  */

rtx
copy_to_mode_reg (enum machine_mode mode, rtx x)
{
  rtx temp = gen_reg_rtx (mode);

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
  if (x != temp)
    emit_move_insn (temp, x);
  return temp;
}

/* Load X into a register if it is not already one.
   Use mode MODE for the register.
   X should be valid for mode MODE, but it may be a constant which
   is valid for all integer modes; that's why the caller must specify MODE.

   The caller must not alter the value in the register we return,
   since we mark it as a "constant" register.  */

rtx
force_reg (enum machine_mode mode, rtx x)
{
  rtx temp, insn, set;

  if (REG_P (x))
    return x;

  if (general_operand (x, mode))
    {
      temp = gen_reg_rtx (mode);
      insn = emit_move_insn (temp, x);
    }
  else
    {
      temp = force_operand (x, NULL_RTX);
      if (REG_P (temp))
        insn = get_last_insn ();
      else
        {
          rtx temp2 = gen_reg_rtx (mode);
          insn = emit_move_insn (temp2, temp);
          temp = temp2;
        }
    }

  /* Let optimizers know that TEMP's value never changes
     and that X can be substituted for it.  Don't get confused
     if INSN set something else (such as a SUBREG of TEMP).  */
  if (CONSTANT_P (x)
      && (set = single_set (insn)) != 0
      && SET_DEST (set) == temp
      && ! rtx_equal_p (x, SET_SRC (set)))
    set_unique_reg_note (insn, REG_EQUAL, x);

  /* Let optimizers know that TEMP is a pointer, and if so, the
     known alignment of that pointer.  */
  {
    unsigned align = 0;
    if (GET_CODE (x) == SYMBOL_REF)
      {
        align = BITS_PER_UNIT;
        if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
          align = DECL_ALIGN (SYMBOL_REF_DECL (x));
      }
    else if (GET_CODE (x) == LABEL_REF)
      align = BITS_PER_UNIT;
    else if (GET_CODE (x) == CONST
             && GET_CODE (XEXP (x, 0)) == PLUS
             && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
             && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
      {
        rtx s = XEXP (XEXP (x, 0), 0);
        rtx c = XEXP (XEXP (x, 0), 1);
        unsigned sa, ca;

        sa = BITS_PER_UNIT;
        if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
          sa = DECL_ALIGN (SYMBOL_REF_DECL (s));

        if (INTVAL (c) == 0)
          align = sa;
        else
          {
            ca = ctz_hwi (INTVAL (c)) * BITS_PER_UNIT;
            align = MIN (sa, ca);
          }
      }

    if (align || (MEM_P (x) && MEM_POINTER (x)))
      mark_reg_pointer (temp, align);
  }

  return temp;
}
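
/* For illustration (not part of the original source): forcing
   (symbol_ref "x") into a register, where "x" is a decl with
   DECL_ALIGN of 64, yields a pseudo marked as a pointer to
   64-bit-aligned memory, and the REG_EQUAL note added above lets
   later passes substitute the symbol back for the register.  */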

/* If X is a memory ref, copy its contents to a new temp reg and return
   that reg.  Otherwise, return X.  */

rtx
force_not_mem (rtx x)
{
  rtx temp;

  if (!MEM_P (x) || GET_MODE (x) == BLKmode)
    return x;

  temp = gen_reg_rtx (GET_MODE (x));

  if (MEM_POINTER (x))
    REG_POINTER (temp) = 1;

  emit_move_insn (temp, x);
  return temp;
}

/* Copy X to TARGET (if it's nonzero and a reg)
   or to a new temp reg and return that reg.
   MODE is the mode to use for X in case it is a constant.  */

rtx
copy_to_suggested_reg (rtx x, rtx target, enum machine_mode mode)
{
  rtx temp;

  if (target && REG_P (target))
    temp = target;
  else
    temp = gen_reg_rtx (mode);

  emit_move_insn (temp, x);
  return temp;
}

/* Return the mode to use to pass or return a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_RETURN is nonzero if the caller is promoting the return value
   of FNDECL, else it is for promoting args.  */

enum machine_mode
promote_function_mode (const_tree type, enum machine_mode mode, int *punsignedp,
                       const_tree funtype, int for_return)
{
  switch (TREE_CODE (type))
    {
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
    case POINTER_TYPE:   case REFERENCE_TYPE:
      return targetm.calls.promote_function_mode (type, mode, punsignedp,
                                                  funtype, for_return);

    default:
      return mode;
    }
}

/* Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.  */

enum machine_mode
promote_mode (const_tree type ATTRIBUTE_UNUSED, enum machine_mode mode,
              int *punsignedp ATTRIBUTE_UNUSED)
{
  /* FIXME: this is the same logic that was there until GCC 4.4, but we
     probably want to test POINTERS_EXTEND_UNSIGNED even if PROMOTE_MODE
     is not defined.  The affected targets are M32C, S390, SPARC.  */
#ifdef PROMOTE_MODE
  const enum tree_code code = TREE_CODE (type);
  int unsignedp = *punsignedp;

  switch (code)
    {
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case REAL_TYPE:      case OFFSET_TYPE:     case FIXED_POINT_TYPE:
      PROMOTE_MODE (mode, unsignedp, type);
      *punsignedp = unsignedp;
      return mode;
      break;

#ifdef POINTERS_EXTEND_UNSIGNED
    case REFERENCE_TYPE:
    case POINTER_TYPE:
      *punsignedp = POINTERS_EXTEND_UNSIGNED;
      return targetm.addr_space.address_mode
               (TYPE_ADDR_SPACE (TREE_TYPE (type)));
      break;
#endif

    default:
      return mode;
    }
#else
  return mode;
#endif
}

/* Use one of promote_mode or promote_function_mode to find the promoted
   mode of DECL.  If PUNSIGNEDP is not NULL, store there the unsignedness
   of DECL after promotion.  */

enum machine_mode
promote_decl_mode (const_tree decl, int *punsignedp)
{
  tree type = TREE_TYPE (decl);
  int unsignedp = TYPE_UNSIGNED (type);
  enum machine_mode mode = DECL_MODE (decl);
  enum machine_mode pmode;

  if (TREE_CODE (decl) == RESULT_DECL
      || TREE_CODE (decl) == PARM_DECL)
    pmode = promote_function_mode (type, mode, &unsignedp,
                                   TREE_TYPE (current_function_decl), 2);
  else
    pmode = promote_mode (type, mode, &unsignedp);

  if (punsignedp)
    *punsignedp = unsignedp;
  return pmode;
}
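
/* For illustration (not part of the original source): on a target whose
   PROMOTE_MODE widens QImode and HImode integers to SImode, as many RISC
   ports do, promote_decl_mode on a 'short' PARM_DECL returns SImode, and
   *PUNSIGNEDP reports the signedness chosen for the extension.  */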

/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
   This pops when ADJUST is positive.  ADJUST need not be constant.  */

void
adjust_stack (rtx adjust)
{
  rtx temp;

  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be a multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (CONST_INT_P (adjust))
    stack_pointer_delta -= INTVAL (adjust);

  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                       add_optab,
#else
                       sub_optab,
#endif
                       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
                       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}

/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
   This pushes when ADJUST is positive.  ADJUST need not be constant.  */

void
anti_adjust_stack (rtx adjust)
{
  rtx temp;

  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be a multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (CONST_INT_P (adjust))
    stack_pointer_delta += INTVAL (adjust);

  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                       sub_optab,
#else
                       add_optab,
#endif
                       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
                       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}

/* Round the size of a block to be pushed up to the boundary required
   by this machine.  SIZE is the desired size, which need not be constant.  */

static rtx
round_push (rtx size)
{
  rtx align_rtx, alignm1_rtx;

  if (!SUPPORTS_STACK_ALIGNMENT
      || crtl->preferred_stack_boundary == MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;

      if (align == 1)
        return size;

      if (CONST_INT_P (size))
        {
          HOST_WIDE_INT new_size = (INTVAL (size) + align - 1) / align * align;

          if (INTVAL (size) != new_size)
            size = GEN_INT (new_size);
          return size;
        }

      align_rtx = GEN_INT (align);
      alignm1_rtx = GEN_INT (align - 1);
    }
  else
    {
      /* If crtl->preferred_stack_boundary might still grow, use
         virtual_preferred_stack_boundary_rtx instead.  This will be
         substituted by the right value in vregs pass and optimized
         during combine.  */
      align_rtx = virtual_preferred_stack_boundary_rtx;
      alignm1_rtx = force_operand (plus_constant (align_rtx, -1), NULL_RTX);
    }

  /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
     but we know it can't.  So add ourselves and then do
     TRUNC_DIV_EXPR.  */
  size = expand_binop (Pmode, add_optab, size, alignm1_rtx,
                       NULL_RTX, 1, OPTAB_LIB_WIDEN);
  size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, align_rtx,
                        NULL_RTX, 1);
  size = expand_mult (Pmode, size, align_rtx, NULL_RTX, 1);

  return size;
}
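
/* Worked example (illustration only, not part of the original source):
   with a preferred stack boundary of 64 bits, ALIGN is 8, so a constant
   SIZE of 13 rounds to (13 + 7) / 8 * 8 = 16.  The non-constant path
   computes the same value with an addition, a truncating division and
   a multiplication.  */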

/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
   to a previously-created save area.  If no save area has been allocated,
   this function will allocate one.  If a save area is specified, it
   must be of the proper mode.

   The insns are emitted after insn AFTER, if nonzero, otherwise the insns
   are emitted at the current position.  */

void
emit_stack_save (enum save_level save_level, rtx *psave, rtx after)
{
  rtx sa = *psave;
  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn;
  enum machine_mode mode = STACK_SAVEAREA_MODE (save_level);

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_save_stack_block
    case SAVE_BLOCK:
      if (HAVE_save_stack_block)
        fcn = gen_save_stack_block;
      break;
#endif
#ifdef HAVE_save_stack_function
    case SAVE_FUNCTION:
      if (HAVE_save_stack_function)
        fcn = gen_save_stack_function;
      break;
#endif
#ifdef HAVE_save_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_save_stack_nonlocal)
        fcn = gen_save_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  /* If there is no save area and we have to allocate one, do so.  Otherwise
     verify the save area is the proper mode.  */

  if (sa == 0)
    {
      if (mode != VOIDmode)
        {
          if (save_level == SAVE_NONLOCAL)
            *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
          else
            *psave = sa = gen_reg_rtx (mode);
        }
    }

  if (after)
    {
      rtx seq;

      start_sequence ();
      do_pending_stack_adjust ();
      /* We must validize inside the sequence, to ensure that any instructions
         created by the validize call also get moved to the right place.  */
      if (sa != 0)
        sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
      seq = get_insns ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    {
      do_pending_stack_adjust ();
      if (sa != 0)
        sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
    }
}
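
/* Usage sketch (hypothetical caller, not from this file):

     rtx save_area = NULL_RTX;
     emit_stack_save (SAVE_BLOCK, &save_area, NULL_RTX);
     ...emit code that moves the stack pointer...
     emit_stack_restore (SAVE_BLOCK, save_area, NULL_RTX);

   The first call allocates the save area and stores the stack pointer
   into it; the matching restore below copies it back.  */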

/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
   area made by emit_stack_save.  If it is zero, we have nothing to do.

   Put any emitted insns after insn AFTER, if nonzero, otherwise at
   current position.  */

void
emit_stack_restore (enum save_level save_level, rtx sa, rtx after)
{
  /* The default is that we use a move insn.  */
  rtx (*fcn) (rtx, rtx) = gen_move_insn;

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_restore_stack_block
    case SAVE_BLOCK:
      if (HAVE_restore_stack_block)
        fcn = gen_restore_stack_block;
      break;
#endif
#ifdef HAVE_restore_stack_function
    case SAVE_FUNCTION:
      if (HAVE_restore_stack_function)
        fcn = gen_restore_stack_function;
      break;
#endif
#ifdef HAVE_restore_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_restore_stack_nonlocal)
        fcn = gen_restore_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  if (sa != 0)
    {
      sa = validize_mem (sa);
      /* These clobbers prevent the scheduler from moving
         references to variable arrays below the code
         that deletes (pops) the arrays.  */
      emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
      emit_clobber (gen_rtx_MEM (BLKmode, stack_pointer_rtx));
    }

  discard_pending_stack_adjust ();

  if (after)
    {
      rtx seq;

      start_sequence ();
      emit_insn (fcn (stack_pointer_rtx, sa));
      seq = get_insns ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    emit_insn (fcn (stack_pointer_rtx, sa));
}

/* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
   function.  This function should be called whenever we allocate or
   deallocate dynamic stack space.  */

void
update_nonlocal_goto_save_area (void)
{
  tree t_save;
  rtx r_save;

  /* The nonlocal_goto_save_area object is an array of N pointers.  The
     first one is used for the frame pointer save; the rest are sized by
     STACK_SAVEAREA_MODE.  Create a reference to array index 1, the first
     of the stack save area slots.  */
  t_save = build4 (ARRAY_REF, ptr_type_node, cfun->nonlocal_goto_save_area,
                   integer_one_node, NULL_TREE, NULL_TREE);
  r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);

  emit_stack_save (SAVE_NONLOCAL, &r_save, NULL_RTX);
}

/* Return an rtx representing the address of an area of memory dynamically
   pushed on the stack.

   Any required stack pointer alignment is preserved.

   SIZE is an rtx representing the size of the area.

   SIZE_ALIGN is the alignment (in bits) that we know SIZE has.  This
   parameter may be zero.  If so, a proper value will be extracted
   from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.

   REQUIRED_ALIGN is the alignment (in bits) required for the region
   of memory.

   If CANNOT_ACCUMULATE is set to TRUE, the caller guarantees that the
   stack space allocated by the generated code cannot be added with itself
   in the course of the execution of the function.  It is always safe to
   pass FALSE here and the following criterion is sufficient in order to
   pass TRUE: every path in the CFG that starts at the allocation point and
   loops to it executes the associated deallocation code.  */

rtx
allocate_dynamic_stack_space (rtx size, unsigned size_align,
                              unsigned required_align, bool cannot_accumulate)
{
  HOST_WIDE_INT stack_usage_size = -1;
  rtx final_label, final_target, target;
  bool must_align;

  /* If we're asking for zero bytes, it doesn't matter what we point
     to since we can't dereference it.  But return a reasonable
     address anyway.  */
  if (size == const0_rtx)
    return virtual_stack_dynamic_rtx;

  /* Otherwise, show we're calling alloca or equivalent.  */
  cfun->calls_alloca = 1;

  /* If stack usage info is requested, look into the size we are passed.
     We need to do so this early to avoid the obfuscation that may be
     introduced later by the various alignment operations.  */
  if (flag_stack_usage)
    {
      if (CONST_INT_P (size))
        stack_usage_size = INTVAL (size);
      else if (REG_P (size))
        {
          /* Look into the last emitted insn and see if we can deduce
             something for the register.  */
          rtx insn, set, note;
          insn = get_last_insn ();
          if ((set = single_set (insn)) && rtx_equal_p (SET_DEST (set), size))
            {
              if (CONST_INT_P (SET_SRC (set)))
                stack_usage_size = INTVAL (SET_SRC (set));
              else if ((note = find_reg_equal_equiv_note (insn))
                       && CONST_INT_P (XEXP (note, 0)))
                stack_usage_size = INTVAL (XEXP (note, 0));
            }
        }

      /* If the size is not constant, we can't say anything.  */
      if (stack_usage_size == -1)
        {
          current_function_has_unbounded_dynamic_stack_size = 1;
          stack_usage_size = 0;
        }
    }

  /* Ensure the size is in the proper mode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* Adjust SIZE_ALIGN, if needed.  */
  if (CONST_INT_P (size))
    {
      unsigned HOST_WIDE_INT lsb;

      lsb = INTVAL (size);
      lsb &= -lsb;

      /* Watch out for overflow truncating to "unsigned".  */
      if (lsb > UINT_MAX / BITS_PER_UNIT)
        size_align = 1u << (HOST_BITS_PER_INT - 1);
      else
        size_align = (unsigned)lsb * BITS_PER_UNIT;
    }
  else if (size_align < BITS_PER_UNIT)
    size_align = BITS_PER_UNIT;

  /* We can't attempt to minimize alignment necessary, because we don't
     know the final value of preferred_stack_boundary yet while executing
     this code.  */
  if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
    crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;

  /* We will need to ensure that the address we return is aligned to
     REQUIRED_ALIGN.  If STACK_DYNAMIC_OFFSET is defined, we don't
     always know its final value at this point in the compilation (it
     might depend on the size of the outgoing parameter lists, for
     example), so we must align the value to be returned in that case.
     (Note that STACK_DYNAMIC_OFFSET will have a default nonzero value if
     STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).
     We must also do an alignment operation on the returned value if
     the stack pointer alignment is less strict than REQUIRED_ALIGN.

     If we have to align, we must leave space in SIZE for the hole
     that might result from the alignment operation.  */

  must_align = (crtl->preferred_stack_boundary < required_align);
#if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET)
  must_align = true;
#endif

  if (must_align)
    {
      unsigned extra, extra_align;

      if (required_align > PREFERRED_STACK_BOUNDARY)
        extra_align = PREFERRED_STACK_BOUNDARY;
      else if (required_align > STACK_BOUNDARY)
        extra_align = STACK_BOUNDARY;
      else
        extra_align = BITS_PER_UNIT;
      extra = (required_align - extra_align) / BITS_PER_UNIT;

      size = plus_constant (size, extra);
      size = force_operand (size, NULL_RTX);

      if (flag_stack_usage)
        stack_usage_size += extra;

      if (extra && size_align > extra_align)
        size_align = extra_align;
    }

#ifdef SETJMP_VIA_SAVE_AREA
  /* If setjmp restores regs from a save area in the stack frame,
     avoid clobbering the reg save area.  Note that the offset of
     virtual_incoming_args_rtx includes the preallocated stack args space.
     It would be no problem to clobber that, but it's on the wrong side
     of the old save area.

     What used to happen is that, since we did not know for sure
     whether setjmp() was invoked until after RTL generation, we
     would use reg notes to store the "optimized" size and fix things
     up later.  These days we know this information before we ever
     start building RTL so the reg notes are unnecessary.  */
  if (cfun->calls_setjmp)
    {
      rtx dynamic_offset
        = expand_binop (Pmode, sub_optab, virtual_stack_dynamic_rtx,
                        stack_pointer_rtx, NULL_RTX, 1, OPTAB_LIB_WIDEN);

      size = expand_binop (Pmode, add_optab, size, dynamic_offset,
                           NULL_RTX, 1, OPTAB_LIB_WIDEN);

      /* The above dynamic offset cannot be computed statically at this
         point, but it will be possible to do so after RTL expansion is
         done.  Record how many times we will need to add it.  */
      if (flag_stack_usage)
        current_function_dynamic_alloc_count++;

      /* ??? Can we infer a minimum of STACK_BOUNDARY here?  */
      size_align = BITS_PER_UNIT;
    }
#endif /* SETJMP_VIA_SAVE_AREA */

  /* Round the size to a multiple of the required stack alignment.
     Since the stack is presumed to be rounded before this allocation,
     this will maintain the required alignment.

     If the stack grows downward, we could save an insn by subtracting
     SIZE from the stack pointer and then aligning the stack pointer.
     The problem with this is that the stack pointer may be unaligned
     between the execution of the subtraction and alignment insns and
     some machines do not allow this.  Even on those that do, some
     signal handlers malfunction if a signal should occur between those
     insns.  Since this is an extremely rare event, we have no reliable
     way of knowing which systems have this problem.  So we avoid even
     momentarily mis-aligning the stack.  */
  if (size_align % MAX_SUPPORTED_STACK_ALIGNMENT != 0)
    {
      size = round_push (size);

      if (flag_stack_usage)
        {
          int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
          stack_usage_size = (stack_usage_size + align - 1) / align * align;
        }
    }

  target = gen_reg_rtx (Pmode);

  /* The size is supposed to be fully adjusted at this point so record it
     if stack usage info is requested.  */
  if (flag_stack_usage)
    {
      current_function_dynamic_stack_size += stack_usage_size;

      /* ??? This is gross but the only safe stance in the absence
         of stack usage oriented flow analysis.  */
      if (!cannot_accumulate)
        current_function_has_unbounded_dynamic_stack_size = 1;
    }

  final_label = NULL_RTX;
  final_target = NULL_RTX;

  /* If we are splitting the stack, we need to ask the backend whether
     there is enough room on the current stack.  If there isn't, or if
     the backend doesn't know how to tell us, then we need to call a
     function to allocate memory in some other way.  This memory will
     be released when we release the current stack segment.  The
     effect is that stack allocation becomes less efficient, but at
     least it doesn't cause a stack overflow.  */
  if (flag_split_stack)
    {
      rtx available_label, ask, space, func;

      available_label = NULL_RTX;

#ifdef HAVE_split_stack_space_check
      if (HAVE_split_stack_space_check)
        {
          available_label = gen_label_rtx ();

          /* This instruction will branch to AVAILABLE_LABEL if there
             are SIZE bytes available on the stack.  */
          emit_insn (gen_split_stack_space_check (size, available_label));
        }
#endif

      /* The __morestack_allocate_stack_space function will allocate
         memory using malloc.  If the alignment of the memory returned
         by malloc does not meet REQUIRED_ALIGN, we increase SIZE to
         make sure we allocate enough space.  */
      if (MALLOC_ABI_ALIGNMENT >= required_align)
        ask = size;
      else
        {
          ask = expand_binop (Pmode, add_optab, size,
                              GEN_INT (required_align / BITS_PER_UNIT - 1),
                              NULL_RTX, 1, OPTAB_LIB_WIDEN);
          must_align = true;
        }

      func = init_one_libfunc ("__morestack_allocate_stack_space");

      space = emit_library_call_value (func, target, LCT_NORMAL, Pmode,
                                       1, ask, Pmode);

      if (available_label == NULL_RTX)
        return space;

      final_target = gen_reg_rtx (Pmode);

      emit_move_insn (final_target, space);

      final_label = gen_label_rtx ();
      emit_jump (final_label);

      emit_label (available_label);
    }

  do_pending_stack_adjust ();

  /* We ought always to be called on the toplevel and the stack ought to
     be aligned properly.  */
  gcc_assert (!(stack_pointer_delta
                % (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)));

  /* If needed, check that we have the required amount of stack.  Take into
     account what has already been checked.  */
  if (STACK_CHECK_MOVING_SP)
    ;
  else if (flag_stack_check == GENERIC_STACK_CHECK)
    probe_stack_range (STACK_OLD_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE,
                       size);
  else if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK)
    probe_stack_range (STACK_CHECK_PROTECT, size);

  /* Perform the required allocation from the stack.  Some systems do
     this differently than simply incrementing/decrementing from the
     stack pointer, such as acquiring the space by calling malloc().  */
#ifdef HAVE_allocate_stack
  if (HAVE_allocate_stack)
    {
      enum machine_mode mode = STACK_SIZE_MODE;
      insn_operand_predicate_fn pred;

      /* We don't have to check against the predicate for operand 0 since
         TARGET is known to be a pseudo of the proper mode, which must
         be valid for the operand.  For operand 1, convert to the
         proper mode and validate.  */
      if (mode == VOIDmode)
        mode = insn_data[(int) CODE_FOR_allocate_stack].operand[1].mode;

      pred = insn_data[(int) CODE_FOR_allocate_stack].operand[1].predicate;
      if (pred && ! ((*pred) (size, mode)))
        size = copy_to_mode_reg (mode, convert_to_mode (mode, size, 1));

      emit_insn (gen_allocate_stack (target, size));
    }
  else
#endif
    {
      int saved_stack_pointer_delta;

#ifndef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif

      /* Check stack bounds if necessary.  */
      if (crtl->limit_stack)
        {
          rtx available;
          rtx space_available = gen_label_rtx ();
#ifdef STACK_GROWS_DOWNWARD
          available = expand_binop (Pmode, sub_optab,
                                    stack_pointer_rtx, stack_limit_rtx,
                                    NULL_RTX, 1, OPTAB_WIDEN);
#else
          available = expand_binop (Pmode, sub_optab,
                                    stack_limit_rtx, stack_pointer_rtx,
                                    NULL_RTX, 1, OPTAB_WIDEN);
#endif
          emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
                                   space_available);
#ifdef HAVE_trap
          if (HAVE_trap)
            emit_insn (gen_trap ());
          else
#endif
            error ("stack limits not supported on this target");
          emit_barrier ();
          emit_label (space_available);
        }

      saved_stack_pointer_delta = stack_pointer_delta;
      if (flag_stack_check && STACK_CHECK_MOVING_SP)
        anti_adjust_stack_and_probe (size, false);
      else
        anti_adjust_stack (size);
      /* Even if size is constant, don't modify stack_pointer_delta.
         The constant size alloca should preserve
         crtl->preferred_stack_boundary alignment.  */
      stack_pointer_delta = saved_stack_pointer_delta;

#ifdef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif
    }

  /* Finish up the split stack handling.  */
  if (final_label != NULL_RTX)
    {
      gcc_assert (flag_split_stack);
      emit_move_insn (final_target, target);
      emit_label (final_label);
      target = final_target;
    }

  if (must_align)
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
         but we know it can't.  So add ourselves and then do
         TRUNC_DIV_EXPR.  */
      target = expand_binop (Pmode, add_optab, target,
                             GEN_INT (required_align / BITS_PER_UNIT - 1),
                             NULL_RTX, 1, OPTAB_LIB_WIDEN);
      target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
                              GEN_INT (required_align / BITS_PER_UNIT),
                              NULL_RTX, 1);
      target = expand_mult (Pmode, target,
                            GEN_INT (required_align / BITS_PER_UNIT),
                            NULL_RTX, 1);
    }

  /* Now that we've committed to a return value, mark its alignment.  */
  mark_reg_pointer (target, required_align);

  /* Record the new stack level for nonlocal gotos.  */
  if (cfun->nonlocal_goto_save_area != 0)
    update_nonlocal_goto_save_area ();

  return target;
}
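
/* Usage sketch (hypothetical caller, not from this file): an alloca-style
   expander might request its block with

     rtx addr = allocate_dynamic_stack_space (size_rtx, 0,
                                              BIGGEST_ALIGNMENT, true);

   passing 0 so SIZE_ALIGN is derived from SIZE, and choosing the last
   argument according to the CANNOT_ACCUMULATE criterion documented
   above.  */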

/* A front end may want to override GCC's stack checking by providing a
   run-time routine to call to check the stack, so provide a mechanism for
   calling that routine.  */

static GTY(()) rtx stack_check_libfunc;

void
set_stack_check_libfunc (const char *libfunc_name)
{
  gcc_assert (stack_check_libfunc == NULL_RTX);
  stack_check_libfunc = gen_rtx_SYMBOL_REF (Pmode, libfunc_name);
}

/* Emit one stack probe at ADDRESS, an address within the stack.  */

void
emit_stack_probe (rtx address)
{
  rtx memref = gen_rtx_MEM (word_mode, address);

  MEM_VOLATILE_P (memref) = 1;

  /* See if we have an insn to probe the stack.  */
#ifdef HAVE_probe_stack
  if (HAVE_probe_stack)
    emit_insn (gen_probe_stack (memref));
  else
#endif
    emit_move_insn (memref, const0_rtx);
}
1543
d9b3eb63 1544/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
d809253a
EB
1545 FIRST is a constant and size is a Pmode RTX. These are offsets from
1546 the current stack pointer. STACK_GROWS_DOWNWARD says whether to add
1547 or subtract them from the stack pointer. */
1548
1549#define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP)
edff2491
RK
1550
1551#ifdef STACK_GROWS_DOWNWARD
1552#define STACK_GROW_OP MINUS
d809253a
EB
1553#define STACK_GROW_OPTAB sub_optab
1554#define STACK_GROW_OFF(off) -(off)
edff2491
RK
1555#else
1556#define STACK_GROW_OP PLUS
d809253a
EB
1557#define STACK_GROW_OPTAB add_optab
1558#define STACK_GROW_OFF(off) (off)
edff2491
RK
1559#endif
1560
1561void
502b8322 1562probe_stack_range (HOST_WIDE_INT first, rtx size)
edff2491 1563{
4b6c1672
RK
1564 /* First ensure SIZE is Pmode. */
1565 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
1566 size = convert_to_mode (Pmode, size, 1);
1567
d809253a
EB
1568 /* Next see if we have a function to check the stack. */
1569 if (stack_check_libfunc)
f5f5363f 1570 {
d809253a 1571 rtx addr = memory_address (Pmode,
2b3aadfc
RH
1572 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1573 stack_pointer_rtx,
1574 plus_constant (size, first)));
949fa04c
EB
1575 emit_library_call (stack_check_libfunc, LCT_NORMAL, VOIDmode, 1, addr,
1576 Pmode);
f5f5363f 1577 }
14a774a9 1578
d809253a 1579 /* Next see if we have an insn to check the stack. */
edff2491 1580#ifdef HAVE_check_stack
14a774a9 1581 else if (HAVE_check_stack)
edff2491 1582 {
d809253a
EB
1583 rtx addr = memory_address (Pmode,
1584 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1585 stack_pointer_rtx,
1586 plus_constant (size, first)));
1587 insn_operand_predicate_fn pred
1588 = insn_data[(int) CODE_FOR_check_stack].operand[0].predicate;
1589 if (pred && !((*pred) (addr, Pmode)))
1590 addr = copy_to_mode_reg (Pmode, addr);
edff2491 1591
d809253a 1592 emit_insn (gen_check_stack (addr));
edff2491
RK
1593 }
1594#endif
1595
d809253a
EB
1596 /* Otherwise we have to generate explicit probes. If we have a constant
1597 small number of them to generate, that's the easy case. */
1598 else if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
edff2491 1599 {
d809253a
EB
1600 HOST_WIDE_INT isize = INTVAL (size), i;
1601 rtx addr;
1602
1603 /* Probe at FIRST + N * PROBE_INTERVAL for values of N from 1 until
1604 it exceeds SIZE. If only one probe is needed, this will not
1605 generate any code. Then probe at FIRST + SIZE. */
1606 for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
1607 {
1608 addr = memory_address (Pmode,
1609 plus_constant (stack_pointer_rtx,
1610 STACK_GROW_OFF (first + i)));
1611 emit_stack_probe (addr);
1612 }
1613
1614 addr = memory_address (Pmode,
1615 plus_constant (stack_pointer_rtx,
1616 STACK_GROW_OFF (first + isize)));
1617 emit_stack_probe (addr);
edff2491
RK
1618 }

  /* In the variable case, do the same as above, but in a loop.  Note that we
     must be extra careful with variables wrapping around because we might be
     at the very top (or the very bottom) of the address space and we have to
     be able to handle this case properly; in particular, we use an equality
     test for the loop condition, since an ordered comparison would give the
     wrong answer if LAST_ADDR wrapped around the end of the address space.  */
  else
    {
      rtx rounded_size, rounded_size_op, test_addr, last_addr, temp;
      rtx loop_lab = gen_label_rtx ();
      rtx end_lab = gen_label_rtx ();

      /* Step 1: round SIZE to the previous multiple of the interval.  */

      /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
      rounded_size
        = simplify_gen_binary (AND, Pmode, size, GEN_INT (-PROBE_INTERVAL));
      rounded_size_op = force_operand (rounded_size, NULL_RTX);


      /* Step 2: compute initial and final value of the loop counter.  */

      /* TEST_ADDR = SP + FIRST.  */
      test_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 GEN_INT (first)), NULL_RTX);

      /* LAST_ADDR = SP + FIRST + ROUNDED_SIZE.  */
      last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 test_addr,
                                                 rounded_size_op), NULL_RTX);


      /* Step 3: the loop

           while (TEST_ADDR != LAST_ADDR)
             {
               TEST_ADDR = TEST_ADDR + PROBE_INTERVAL
               probe at TEST_ADDR
             }

         probes at FIRST + N * PROBE_INTERVAL for values of N from 1
         until N * PROBE_INTERVAL is equal to ROUNDED_SIZE.  */

      emit_label (loop_lab);

      /* Jump to END_LAB if TEST_ADDR == LAST_ADDR.  */
      emit_cmp_and_jump_insns (test_addr, last_addr, EQ, NULL_RTX, Pmode, 1,
                               end_lab);

      /* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL.  */
      temp = expand_binop (Pmode, STACK_GROW_OPTAB, test_addr,
                           GEN_INT (PROBE_INTERVAL), test_addr,
                           1, OPTAB_WIDEN);

      gcc_assert (temp == test_addr);

      /* Probe at TEST_ADDR.  */
      emit_stack_probe (test_addr);

      emit_jump (loop_lab);

      emit_label (end_lab);


      /* Step 4: probe at FIRST + SIZE if we cannot assert at compile-time
         that SIZE is equal to ROUNDED_SIZE.  */

      /* TEMP = SIZE - ROUNDED_SIZE.  */
      temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
      if (temp != const0_rtx)
        {
          rtx addr;

          if (CONST_INT_P (temp))
            {
              /* Use [base + disp] addressing mode if supported.  */
              HOST_WIDE_INT offset = INTVAL (temp);
              addr = memory_address (Pmode,
                                     plus_constant (last_addr,
                                                    STACK_GROW_OFF (offset)));
            }
          else
            {
              /* Manual CSE if the difference is not known at compile-time.  */
              temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
              addr = memory_address (Pmode,
                                     gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                     last_addr, temp));
            }

          emit_stack_probe (addr);
        }
    }
}
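
/* Illustration (added for clarity, not part of the original sources): with
   PROBE_INTERVAL == 4096 and a variable SIZE that happens to be 20000 at
   run time, ROUNDED_SIZE is 16384, the loop probes at offsets FIRST + 4096,
   FIRST + 8192, FIRST + 12288 and FIRST + 16384 from the incoming SP, and
   step 4 emits one residual probe at FIRST + 20000.  */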

/* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
   while probing it.  This pushes when SIZE is positive.  SIZE need not
   be constant.  If ADJUST_BACK is true, adjust back the stack pointer
   by plus SIZE at the end.  */

void
anti_adjust_stack_and_probe (rtx size, bool adjust_back)
{
  /* We skip the probe for the first interval + a small dope of 4 words and
     probe that many bytes past the specified size to maintain a protection
     area at the bottom of the stack.  */
  const int dope = 4 * UNITS_PER_WORD;

  /* First ensure SIZE is Pmode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* If we have a constant small number of probes to generate, that's the
     easy case.  */
  if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
    {
      HOST_WIDE_INT isize = INTVAL (size), i;
      bool first_probe = true;

      /* Adjust SP and probe at PROBE_INTERVAL + N * PROBE_INTERVAL for
         increasing values of N while N * PROBE_INTERVAL is less than SIZE.
         If only one probe is needed, this will not generate any code.
         Then adjust and probe to PROBE_INTERVAL + SIZE.  */
      for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
        {
          if (first_probe)
            {
              anti_adjust_stack (GEN_INT (2 * PROBE_INTERVAL + dope));
              first_probe = false;
            }
          else
            anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
          emit_stack_probe (stack_pointer_rtx);
        }

      if (first_probe)
        anti_adjust_stack (plus_constant (size, PROBE_INTERVAL + dope));
      else
        anti_adjust_stack (plus_constant (size, PROBE_INTERVAL - i));
      emit_stack_probe (stack_pointer_rtx);
    }
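
      /* Worked example (added for clarity, assuming PROBE_INTERVAL == 4096
         and dope == 32): SIZE == 10000 expands to three adjust-and-probe
         pairs of 8224, 4096 and 1808 bytes, leaving SP exactly
         SIZE + PROBE_INTERVAL + dope == 14128 bytes into the new area with
         every intervening page touched.  */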

  /* In the variable case, do the same as above, but in a loop.  Note that we
     must be extra careful with variables wrapping around because we might be
     at the very top (or the very bottom) of the address space and we have to
     be able to handle this case properly; in particular, we use an equality
     test for the loop condition, since an ordered comparison would give the
     wrong answer if LAST_ADDR wrapped around the end of the address space.  */
  else
    {
      rtx rounded_size, rounded_size_op, last_addr, temp;
      rtx loop_lab = gen_label_rtx ();
      rtx end_lab = gen_label_rtx ();


      /* Step 1: round SIZE to the previous multiple of the interval.  */

      /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL  */
      rounded_size
        = simplify_gen_binary (AND, Pmode, size, GEN_INT (-PROBE_INTERVAL));
      rounded_size_op = force_operand (rounded_size, NULL_RTX);


      /* Step 2: compute initial and final value of the loop counter.  */

      /* SP = SP_0 + PROBE_INTERVAL + dope.  */
      anti_adjust_stack (GEN_INT (PROBE_INTERVAL + dope));

      /* LAST_ADDR = SP_0 + PROBE_INTERVAL + dope + ROUNDED_SIZE.  */
      last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                                 stack_pointer_rtx,
                                                 rounded_size_op), NULL_RTX);


      /* Step 3: the loop

           while (SP != LAST_ADDR)
             {
               SP = SP + PROBE_INTERVAL
               probe at SP
             }

         adjusts SP and probes at PROBE_INTERVAL + N * PROBE_INTERVAL for
         values of N from 1 until N * PROBE_INTERVAL is equal to
         ROUNDED_SIZE.  */

      emit_label (loop_lab);

      /* Jump to END_LAB if SP == LAST_ADDR.  */
      emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
                               Pmode, 1, end_lab);

      /* SP = SP + PROBE_INTERVAL and probe at SP.  */
      anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
      emit_stack_probe (stack_pointer_rtx);

      emit_jump (loop_lab);

      emit_label (end_lab);


      /* Step 4: adjust SP and probe at PROBE_INTERVAL + SIZE if we cannot
         assert at compile-time that SIZE is equal to ROUNDED_SIZE.  */

      /* TEMP = SIZE - ROUNDED_SIZE.  */
      temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
      if (temp != const0_rtx)
        {
          /* Manual CSE if the difference is not known at compile-time.  */
          if (!CONST_INT_P (temp))
            temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
          anti_adjust_stack (temp);
          emit_stack_probe (stack_pointer_rtx);
        }
    }

  /* Adjust back and account for the additional first interval.  */
  if (adjust_back)
    adjust_stack (plus_constant (size, PROBE_INTERVAL + dope));
  else
    adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
}
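
/* Illustration (added for clarity, not part of the original sources): every
   path above moves SP by a total of SIZE + PROBE_INTERVAL + dope bytes in
   the direction of stack growth while probing.  The final adjust_stack then
   gives back either the whole amount (ADJUST_BACK true) or just the extra
   PROBE_INTERVAL + dope bytes, so that SP ends up moved by exactly SIZE, as
   for an ordinary allocation.  */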

/* Return an rtx representing the register or memory location
   in which a scalar value of data type VALTYPE
   was returned by a function call to function FUNC.
   FUNC is the FUNCTION_DECL of the called function if it is known,
   otherwise 0; FNTYPE is its FUNCTION_TYPE node.
   OUTGOING is 1 if on a machine with register windows this function
   should return the register in which the function will put its result
   and 0 otherwise.  */

rtx
hard_function_value (const_tree valtype, const_tree func, const_tree fntype,
                     int outgoing ATTRIBUTE_UNUSED)
{
  rtx val;

  val = targetm.calls.function_value (valtype, func ? func : fntype, outgoing);

  if (REG_P (val)
      && GET_MODE (val) == BLKmode)
    {
      unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
      enum machine_mode tmpmode;

      /* int_size_in_bytes can return -1.  We don't need a check here
         since the value of bytes will then be large enough that no
         mode will match anyway.  */

      for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmpmode != VOIDmode;
           tmpmode = GET_MODE_WIDER_MODE (tmpmode))
        {
          /* Have we found a large enough mode?  */
          if (GET_MODE_SIZE (tmpmode) >= bytes)
            break;
        }

      /* No suitable mode found.  */
      gcc_assert (tmpmode != VOIDmode);

      PUT_MODE (val, tmpmode);
    }
  return val;
}
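
/* Illustration (added for clarity, not part of the original sources): for a
   6-byte structure returned in a register, the widening loop above walks
   QImode, HImode, SImode, DImode and stops at DImode, the narrowest integer
   mode of at least 6 bytes, which then replaces BLKmode on VAL.  */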

/* Return an rtx representing the register or memory location
   in which a scalar value of mode MODE was returned by a library call.  */

rtx
hard_libcall_value (enum machine_mode mode, rtx fun)
{
  return targetm.calls.libcall_value (mode, fun);
}

/* Look up the tree code for a given rtx code
   to provide the arithmetic operation for REAL_ARITHMETIC.
   The function returns an int because the caller may not know
   what `enum tree_code' means.  */

int
rtx_to_tree_code (enum rtx_code code)
{
  enum tree_code tcode;

  switch (code)
    {
    case PLUS:
      tcode = PLUS_EXPR;
      break;
    case MINUS:
      tcode = MINUS_EXPR;
      break;
    case MULT:
      tcode = MULT_EXPR;
      break;
    case DIV:
      tcode = RDIV_EXPR;
      break;
    case SMIN:
      tcode = MIN_EXPR;
      break;
    case SMAX:
      tcode = MAX_EXPR;
      break;
    default:
      tcode = LAST_AND_UNUSED_TREE_CODE;
      break;
    }
  return ((int) tcode);
}
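
/* Illustration (added for clarity, not part of the original sources):
   rtx_to_tree_code (MULT) yields (int) MULT_EXPR, while a code with no
   REAL_ARITHMETIC counterpart such as XOR yields
   (int) LAST_AND_UNUSED_TREE_CODE, which callers can treat as "no
   equivalent tree code".  */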

#include "gt-explow.h"