/* Subroutines for manipulating rtx's in semantically interesting ways.
   Copyright (C) 1987, 1991, 1994, 1995, 1996, 1997, 1998,
   1999, 2000 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "insn-flags.h"
#include "insn-codes.h"

#if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
#define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
#endif

static rtx break_out_memory_refs PARAMS ((rtx));
static void emit_stack_probe PARAMS ((rtx));
46/* Truncate and perhaps sign-extend C as appropriate for MODE. */
47
48HOST_WIDE_INT
49trunc_int_for_mode (c, mode)
50 HOST_WIDE_INT c;
51 enum machine_mode mode;
52{
53 int width = GET_MODE_BITSIZE (mode);
54
1f3f36d1
RH
55 /* Canonicalize BImode to 0 and STORE_FLAG_VALUE. */
56 if (mode == BImode)
57 return c & 1 ? STORE_FLAG_VALUE : 0;
58
7e4ce834
RH
59 /* We clear out all bits that don't belong in MODE, unless they and our
60 sign bit are all one. So we get either a reasonable negative
61 value or a reasonable unsigned value. */
62
63 if (width < HOST_BITS_PER_WIDE_INT
64 && ((c & ((HOST_WIDE_INT) (-1) << (width - 1)))
65 != ((HOST_WIDE_INT) (-1) << (width - 1))))
66 c &= ((HOST_WIDE_INT) 1 << width) - 1;
67
68 /* If this would be an entire word for the target, but is not for
69 the host, then sign-extend on the host so that the number will look
70 the same way on the host that it would on the target.
71
72 For example, when building a 64 bit alpha hosted 32 bit sparc
73 targeted compiler, then we want the 32 bit unsigned value -1 to be
74 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
 75 The latter confuses the sparc backend. */
76
77 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT
78 && BITS_PER_WORD == width
79 && (c & ((HOST_WIDE_INT) 1 << (width - 1))))
80 c |= ((HOST_WIDE_INT) (-1) << width);
81
82 return c;
83}
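
/* Illustrative example (an assumption for exposition: an 8-bit QImode on a
   host whose HOST_WIDE_INT is wider than 8 bits): trunc_int_for_mode
   (0x1ff, QImode) masks the value down to 0xff, because the discarded bits
   do not all match the sign bit, while trunc_int_for_mode (-1, QImode) is
   returned unchanged since the high bits and the sign bit are already all
   ones.  */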
84
b1ec3c92
CH
85/* Return an rtx for the sum of X and the integer C.
86
8008b228 87 This function should be used via the `plus_constant' macro. */
18ca7dab
RK
88
89rtx
b1ec3c92 90plus_constant_wide (x, c)
18ca7dab 91 register rtx x;
b1ec3c92 92 register HOST_WIDE_INT c;
18ca7dab
RK
93{
94 register RTX_CODE code;
95 register enum machine_mode mode;
96 register rtx tem;
97 int all_constant = 0;
98
99 if (c == 0)
100 return x;
101
102 restart:
103
104 code = GET_CODE (x);
105 mode = GET_MODE (x);
106 switch (code)
107 {
108 case CONST_INT:
b1ec3c92 109 return GEN_INT (INTVAL (x) + c);
18ca7dab
RK
110
111 case CONST_DOUBLE:
112 {
f9e158c3 113 unsigned HOST_WIDE_INT l1 = CONST_DOUBLE_LOW (x);
b1ec3c92 114 HOST_WIDE_INT h1 = CONST_DOUBLE_HIGH (x);
f9e158c3 115 unsigned HOST_WIDE_INT l2 = c;
b1ec3c92 116 HOST_WIDE_INT h2 = c < 0 ? ~0 : 0;
f9e158c3
JM
117 unsigned HOST_WIDE_INT lv;
118 HOST_WIDE_INT hv;
18ca7dab
RK
119
120 add_double (l1, h1, l2, h2, &lv, &hv);
121
122 return immed_double_const (lv, hv, VOIDmode);
123 }
124
125 case MEM:
126 /* If this is a reference to the constant pool, try replacing it with
127 a reference to a new constant. If the resulting address isn't
128 valid, don't return it because we have no way to validize it. */
129 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
130 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
131 {
132 tem
133 = force_const_mem (GET_MODE (x),
134 plus_constant (get_pool_constant (XEXP (x, 0)),
135 c));
136 if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
137 return tem;
138 }
139 break;
140
141 case CONST:
142 /* If adding to something entirely constant, set a flag
143 so that we can add a CONST around the result. */
144 x = XEXP (x, 0);
145 all_constant = 1;
146 goto restart;
147
148 case SYMBOL_REF:
149 case LABEL_REF:
150 all_constant = 1;
151 break;
152
153 case PLUS:
154 /* The interesting case is adding the integer to a sum.
 155 Look for a constant term in the sum and combine
156 with C. For an integer constant term, we make a combined
157 integer. For a constant term that is not an explicit integer,
e5671f2b
RK
158 we cannot really combine, but group them together anyway.
159
03d937fc
R
160 Restart or use a recursive call in case the remaining operand is
161 something that we handle specially, such as a SYMBOL_REF.
162
163 We may not immediately return from the recursive call here, lest
164 all_constant gets lost. */
e5671f2b
RK
165
166 if (GET_CODE (XEXP (x, 1)) == CONST_INT)
03d937fc
R
167 {
168 c += INTVAL (XEXP (x, 1));
7e4ce834
RH
169
170 if (GET_MODE (x) != VOIDmode)
171 c = trunc_int_for_mode (c, GET_MODE (x));
172
03d937fc
R
173 x = XEXP (x, 0);
174 goto restart;
175 }
18ca7dab 176 else if (CONSTANT_P (XEXP (x, 0)))
03d937fc
R
177 {
178 x = gen_rtx_PLUS (mode,
179 plus_constant (XEXP (x, 0), c),
180 XEXP (x, 1));
181 c = 0;
182 }
18ca7dab 183 else if (CONSTANT_P (XEXP (x, 1)))
03d937fc
R
184 {
185 x = gen_rtx_PLUS (mode,
186 XEXP (x, 0),
187 plus_constant (XEXP (x, 1), c));
188 c = 0;
189 }
38a448ca
RH
190 break;
191
192 default:
193 break;
18ca7dab
RK
194 }
195
196 if (c != 0)
38a448ca 197 x = gen_rtx_PLUS (mode, x, GEN_INT (c));
18ca7dab
RK
198
199 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
200 return x;
201 else if (all_constant)
38a448ca 202 return gen_rtx_CONST (mode, x);
18ca7dab
RK
203 else
204 return x;
205}
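
/* Illustrative use, via the `plus_constant' macro mentioned above (the
   offset of -4 is arbitrary):

     rtx addr = plus_constant (frame_pointer_rtx, -4);

   When X is a CONST_INT, a symbolic constant, or a sum containing a
   constant term, the constant is folded in; otherwise the result is a new
   (plus X (const_int C)).  */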
206
b1ec3c92
CH
207/* This is the same as `plus_constant', except that it handles LO_SUM.
208
209 This function should be used via the `plus_constant_for_output' macro. */
18ca7dab
RK
210
211rtx
b1ec3c92 212plus_constant_for_output_wide (x, c)
18ca7dab 213 register rtx x;
b1ec3c92 214 register HOST_WIDE_INT c;
18ca7dab 215{
18ca7dab 216 register enum machine_mode mode = GET_MODE (x);
18ca7dab
RK
217
218 if (GET_CODE (x) == LO_SUM)
38a448ca 219 return gen_rtx_LO_SUM (mode, XEXP (x, 0),
c5c76735 220 plus_constant_for_output (XEXP (x, 1), c));
18ca7dab
RK
221
222 else
223 return plus_constant (x, c);
224}
225\f
226/* If X is a sum, return a new sum like X but lacking any constant terms.
227 Add all the removed constant terms into *CONSTPTR.
228 X itself is not altered. The result != X if and only if
229 it is not isomorphic to X. */
230
231rtx
232eliminate_constant_term (x, constptr)
233 rtx x;
234 rtx *constptr;
235{
236 register rtx x0, x1;
237 rtx tem;
238
239 if (GET_CODE (x) != PLUS)
240 return x;
241
242 /* First handle constants appearing at this level explicitly. */
243 if (GET_CODE (XEXP (x, 1)) == CONST_INT
244 && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
245 XEXP (x, 1)))
246 && GET_CODE (tem) == CONST_INT)
247 {
248 *constptr = tem;
249 return eliminate_constant_term (XEXP (x, 0), constptr);
250 }
251
252 tem = const0_rtx;
253 x0 = eliminate_constant_term (XEXP (x, 0), &tem);
254 x1 = eliminate_constant_term (XEXP (x, 1), &tem);
255 if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
256 && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
257 *constptr, tem))
258 && GET_CODE (tem) == CONST_INT)
259 {
260 *constptr = tem;
38a448ca 261 return gen_rtx_PLUS (GET_MODE (x), x0, x1);
18ca7dab
RK
262 }
263
264 return x;
265}
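
/* Illustrative sketch: with *CONSTPTR initialized to const0_rtx and
   X = (plus (plus (reg) (const_int 4)) (const_int 8)), the call returns
   (reg) and leaves (const_int 12) in *CONSTPTR.  */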
266
267/* Returns the insn that next references REG after INSN, or 0
 268 if REG is clobbered before it is next referenced or we cannot find
269 an insn that references REG in a straight-line piece of code. */
270
271rtx
272find_next_ref (reg, insn)
273 rtx reg;
274 rtx insn;
275{
276 rtx next;
277
278 for (insn = NEXT_INSN (insn); insn; insn = next)
279 {
280 next = NEXT_INSN (insn);
281 if (GET_CODE (insn) == NOTE)
282 continue;
283 if (GET_CODE (insn) == CODE_LABEL
284 || GET_CODE (insn) == BARRIER)
285 return 0;
286 if (GET_CODE (insn) == INSN
287 || GET_CODE (insn) == JUMP_INSN
288 || GET_CODE (insn) == CALL_INSN)
289 {
290 if (reg_set_p (reg, insn))
291 return 0;
292 if (reg_mentioned_p (reg, PATTERN (insn)))
293 return insn;
294 if (GET_CODE (insn) == JUMP_INSN)
295 {
7f1c097d 296 if (any_uncondjump_p (insn))
18ca7dab
RK
297 next = JUMP_LABEL (insn);
298 else
299 return 0;
300 }
301 if (GET_CODE (insn) == CALL_INSN
302 && REGNO (reg) < FIRST_PSEUDO_REGISTER
303 && call_used_regs[REGNO (reg)])
304 return 0;
305 }
306 else
307 abort ();
308 }
309 return 0;
310}
311
312/* Return an rtx for the size in bytes of the value of EXP. */
313
314rtx
315expr_size (exp)
316 tree exp;
317{
99098c66
RK
318 tree size = size_in_bytes (TREE_TYPE (exp));
319
320 if (TREE_CODE (size) != INTEGER_CST
321 && contains_placeholder_p (size))
322 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
323
8fbea4dc
RK
324 return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype),
325 EXPAND_MEMORY_USE_BAD);
18ca7dab
RK
326}
327\f
328/* Return a copy of X in which all memory references
329 and all constants that involve symbol refs
330 have been replaced with new temporary registers.
331 Also emit code to load the memory locations and constants
332 into those registers.
333
334 If X contains no such constants or memory references,
335 X itself (not a copy) is returned.
336
337 If a constant is found in the address that is not a legitimate constant
338 in an insn, it is left alone in the hope that it might be valid in the
339 address.
340
341 X may contain no arithmetic except addition, subtraction and multiplication.
342 Values returned by expand_expr with 1 for sum_ok fit this constraint. */
343
344static rtx
345break_out_memory_refs (x)
346 register rtx x;
347{
348 if (GET_CODE (x) == MEM
cabeca29 349 || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
18ca7dab 350 && GET_MODE (x) != VOIDmode))
2cca6e3f 351 x = force_reg (GET_MODE (x), x);
18ca7dab
RK
352 else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
353 || GET_CODE (x) == MULT)
354 {
355 register rtx op0 = break_out_memory_refs (XEXP (x, 0));
356 register rtx op1 = break_out_memory_refs (XEXP (x, 1));
2cca6e3f 357
18ca7dab 358 if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
38a448ca 359 x = gen_rtx_fmt_ee (GET_CODE (x), Pmode, op0, op1);
18ca7dab 360 }
2cca6e3f 361
18ca7dab
RK
362 return x;
363}
364
ea534b63
RK
365#ifdef POINTERS_EXTEND_UNSIGNED
366
367/* Given X, a memory address in ptr_mode, convert it to an address
498b529f
RK
368 in Pmode, or vice versa (TO_MODE says which way). We take advantage of
369 the fact that pointers are not allowed to overflow by commuting arithmetic
370 operations over conversions so that address arithmetic insns can be
371 used. */
ea534b63 372
498b529f
RK
373rtx
374convert_memory_address (to_mode, x)
375 enum machine_mode to_mode;
ea534b63
RK
376 rtx x;
377{
0b04ec8c 378 enum machine_mode from_mode = to_mode == ptr_mode ? Pmode : ptr_mode;
498b529f
RK
379 rtx temp;
380
0b04ec8c
RK
381 /* Here we handle some special cases. If none of them apply, fall through
382 to the default case. */
ea534b63
RK
383 switch (GET_CODE (x))
384 {
385 case CONST_INT:
386 case CONST_DOUBLE:
498b529f
RK
387 return x;
388
ea534b63 389 case LABEL_REF:
38a448ca
RH
390 temp = gen_rtx_LABEL_REF (to_mode, XEXP (x, 0));
391 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
392 return temp;
498b529f 393
ea534b63 394 case SYMBOL_REF:
38a448ca 395 temp = gen_rtx_SYMBOL_REF (to_mode, XSTR (x, 0));
498b529f 396 SYMBOL_REF_FLAG (temp) = SYMBOL_REF_FLAG (x);
d7dc4377 397 CONSTANT_POOL_ADDRESS_P (temp) = CONSTANT_POOL_ADDRESS_P (x);
7d797311 398 STRING_POOL_ADDRESS_P (temp) = STRING_POOL_ADDRESS_P (x);
498b529f 399 return temp;
ea534b63 400
498b529f 401 case CONST:
38a448ca
RH
402 return gen_rtx_CONST (to_mode,
403 convert_memory_address (to_mode, XEXP (x, 0)));
ea534b63 404
0b04ec8c
RK
405 case PLUS:
406 case MULT:
 407 /* If this is an addition whose second operand is a small constant, we can
 408 safely permute the conversion and the addition operation. We can always
 409 safely permute them if we are making the address narrower. In addition,
 410 always permute the operations if this is a constant. */
0b04ec8c
RK
411 if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
412 || (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT
60725c78
RK
413 && (INTVAL (XEXP (x, 1)) + 20000 < 40000
414 || CONSTANT_P (XEXP (x, 0)))))
38a448ca
RH
415 return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
416 convert_memory_address (to_mode, XEXP (x, 0)),
417 convert_memory_address (to_mode, XEXP (x, 1)));
418 break;
419
420 default:
421 break;
ea534b63 422 }
0b04ec8c
RK
423
424 return convert_modes (to_mode, from_mode,
425 x, POINTERS_EXTEND_UNSIGNED);
ea534b63
RK
426}
427#endif
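
/* Illustrative note on convert_memory_address (it is only compiled when
   POINTERS_EXTEND_UNSIGNED is defined, e.g. a 64-bit Pmode with a 32-bit
   ptr_mode): a SYMBOL_REF or LABEL_REF is simply rebuilt in the destination
   mode, whereas a plain register address falls through to convert_modes and
   is zero- or sign-extended as POINTERS_EXTEND_UNSIGNED directs.  */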
428
18ca7dab
RK
429/* Given a memory address or facsimile X, construct a new address,
430 currently equivalent, that is stable: future stores won't change it.
431
432 X must be composed of constants, register and memory references
433 combined with addition, subtraction and multiplication:
434 in other words, just what you can get from expand_expr if sum_ok is 1.
435
436 Works by making copies of all regs and memory locations used
437 by X and combining them the same way X does.
438 You could also stabilize the reference to this address
439 by copying the address to a register with copy_to_reg;
440 but then you wouldn't get indexed addressing in the reference. */
441
442rtx
443copy_all_regs (x)
444 register rtx x;
445{
446 if (GET_CODE (x) == REG)
447 {
11c50c5e
DE
448 if (REGNO (x) != FRAME_POINTER_REGNUM
449#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
450 && REGNO (x) != HARD_FRAME_POINTER_REGNUM
451#endif
452 )
18ca7dab
RK
453 x = copy_to_reg (x);
454 }
455 else if (GET_CODE (x) == MEM)
456 x = copy_to_reg (x);
457 else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
458 || GET_CODE (x) == MULT)
459 {
460 register rtx op0 = copy_all_regs (XEXP (x, 0));
461 register rtx op1 = copy_all_regs (XEXP (x, 1));
462 if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
38a448ca 463 x = gen_rtx_fmt_ee (GET_CODE (x), Pmode, op0, op1);
18ca7dab
RK
464 }
465 return x;
466}
467\f
468/* Return something equivalent to X but valid as a memory address
469 for something of mode MODE. When X is not itself valid, this
470 works by copying X or subexpressions of it into registers. */
471
472rtx
473memory_address (mode, x)
474 enum machine_mode mode;
475 register rtx x;
476{
18b9ca6f 477 register rtx oldx = x;
18ca7dab 478
38a448ca
RH
479 if (GET_CODE (x) == ADDRESSOF)
480 return x;
481
ea534b63
RK
482#ifdef POINTERS_EXTEND_UNSIGNED
483 if (GET_MODE (x) == ptr_mode)
498b529f 484 x = convert_memory_address (Pmode, x);
ea534b63
RK
485#endif
486
18ca7dab
RK
487 /* By passing constant addresses thru registers
488 we get a chance to cse them. */
cabeca29 489 if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
18b9ca6f 490 x = force_reg (Pmode, x);
18ca7dab
RK
491
492 /* Accept a QUEUED that refers to a REG
493 even though that isn't a valid address.
494 On attempting to put this in an insn we will call protect_from_queue
495 which will turn it into a REG, which is valid. */
18b9ca6f 496 else if (GET_CODE (x) == QUEUED
18ca7dab 497 && GET_CODE (QUEUED_VAR (x)) == REG)
18b9ca6f 498 ;
18ca7dab
RK
499
500 /* We get better cse by rejecting indirect addressing at this stage.
501 Let the combiner create indirect addresses where appropriate.
502 For now, generate the code so that the subexpressions useful to share
503 are visible. But not if cse won't be done! */
18b9ca6f 504 else
18ca7dab 505 {
18b9ca6f
RK
506 if (! cse_not_expected && GET_CODE (x) != REG)
507 x = break_out_memory_refs (x);
508
509 /* At this point, any valid address is accepted. */
510 GO_IF_LEGITIMATE_ADDRESS (mode, x, win);
511
512 /* If it was valid before but breaking out memory refs invalidated it,
513 use it the old way. */
514 if (memory_address_p (mode, oldx))
515 goto win2;
516
517 /* Perform machine-dependent transformations on X
518 in certain cases. This is not necessary since the code
519 below can handle all possible cases, but machine-dependent
520 transformations can make better code. */
521 LEGITIMIZE_ADDRESS (x, oldx, mode, win);
522
523 /* PLUS and MULT can appear in special ways
524 as the result of attempts to make an address usable for indexing.
525 Usually they are dealt with by calling force_operand, below.
526 But a sum containing constant terms is special
527 if removing them makes the sum a valid address:
528 then we generate that address in a register
529 and index off of it. We do this because it often makes
530 shorter code, and because the addresses thus generated
531 in registers often become common subexpressions. */
532 if (GET_CODE (x) == PLUS)
533 {
534 rtx constant_term = const0_rtx;
535 rtx y = eliminate_constant_term (x, &constant_term);
536 if (constant_term == const0_rtx
537 || ! memory_address_p (mode, y))
538 x = force_operand (x, NULL_RTX);
539 else
540 {
38a448ca 541 y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
18b9ca6f
RK
542 if (! memory_address_p (mode, y))
543 x = force_operand (x, NULL_RTX);
544 else
545 x = y;
546 }
547 }
18ca7dab 548
e475ed2a 549 else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
18b9ca6f 550 x = force_operand (x, NULL_RTX);
18ca7dab 551
18b9ca6f
RK
552 /* If we have a register that's an invalid address,
553 it must be a hard reg of the wrong class. Copy it to a pseudo. */
554 else if (GET_CODE (x) == REG)
555 x = copy_to_reg (x);
556
557 /* Last resort: copy the value to a register, since
558 the register is a valid address. */
559 else
560 x = force_reg (Pmode, x);
561
562 goto done;
18ca7dab 563
c02a7fbb
RK
564 win2:
565 x = oldx;
566 win:
567 if (flag_force_addr && ! cse_not_expected && GET_CODE (x) != REG
568 /* Don't copy an addr via a reg if it is one of our stack slots. */
569 && ! (GET_CODE (x) == PLUS
570 && (XEXP (x, 0) == virtual_stack_vars_rtx
571 || XEXP (x, 0) == virtual_incoming_args_rtx)))
572 {
573 if (general_operand (x, Pmode))
574 x = force_reg (Pmode, x);
575 else
576 x = force_operand (x, NULL_RTX);
577 }
18ca7dab 578 }
18b9ca6f
RK
579
580 done:
581
2cca6e3f
RK
582 /* If we didn't change the address, we are done. Otherwise, mark
583 a reg as a pointer if we have REG or REG + CONST_INT. */
584 if (oldx == x)
585 return x;
586 else if (GET_CODE (x) == REG)
bdb429a5 587 mark_reg_pointer (x, BITS_PER_UNIT);
2cca6e3f
RK
588 else if (GET_CODE (x) == PLUS
589 && GET_CODE (XEXP (x, 0)) == REG
590 && GET_CODE (XEXP (x, 1)) == CONST_INT)
bdb429a5 591 mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);
2cca6e3f 592
18b9ca6f
RK
593 /* OLDX may have been the address on a temporary. Update the address
594 to indicate that X is now used. */
595 update_temp_slot_address (oldx, x);
596
18ca7dab
RK
597 return x;
598}
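
/* Illustrative use (ADDR here stands for any address built by expand_expr
   with sum_ok):

     rtx mem = gen_rtx_MEM (SImode, memory_address (SImode, addr));

   guarantees that the address inside MEM satisfies
   GO_IF_LEGITIMATE_ADDRESS for SImode on the target.  */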
599
600/* Like `memory_address' but pretend `flag_force_addr' is 0. */
601
602rtx
603memory_address_noforce (mode, x)
604 enum machine_mode mode;
605 rtx x;
606{
607 int ambient_force_addr = flag_force_addr;
608 rtx val;
609
610 flag_force_addr = 0;
611 val = memory_address (mode, x);
612 flag_force_addr = ambient_force_addr;
613 return val;
614}
615
616/* Convert a mem ref into one with a valid memory address.
617 Pass through anything else unchanged. */
618
619rtx
620validize_mem (ref)
621 rtx ref;
622{
623 if (GET_CODE (ref) != MEM)
624 return ref;
625 if (memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
626 return ref;
627 /* Don't alter REF itself, since that is probably a stack slot. */
628 return change_address (ref, GET_MODE (ref), XEXP (ref, 0));
629}
630\f
258a120b
JM
631/* Given REF, either a MEM or a REG, and T, either the type of REF or
632 the expression corresponding to REF, set RTX_UNCHANGING_P if
633 appropriate. */
634
635void
636maybe_set_unchanging (ref, t)
637 rtx ref;
638 tree t;
639{
640 /* We can set RTX_UNCHANGING_P from TREE_READONLY for decls whose
641 initialization is only executed once, or whose initializer always
642 has the same value. Currently we simplify this to PARM_DECLs in the
643 first case, and decls with TREE_CONSTANT initializers in the second. */
644 if ((TREE_READONLY (t) && DECL_P (t)
645 && (TREE_CODE (t) == PARM_DECL
646 || DECL_INITIAL (t) == NULL_TREE
647 || TREE_CONSTANT (DECL_INITIAL (t))))
648 || TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
649 RTX_UNCHANGING_P (ref) = 1;
650}
651
3bdf5ad1
RK
652/* Given REF, a MEM, and T, either the type of REF or the expression
653 corresponding to REF, set the memory attributes. OBJECTP is nonzero
654 if we are making a new object of this type. */
655
656void
657set_mem_attributes (ref, t, objectp)
658 rtx ref;
659 tree t;
660 int objectp;
661{
be8d9ace
RH
662 tree type;
663
664 /* It can happen that type_for_mode was given a mode for which there
 665 is no language-level type, in which case it returns NULL and we
 666 can see that here. */
667 if (t == NULL_TREE)
668 return;
669
670 type = TYPE_P (t) ? t : TREE_TYPE (t);
3bdf5ad1
RK
671
672 /* Get the alias set from the expression or type (perhaps using a
673 front-end routine) and then copy bits from the type. */
258a120b
JM
674
675 /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY (type)
676 here, because, in C and C++, the fact that a location is accessed
677 through a const expression does not mean that the value there can
678 never change. */
3bdf5ad1 679 MEM_ALIAS_SET (ref) = get_alias_set (t);
3bdf5ad1
RK
680 MEM_VOLATILE_P (ref) = TYPE_VOLATILE (type);
681 MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);
682
683 /* If we are making an object of this type, we know that it is a scalar if
684 the type is not an aggregate. */
685 if (objectp && ! AGGREGATE_TYPE_P (type))
686 MEM_SCALAR_P (ref) = 1;
687
688 /* If T is a type, this is all we can do. Otherwise, we may be able
689 to deduce some more information about the expression. */
690 if (TYPE_P (t))
691 return;
692
258a120b 693 maybe_set_unchanging (ref, t);
3bdf5ad1
RK
694 if (TREE_THIS_VOLATILE (t))
695 MEM_VOLATILE_P (ref) = 1;
696
697 /* Now see if we can say more about whether it's an aggregate or
698 scalar. If we already know it's an aggregate, don't bother. */
699 if (MEM_IN_STRUCT_P (ref))
700 return;
701
702 /* Now remove any NOPs: they don't change what the underlying object is.
703 Likewise for SAVE_EXPR. */
704 while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
705 || TREE_CODE (t) == NON_LVALUE_EXPR || TREE_CODE (t) == SAVE_EXPR)
706 t = TREE_OPERAND (t, 0);
707
708 /* Since we already know the type isn't an aggregate, if this is a decl,
709 it must be a scalar. Or if it is a reference into an aggregate,
710 this is part of an aggregate. Otherwise we don't know. */
711 if (DECL_P (t))
712 MEM_SCALAR_P (ref) = 1;
713 else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
714 || TREE_CODE (t) == BIT_FIELD_REF)
715 MEM_IN_STRUCT_P (ref) = 1;
716}
717\f
18ca7dab
RK
718/* Return a modified copy of X with its memory address copied
719 into a temporary register to protect it from side effects.
720 If X is not a MEM, it is returned unchanged (and not copied).
721 Perhaps even if it is a MEM, if there is no need to change it. */
722
723rtx
724stabilize (x)
725 rtx x;
726{
727 register rtx addr;
3bdf5ad1 728
18ca7dab
RK
729 if (GET_CODE (x) != MEM)
730 return x;
3bdf5ad1 731
18ca7dab
RK
732 addr = XEXP (x, 0);
733 if (rtx_unstable_p (addr))
734 {
3bdf5ad1
RK
735 rtx temp = force_reg (Pmode, copy_all_regs (addr));
736 rtx mem = gen_rtx_MEM (GET_MODE (x), temp);
18ca7dab 737
c6df88cb 738 MEM_COPY_ATTRIBUTES (mem, x);
18ca7dab
RK
739 return mem;
740 }
741 return x;
742}
743\f
744/* Copy the value or contents of X to a new temp reg and return that reg. */
745
746rtx
747copy_to_reg (x)
748 rtx x;
749{
750 register rtx temp = gen_reg_rtx (GET_MODE (x));
751
752 /* If not an operand, must be an address with PLUS and MULT so
753 do the computation. */
754 if (! general_operand (x, VOIDmode))
755 x = force_operand (x, temp);
756
757 if (x != temp)
758 emit_move_insn (temp, x);
759
760 return temp;
761}
762
763/* Like copy_to_reg but always give the new register mode Pmode
764 in case X is a constant. */
765
766rtx
767copy_addr_to_reg (x)
768 rtx x;
769{
770 return copy_to_mode_reg (Pmode, x);
771}
772
773/* Like copy_to_reg but always give the new register mode MODE
774 in case X is a constant. */
775
776rtx
777copy_to_mode_reg (mode, x)
778 enum machine_mode mode;
779 rtx x;
780{
781 register rtx temp = gen_reg_rtx (mode);
782
783 /* If not an operand, must be an address with PLUS and MULT so
784 do the computation. */
785 if (! general_operand (x, VOIDmode))
786 x = force_operand (x, temp);
787
788 if (GET_MODE (x) != mode && GET_MODE (x) != VOIDmode)
789 abort ();
790 if (x != temp)
791 emit_move_insn (temp, x);
792 return temp;
793}
794
795/* Load X into a register if it is not already one.
796 Use mode MODE for the register.
797 X should be valid for mode MODE, but it may be a constant which
798 is valid for all integer modes; that's why caller must specify MODE.
799
800 The caller must not alter the value in the register we return,
801 since we mark it as a "constant" register. */
802
803rtx
804force_reg (mode, x)
805 enum machine_mode mode;
806 rtx x;
807{
62874575 808 register rtx temp, insn, set;
18ca7dab
RK
809
810 if (GET_CODE (x) == REG)
811 return x;
96843fa2 812
18ca7dab 813 temp = gen_reg_rtx (mode);
96843fa2
NC
814
815 if (! general_operand (x, mode))
816 x = force_operand (x, NULL_RTX);
817
18ca7dab 818 insn = emit_move_insn (temp, x);
62874575 819
18ca7dab 820 /* Let optimizers know that TEMP's value never changes
62874575
RK
821 and that X can be substituted for it. Don't get confused
822 if INSN set something else (such as a SUBREG of TEMP). */
823 if (CONSTANT_P (x)
824 && (set = single_set (insn)) != 0
825 && SET_DEST (set) == temp)
18ca7dab 826 {
b1ec3c92 827 rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
18ca7dab
RK
828
829 if (note)
830 XEXP (note, 0) = x;
831 else
38a448ca 832 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, x, REG_NOTES (insn));
18ca7dab
RK
833 }
834 return temp;
835}
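
/* Illustrative sketch: force_reg (SImode, GEN_INT (42)) emits a move of
   (const_int 42) into a fresh pseudo and records a REG_EQUAL note, so later
   optimizers may substitute the constant for the register.  */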
836
837/* If X is a memory ref, copy its contents to a new temp reg and return
838 that reg. Otherwise, return X. */
839
840rtx
841force_not_mem (x)
842 rtx x;
843{
844 register rtx temp;
fe3439b0 845
18ca7dab
RK
846 if (GET_CODE (x) != MEM || GET_MODE (x) == BLKmode)
847 return x;
fe3439b0 848
18ca7dab
RK
849 temp = gen_reg_rtx (GET_MODE (x));
850 emit_move_insn (temp, x);
851 return temp;
852}
853
854/* Copy X to TARGET (if it's nonzero and a reg)
855 or to a new temp reg and return that reg.
856 MODE is the mode to use for X in case it is a constant. */
857
858rtx
859copy_to_suggested_reg (x, target, mode)
860 rtx x, target;
861 enum machine_mode mode;
862{
863 register rtx temp;
864
865 if (target && GET_CODE (target) == REG)
866 temp = target;
867 else
868 temp = gen_reg_rtx (mode);
869
870 emit_move_insn (temp, x);
871 return temp;
872}
873\f
9ff65789
RK
874/* Return the mode to use to store a scalar of TYPE and MODE.
875 PUNSIGNEDP points to the signedness of the type and may be adjusted
876 to show what signedness to use on extension operations.
877
878 FOR_CALL is non-zero if this call is promoting args for a call. */
879
880enum machine_mode
881promote_mode (type, mode, punsignedp, for_call)
882 tree type;
883 enum machine_mode mode;
884 int *punsignedp;
c84e2712 885 int for_call ATTRIBUTE_UNUSED;
9ff65789
RK
886{
887 enum tree_code code = TREE_CODE (type);
888 int unsignedp = *punsignedp;
889
890#ifdef PROMOTE_FOR_CALL_ONLY
891 if (! for_call)
892 return mode;
893#endif
894
895 switch (code)
896 {
897#ifdef PROMOTE_MODE
898 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
899 case CHAR_TYPE: case REAL_TYPE: case OFFSET_TYPE:
900 PROMOTE_MODE (mode, unsignedp, type);
901 break;
902#endif
903
ea534b63 904#ifdef POINTERS_EXTEND_UNSIGNED
56a4c9e2 905 case REFERENCE_TYPE:
9ff65789 906 case POINTER_TYPE:
ea534b63
RK
907 mode = Pmode;
908 unsignedp = POINTERS_EXTEND_UNSIGNED;
9ff65789 909 break;
ea534b63 910#endif
38a448ca
RH
911
912 default:
913 break;
9ff65789
RK
914 }
915
916 *punsignedp = unsignedp;
917 return mode;
918}
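
/* Illustrative use (assuming a target whose PROMOTE_MODE widens QImode to
   SImode):

     int unsignedp = TREE_UNSIGNED (type);
     enum machine_mode mode = promote_mode (type, QImode, &unsignedp, 0);

   would return SImode, with unsignedp updated to say how the extension is
   to be performed.  */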
919\f
18ca7dab
RK
920/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
921 This pops when ADJUST is positive. ADJUST need not be constant. */
922
923void
924adjust_stack (adjust)
925 rtx adjust;
926{
927 rtx temp;
928 adjust = protect_from_queue (adjust, 0);
929
930 if (adjust == const0_rtx)
931 return;
932
1503a7ec
JH
 933 /* We expect all variable sized adjustments to be a multiple of
934 PREFERRED_STACK_BOUNDARY. */
935 if (GET_CODE (adjust) == CONST_INT)
936 stack_pointer_delta -= INTVAL (adjust);
937
18ca7dab
RK
938 temp = expand_binop (Pmode,
939#ifdef STACK_GROWS_DOWNWARD
940 add_optab,
941#else
942 sub_optab,
943#endif
944 stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
945 OPTAB_LIB_WIDEN);
946
947 if (temp != stack_pointer_rtx)
948 emit_move_insn (stack_pointer_rtx, temp);
949}
950
951/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
952 This pushes when ADJUST is positive. ADJUST need not be constant. */
953
954void
955anti_adjust_stack (adjust)
956 rtx adjust;
957{
958 rtx temp;
959 adjust = protect_from_queue (adjust, 0);
960
961 if (adjust == const0_rtx)
962 return;
963
1503a7ec
JH
 964 /* We expect all variable sized adjustments to be a multiple of
965 PREFERRED_STACK_BOUNDARY. */
966 if (GET_CODE (adjust) == CONST_INT)
967 stack_pointer_delta += INTVAL (adjust);
968
18ca7dab
RK
969 temp = expand_binop (Pmode,
970#ifdef STACK_GROWS_DOWNWARD
971 sub_optab,
972#else
973 add_optab,
974#endif
975 stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
976 OPTAB_LIB_WIDEN);
977
978 if (temp != stack_pointer_rtx)
979 emit_move_insn (stack_pointer_rtx, temp);
980}
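
/* Illustrative sketch: anti_adjust_stack (GEN_INT (16)) pushes 16 bytes
   (the stack pointer moves downward on STACK_GROWS_DOWNWARD targets) and
   adjust_stack (GEN_INT (16)) pops them again; both also keep
   stack_pointer_delta up to date for constant adjustments.  */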
981
982/* Round the size of a block to be pushed up to the boundary required
983 by this machine. SIZE is the desired size, which need not be constant. */
984
985rtx
986round_push (size)
987 rtx size;
988{
c795bca9
BS
989#ifdef PREFERRED_STACK_BOUNDARY
990 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
18ca7dab
RK
991 if (align == 1)
992 return size;
993 if (GET_CODE (size) == CONST_INT)
994 {
995 int new = (INTVAL (size) + align - 1) / align * align;
996 if (INTVAL (size) != new)
b1ec3c92 997 size = GEN_INT (new);
18ca7dab
RK
998 }
999 else
1000 {
5244db05 1001 /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
0f41302f
MS
1002 but we know it can't. So add ourselves and then do
1003 TRUNC_DIV_EXPR. */
5244db05
RK
1004 size = expand_binop (Pmode, add_optab, size, GEN_INT (align - 1),
1005 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1006 size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, GEN_INT (align),
b1ec3c92
CH
1007 NULL_RTX, 1);
1008 size = expand_mult (Pmode, size, GEN_INT (align), NULL_RTX, 1);
18ca7dab 1009 }
c795bca9 1010#endif /* PREFERRED_STACK_BOUNDARY */
18ca7dab
RK
1011 return size;
1012}
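
/* Illustrative sketch (assuming a PREFERRED_STACK_BOUNDARY of 64 bits,
   i.e. an 8-byte alignment): round_push (GEN_INT (13)) returns
   (const_int 16); a variable size is rounded at run time with the
   add/divide/multiply sequence above.  */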
1013\f
59257ff7
RK
1014/* Save the stack pointer for the purpose in SAVE_LEVEL. PSAVE is a pointer
1015 to a previously-created save area. If no save area has been allocated,
1016 this function will allocate one. If a save area is specified, it
1017 must be of the proper mode.
1018
1019 The insns are emitted after insn AFTER, if nonzero, otherwise the insns
1020 are emitted at the current position. */
1021
1022void
1023emit_stack_save (save_level, psave, after)
1024 enum save_level save_level;
1025 rtx *psave;
1026 rtx after;
1027{
1028 rtx sa = *psave;
1029 /* The default is that we use a move insn and save in a Pmode object. */
711d877c 1030 rtx (*fcn) PARAMS ((rtx, rtx)) = gen_move_insn;
a260abc9 1031 enum machine_mode mode = STACK_SAVEAREA_MODE (save_level);
59257ff7
RK
1032
1033 /* See if this machine has anything special to do for this kind of save. */
1034 switch (save_level)
1035 {
1036#ifdef HAVE_save_stack_block
1037 case SAVE_BLOCK:
1038 if (HAVE_save_stack_block)
a260abc9 1039 fcn = gen_save_stack_block;
59257ff7
RK
1040 break;
1041#endif
1042#ifdef HAVE_save_stack_function
1043 case SAVE_FUNCTION:
1044 if (HAVE_save_stack_function)
a260abc9 1045 fcn = gen_save_stack_function;
59257ff7
RK
1046 break;
1047#endif
1048#ifdef HAVE_save_stack_nonlocal
1049 case SAVE_NONLOCAL:
1050 if (HAVE_save_stack_nonlocal)
a260abc9 1051 fcn = gen_save_stack_nonlocal;
59257ff7
RK
1052 break;
1053#endif
38a448ca
RH
1054 default:
1055 break;
59257ff7
RK
1056 }
1057
1058 /* If there is no save area and we have to allocate one, do so. Otherwise
1059 verify the save area is the proper mode. */
1060
1061 if (sa == 0)
1062 {
1063 if (mode != VOIDmode)
1064 {
1065 if (save_level == SAVE_NONLOCAL)
1066 *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
1067 else
1068 *psave = sa = gen_reg_rtx (mode);
1069 }
1070 }
1071 else
1072 {
1073 if (mode == VOIDmode || GET_MODE (sa) != mode)
1074 abort ();
1075 }
1076
1077 if (after)
700f6f98
RK
1078 {
1079 rtx seq;
1080
1081 start_sequence ();
5460015d
JW
1082 /* We must validize inside the sequence, to ensure that any instructions
1083 created by the validize call also get moved to the right place. */
1084 if (sa != 0)
1085 sa = validize_mem (sa);
d072107f 1086 emit_insn (fcn (sa, stack_pointer_rtx));
700f6f98
RK
1087 seq = gen_sequence ();
1088 end_sequence ();
1089 emit_insn_after (seq, after);
1090 }
59257ff7 1091 else
5460015d
JW
1092 {
1093 if (sa != 0)
1094 sa = validize_mem (sa);
1095 emit_insn (fcn (sa, stack_pointer_rtx));
1096 }
59257ff7
RK
1097}
1098
1099/* Restore the stack pointer for the purpose in SAVE_LEVEL. SA is the save
1100 area made by emit_stack_save. If it is zero, we have nothing to do.
1101
1102 Put any emitted insns after insn AFTER, if nonzero, otherwise at
1103 current position. */
1104
1105void
1106emit_stack_restore (save_level, sa, after)
1107 enum save_level save_level;
1108 rtx after;
1109 rtx sa;
1110{
1111 /* The default is that we use a move insn. */
711d877c 1112 rtx (*fcn) PARAMS ((rtx, rtx)) = gen_move_insn;
59257ff7
RK
1113
1114 /* See if this machine has anything special to do for this kind of save. */
1115 switch (save_level)
1116 {
1117#ifdef HAVE_restore_stack_block
1118 case SAVE_BLOCK:
1119 if (HAVE_restore_stack_block)
1120 fcn = gen_restore_stack_block;
1121 break;
1122#endif
1123#ifdef HAVE_restore_stack_function
1124 case SAVE_FUNCTION:
1125 if (HAVE_restore_stack_function)
1126 fcn = gen_restore_stack_function;
1127 break;
1128#endif
1129#ifdef HAVE_restore_stack_nonlocal
59257ff7
RK
1130 case SAVE_NONLOCAL:
1131 if (HAVE_restore_stack_nonlocal)
1132 fcn = gen_restore_stack_nonlocal;
1133 break;
1134#endif
38a448ca
RH
1135 default:
1136 break;
59257ff7
RK
1137 }
1138
d072107f
RK
1139 if (sa != 0)
1140 sa = validize_mem (sa);
1141
59257ff7 1142 if (after)
700f6f98
RK
1143 {
1144 rtx seq;
1145
1146 start_sequence ();
d072107f 1147 emit_insn (fcn (stack_pointer_rtx, sa));
700f6f98
RK
1148 seq = gen_sequence ();
1149 end_sequence ();
1150 emit_insn_after (seq, after);
1151 }
59257ff7 1152 else
d072107f 1153 emit_insn (fcn (stack_pointer_rtx, sa));
59257ff7
RK
1154}
1155\f
c9ec4f99
DM
1156#ifdef SETJMP_VIA_SAVE_AREA
1157/* Optimize RTL generated by allocate_dynamic_stack_space for targets
1158 where SETJMP_VIA_SAVE_AREA is true. The problem is that on these
1159 platforms, the dynamic stack space used can corrupt the original
1160 frame, thus causing a crash if a longjmp unwinds to it. */
1161
1162void
1163optimize_save_area_alloca (insns)
1164 rtx insns;
1165{
1166 rtx insn;
1167
1168 for (insn = insns; insn; insn = NEXT_INSN(insn))
1169 {
1170 rtx note;
1171
1172 if (GET_CODE (insn) != INSN)
1173 continue;
1174
1175 for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
1176 {
1177 if (REG_NOTE_KIND (note) != REG_SAVE_AREA)
1178 continue;
1179
1180 if (!current_function_calls_setjmp)
1181 {
1182 rtx pat = PATTERN (insn);
1183
1184 /* If we do not see the note in a pattern matching
1185 these precise characteristics, we did something
1186 entirely wrong in allocate_dynamic_stack_space.
1187
38e01259 1188 Note, one way this could happen is if SETJMP_VIA_SAVE_AREA
c9ec4f99
DM
1189 was defined on a machine where stacks grow towards higher
1190 addresses.
1191
 1192 Right now the only supported port whose stack grows upward
 1193 is the HPPA, and it does not define SETJMP_VIA_SAVE_AREA. */
1194 if (GET_CODE (pat) != SET
1195 || SET_DEST (pat) != stack_pointer_rtx
1196 || GET_CODE (SET_SRC (pat)) != MINUS
1197 || XEXP (SET_SRC (pat), 0) != stack_pointer_rtx)
1198 abort ();
1199
1200 /* This will now be transformed into a (set REG REG)
1201 so we can just blow away all the other notes. */
1202 XEXP (SET_SRC (pat), 1) = XEXP (note, 0);
1203 REG_NOTES (insn) = NULL_RTX;
1204 }
1205 else
1206 {
1207 /* setjmp was called, we must remove the REG_SAVE_AREA
1208 note so that later passes do not get confused by its
1209 presence. */
1210 if (note == REG_NOTES (insn))
1211 {
1212 REG_NOTES (insn) = XEXP (note, 1);
1213 }
1214 else
1215 {
1216 rtx srch;
1217
1218 for (srch = REG_NOTES (insn); srch; srch = XEXP (srch, 1))
1219 if (XEXP (srch, 1) == note)
1220 break;
1221
1222 if (srch == NULL_RTX)
1223 abort();
1224
1225 XEXP (srch, 1) = XEXP (note, 1);
1226 }
1227 }
1228 /* Once we've seen the note of interest, we need not look at
1229 the rest of them. */
1230 break;
1231 }
1232 }
1233}
1234#endif /* SETJMP_VIA_SAVE_AREA */
1235
18ca7dab
RK
1236/* Return an rtx representing the address of an area of memory dynamically
1237 pushed on the stack. This region of memory is always aligned to
1238 a multiple of BIGGEST_ALIGNMENT.
1239
1240 Any required stack pointer alignment is preserved.
1241
1242 SIZE is an rtx representing the size of the area.
091ad0b9
RK
1243 TARGET is a place in which the address can be placed.
1244
1245 KNOWN_ALIGN is the alignment (in bits) that we know SIZE has. */
18ca7dab
RK
1246
1247rtx
091ad0b9 1248allocate_dynamic_stack_space (size, target, known_align)
18ca7dab
RK
1249 rtx size;
1250 rtx target;
091ad0b9 1251 int known_align;
18ca7dab 1252{
c9ec4f99
DM
1253#ifdef SETJMP_VIA_SAVE_AREA
1254 rtx setjmpless_size = NULL_RTX;
1255#endif
1256
15fc0026 1257 /* If we're asking for zero bytes, it doesn't matter what we point
9faa82d8 1258 to since we can't dereference it. But return a reasonable
15fc0026
RK
1259 address anyway. */
1260 if (size == const0_rtx)
1261 return virtual_stack_dynamic_rtx;
1262
1263 /* Otherwise, show we're calling alloca or equivalent. */
1264 current_function_calls_alloca = 1;
1265
18ca7dab
RK
1266 /* Ensure the size is in the proper mode. */
1267 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
1268 size = convert_to_mode (Pmode, size, 1);
1269
c2f8b491
JH
 1270 /* We can't attempt to minimize the alignment necessary, because we don't
1271 know the final value of preferred_stack_boundary yet while executing
1272 this code. */
1273#ifdef PREFERRED_STACK_BOUNDARY
1274 cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
1275#endif
1276
18ca7dab
RK
1277 /* We will need to ensure that the address we return is aligned to
1278 BIGGEST_ALIGNMENT. If STACK_DYNAMIC_OFFSET is defined, we don't
1279 always know its final value at this point in the compilation (it
1280 might depend on the size of the outgoing parameter lists, for
1281 example), so we must align the value to be returned in that case.
1282 (Note that STACK_DYNAMIC_OFFSET will have a default non-zero value if
1283 STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).
1284 We must also do an alignment operation on the returned value if
 1285 the stack pointer alignment is less strict than BIGGEST_ALIGNMENT.
1286
1287 If we have to align, we must leave space in SIZE for the hole
1288 that might result from the alignment operation. */
1289
c795bca9 1290#if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET) || ! defined (PREFERRED_STACK_BOUNDARY)
515a7242
JW
1291#define MUST_ALIGN 1
1292#else
c795bca9 1293#define MUST_ALIGN (PREFERRED_STACK_BOUNDARY < BIGGEST_ALIGNMENT)
18ca7dab
RK
1294#endif
1295
515a7242 1296 if (MUST_ALIGN)
d5457140
RK
1297 size
1298 = force_operand (plus_constant (size,
1299 BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
1300 NULL_RTX);
1d9d04f8 1301
18ca7dab
RK
1302#ifdef SETJMP_VIA_SAVE_AREA
1303 /* If setjmp restores regs from a save area in the stack frame,
1304 avoid clobbering the reg save area. Note that the offset of
1305 virtual_incoming_args_rtx includes the preallocated stack args space.
1306 It would be no problem to clobber that, but it's on the wrong side
1307 of the old save area. */
1308 {
1309 rtx dynamic_offset
1310 = expand_binop (Pmode, sub_optab, virtual_stack_dynamic_rtx,
b1ec3c92 1311 stack_pointer_rtx, NULL_RTX, 1, OPTAB_LIB_WIDEN);
c9ec4f99
DM
1312
1313 if (!current_function_calls_setjmp)
1314 {
c795bca9 1315 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
c9ec4f99
DM
1316
1317 /* See optimize_save_area_alloca to understand what is being
1318 set up here. */
1319
c795bca9 1320#if !defined(PREFERRED_STACK_BOUNDARY) || !defined(MUST_ALIGN) || (PREFERRED_STACK_BOUNDARY != BIGGEST_ALIGNMENT)
c9ec4f99
DM
1321 /* If anyone creates a target with these characteristics, let them
1322 know that our optimization cannot work correctly in such a case. */
d5457140 1323 abort ();
c9ec4f99
DM
1324#endif
1325
1326 if (GET_CODE (size) == CONST_INT)
1327 {
d5457140 1328 HOST_WIDE_INT new = INTVAL (size) / align * align;
c9ec4f99
DM
1329
1330 if (INTVAL (size) != new)
1331 setjmpless_size = GEN_INT (new);
1332 else
1333 setjmpless_size = size;
1334 }
1335 else
1336 {
1337 /* Since we know overflow is not possible, we avoid using
1338 CEIL_DIV_EXPR and use TRUNC_DIV_EXPR instead. */
1339 setjmpless_size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size,
1340 GEN_INT (align), NULL_RTX, 1);
1341 setjmpless_size = expand_mult (Pmode, setjmpless_size,
1342 GEN_INT (align), NULL_RTX, 1);
1343 }
1344 /* Our optimization works based upon being able to perform a simple
1345 transformation of this RTL into a (set REG REG) so make sure things
1346 did in fact end up in a REG. */
ee5332b8 1347 if (!register_operand (setjmpless_size, Pmode))
c9ec4f99
DM
1348 setjmpless_size = force_reg (Pmode, setjmpless_size);
1349 }
1350
18ca7dab 1351 size = expand_binop (Pmode, add_optab, size, dynamic_offset,
b1ec3c92 1352 NULL_RTX, 1, OPTAB_LIB_WIDEN);
18ca7dab
RK
1353 }
1354#endif /* SETJMP_VIA_SAVE_AREA */
1355
1356 /* Round the size to a multiple of the required stack alignment.
 1357 Since the stack is presumed to be rounded before this allocation,
1358 this will maintain the required alignment.
1359
1360 If the stack grows downward, we could save an insn by subtracting
1361 SIZE from the stack pointer and then aligning the stack pointer.
1362 The problem with this is that the stack pointer may be unaligned
1363 between the execution of the subtraction and alignment insns and
1364 some machines do not allow this. Even on those that do, some
1365 signal handlers malfunction if a signal should occur between those
1366 insns. Since this is an extremely rare event, we have no reliable
1367 way of knowing which systems have this problem. So we avoid even
1368 momentarily mis-aligning the stack. */
1369
c795bca9 1370#ifdef PREFERRED_STACK_BOUNDARY
86b25e81
RS
1371 /* If we added a variable amount to SIZE,
1372 we can no longer assume it is aligned. */
515a7242 1373#if !defined (SETJMP_VIA_SAVE_AREA)
c795bca9 1374 if (MUST_ALIGN || known_align % PREFERRED_STACK_BOUNDARY != 0)
34c9156a 1375#endif
091ad0b9 1376 size = round_push (size);
89d825c9 1377#endif
18ca7dab
RK
1378
1379 do_pending_stack_adjust ();
1380
1503a7ec
JH
 1381 /* We ought always to be called at the top level, and the stack ought
 1382 to be aligned properly. */
1383#ifdef PREFERRED_STACK_BOUNDARY
1384 if (stack_pointer_delta % (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT))
1385 abort ();
1386#endif
1387
edff2491
RK
1388 /* If needed, check that we have the required amount of stack. Take into
1389 account what has already been checked. */
1390 if (flag_stack_check && ! STACK_CHECK_BUILTIN)
1391 probe_stack_range (STACK_CHECK_MAX_FRAME_SIZE + STACK_CHECK_PROTECT, size);
1392
d5457140 1393 /* Don't use a TARGET that isn't a pseudo or is the wrong mode. */
091ad0b9 1394 if (target == 0 || GET_CODE (target) != REG
d5457140
RK
1395 || REGNO (target) < FIRST_PSEUDO_REGISTER
1396 || GET_MODE (target) != Pmode)
18ca7dab
RK
1397 target = gen_reg_rtx (Pmode);
1398
bdb429a5 1399 mark_reg_pointer (target, known_align);
3ad69266 1400
18ca7dab
RK
1401 /* Perform the required allocation from the stack. Some systems do
1402 this differently than simply incrementing/decrementing from the
38a448ca 1403 stack pointer, such as acquiring the space by calling malloc(). */
18ca7dab
RK
1404#ifdef HAVE_allocate_stack
1405 if (HAVE_allocate_stack)
1406 {
39403d82 1407 enum machine_mode mode = STACK_SIZE_MODE;
a995e389 1408 insn_operand_predicate_fn pred;
39403d82 1409
a995e389
RH
1410 pred = insn_data[(int) CODE_FOR_allocate_stack].operand[0].predicate;
1411 if (pred && ! ((*pred) (target, Pmode)))
e0a52410
JL
1412#ifdef POINTERS_EXTEND_UNSIGNED
1413 target = convert_memory_address (Pmode, target);
1414#else
1415 target = copy_to_mode_reg (Pmode, target);
1416#endif
c5c76735
JL
1417
1418 if (mode == VOIDmode)
1419 mode = Pmode;
1420
a995e389
RH
1421 pred = insn_data[(int) CODE_FOR_allocate_stack].operand[1].predicate;
1422 if (pred && ! ((*pred) (size, mode)))
39403d82 1423 size = copy_to_mode_reg (mode, size);
18ca7dab 1424
38a448ca 1425 emit_insn (gen_allocate_stack (target, size));
18ca7dab
RK
1426 }
1427 else
1428#endif
ea534b63 1429 {
38a448ca
RH
1430#ifndef STACK_GROWS_DOWNWARD
1431 emit_move_insn (target, virtual_stack_dynamic_rtx);
1432#endif
a157febd
GK
1433
1434 /* Check stack bounds if necessary. */
1435 if (current_function_limit_stack)
1436 {
1437 rtx available;
1438 rtx space_available = gen_label_rtx ();
1439#ifdef STACK_GROWS_DOWNWARD
1440 available = expand_binop (Pmode, sub_optab,
1441 stack_pointer_rtx, stack_limit_rtx,
1442 NULL_RTX, 1, OPTAB_WIDEN);
1443#else
1444 available = expand_binop (Pmode, sub_optab,
1445 stack_limit_rtx, stack_pointer_rtx,
1446 NULL_RTX, 1, OPTAB_WIDEN);
1447#endif
1448 emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
1449 0, space_available);
1450#ifdef HAVE_trap
1451 if (HAVE_trap)
1452 emit_insn (gen_trap ());
1453 else
1454#endif
1455 error ("stack limits not supported on this target");
1456 emit_barrier ();
1457 emit_label (space_available);
1458 }
1459
ea534b63 1460 anti_adjust_stack (size);
c9ec4f99
DM
1461#ifdef SETJMP_VIA_SAVE_AREA
1462 if (setjmpless_size != NULL_RTX)
1463 {
1464 rtx note_target = get_last_insn ();
1465
9e6a5703
JC
1466 REG_NOTES (note_target)
1467 = gen_rtx_EXPR_LIST (REG_SAVE_AREA, setjmpless_size,
1468 REG_NOTES (note_target));
c9ec4f99
DM
1469 }
1470#endif /* SETJMP_VIA_SAVE_AREA */
d5457140 1471
18ca7dab
RK
1472#ifdef STACK_GROWS_DOWNWARD
1473 emit_move_insn (target, virtual_stack_dynamic_rtx);
1474#endif
38a448ca 1475 }
18ca7dab 1476
515a7242 1477 if (MUST_ALIGN)
091ad0b9 1478 {
5244db05 1479 /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
0f41302f
MS
1480 but we know it can't. So add ourselves and then do
1481 TRUNC_DIV_EXPR. */
0f56a403 1482 target = expand_binop (Pmode, add_optab, target,
5244db05
RK
1483 GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
1484 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1485 target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
b1ec3c92
CH
1486 GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
1487 NULL_RTX, 1);
091ad0b9 1488 target = expand_mult (Pmode, target,
b1ec3c92
CH
1489 GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
1490 NULL_RTX, 1);
091ad0b9 1491 }
18ca7dab
RK
1492
1493 /* Some systems require a particular insn to refer to the stack
1494 to make the pages exist. */
1495#ifdef HAVE_probe
1496 if (HAVE_probe)
1497 emit_insn (gen_probe ());
1498#endif
1499
15fc0026 1500 /* Record the new stack level for nonlocal gotos. */
ba716ac9 1501 if (nonlocal_goto_handler_slots != 0)
15fc0026
RK
1502 emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);
1503
18ca7dab
RK
1504 return target;
1505}
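
/* Illustrative use (roughly how an alloca of SIZE_RTX bytes might be
   expanded; the known alignment of BITS_PER_UNIT is an assumption for
   exposition):

     rtx addr = allocate_dynamic_stack_space (size_rtx, NULL_RTX,
                                              BITS_PER_UNIT);

   The returned address is aligned to BIGGEST_ALIGNMENT, and the stack
   pointer has already been adjusted by the (possibly rounded) size.  */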
1506\f
14a774a9
RK
1507/* A front end may want to override GCC's stack checking by providing a
1508 run-time routine to call to check the stack, so provide a mechanism for
1509 calling that routine. */
1510
1511static rtx stack_check_libfunc;
1512
1513void
1514set_stack_check_libfunc (libfunc)
1515 rtx libfunc;
1516{
1517 stack_check_libfunc = libfunc;
1518}
1519\f
edff2491
RK
1520/* Emit one stack probe at ADDRESS, an address within the stack. */
1521
1522static void
1523emit_stack_probe (address)
1524 rtx address;
1525{
38a448ca 1526 rtx memref = gen_rtx_MEM (word_mode, address);
edff2491
RK
1527
1528 MEM_VOLATILE_P (memref) = 1;
1529
1530 if (STACK_CHECK_PROBE_LOAD)
1531 emit_move_insn (gen_reg_rtx (word_mode), memref);
1532 else
1533 emit_move_insn (memref, const0_rtx);
1534}
1535
1536/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
1537 FIRST is a constant and size is a Pmode RTX. These are offsets from the
1538 current stack pointer. STACK_GROWS_DOWNWARD says whether to add or
1539 subtract from the stack. If SIZE is constant, this is done
1540 with a fixed number of probes. Otherwise, we must make a loop. */
1541
1542#ifdef STACK_GROWS_DOWNWARD
1543#define STACK_GROW_OP MINUS
1544#else
1545#define STACK_GROW_OP PLUS
1546#endif
1547
1548void
1549probe_stack_range (first, size)
1550 HOST_WIDE_INT first;
1551 rtx size;
1552{
14a774a9
RK
1553 /* First see if the front end has set up a function for us to call to
1554 check the stack. */
1555 if (stack_check_libfunc != 0)
f5f5363f
RK
1556 {
1557 rtx addr = memory_address (QImode,
1558 gen_rtx (STACK_GROW_OP, Pmode,
1559 stack_pointer_rtx,
1560 plus_constant (size, first)));
1561
1562#ifdef POINTERS_EXTEND_UNSIGNED
1563 if (GET_MODE (addr) != ptr_mode)
1564 addr = convert_memory_address (ptr_mode, addr);
1565#endif
1566
1567 emit_library_call (stack_check_libfunc, 0, VOIDmode, 1, addr,
1568 ptr_mode);
1569 }
14a774a9
RK
1570
1571 /* Next see if we have an insn to check the stack. Use it if so. */
edff2491 1572#ifdef HAVE_check_stack
14a774a9 1573 else if (HAVE_check_stack)
edff2491 1574 {
a995e389 1575 insn_operand_predicate_fn pred;
38a448ca
RH
1576 rtx last_addr
 1577 = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1578 stack_pointer_rtx,
1579 plus_constant (size, first)),
1580 NULL_RTX);
edff2491 1581
a995e389
RH
1582 pred = insn_data[(int) CODE_FOR_check_stack].operand[0].predicate;
1583 if (pred && ! ((*pred) (last_addr, Pmode)))
c5c76735 1584 last_addr = copy_to_mode_reg (Pmode, last_addr);
edff2491 1585
c5c76735 1586 emit_insn (gen_check_stack (last_addr));
edff2491
RK
1587 }
1588#endif
1589
1590 /* If we have to generate explicit probes, see if we have a constant
95a086b1 1591 small number of them to generate. If so, that's the easy case. */
14a774a9
RK
1592 else if (GET_CODE (size) == CONST_INT
1593 && INTVAL (size) < 10 * STACK_CHECK_PROBE_INTERVAL)
edff2491
RK
1594 {
1595 HOST_WIDE_INT offset;
1596
1597 /* Start probing at FIRST + N * STACK_CHECK_PROBE_INTERVAL
1598 for values of N from 1 until it exceeds LAST. If only one
1599 probe is needed, this will not generate any code. Then probe
1600 at LAST. */
1601 for (offset = first + STACK_CHECK_PROBE_INTERVAL;
1602 offset < INTVAL (size);
1603 offset = offset + STACK_CHECK_PROBE_INTERVAL)
38a448ca
RH
1604 emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1605 stack_pointer_rtx,
1606 GEN_INT (offset)));
edff2491 1607
38a448ca
RH
1608 emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1609 stack_pointer_rtx,
1610 plus_constant (size, first)));
edff2491
RK
1611 }
1612
1613 /* In the variable case, do the same as above, but in a loop. We emit loop
1614 notes so that loop optimization can be done. */
1615 else
1616 {
1617 rtx test_addr
38a448ca
RH
1618 = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1619 stack_pointer_rtx,
1620 GEN_INT (first + STACK_CHECK_PROBE_INTERVAL)),
edff2491
RK
1621 NULL_RTX);
1622 rtx last_addr
38a448ca
RH
1623 = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1624 stack_pointer_rtx,
1625 plus_constant (size, first)),
edff2491
RK
1626 NULL_RTX);
1627 rtx incr = GEN_INT (STACK_CHECK_PROBE_INTERVAL);
1628 rtx loop_lab = gen_label_rtx ();
1629 rtx test_lab = gen_label_rtx ();
1630 rtx end_lab = gen_label_rtx ();
1631 rtx temp;
1632
1633 if (GET_CODE (test_addr) != REG
1634 || REGNO (test_addr) < FIRST_PSEUDO_REGISTER)
1635 test_addr = force_reg (Pmode, test_addr);
1636
1637 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
1638 emit_jump (test_lab);
1639
1640 emit_label (loop_lab);
1641 emit_stack_probe (test_addr);
1642
1643 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
1644
1645#ifdef STACK_GROWS_DOWNWARD
1646#define CMP_OPCODE GTU
1647 temp = expand_binop (Pmode, sub_optab, test_addr, incr, test_addr,
1648 1, OPTAB_WIDEN);
1649#else
1650#define CMP_OPCODE LTU
1651 temp = expand_binop (Pmode, add_optab, test_addr, incr, test_addr,
1652 1, OPTAB_WIDEN);
1653#endif
1654
1655 if (temp != test_addr)
1656 abort ();
1657
1658 emit_label (test_lab);
c5d5d461
JL
1659 emit_cmp_and_jump_insns (test_addr, last_addr, CMP_OPCODE,
1660 NULL_RTX, Pmode, 1, 0, loop_lab);
edff2491
RK
1661 emit_jump (end_lab);
1662 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
1663 emit_label (end_lab);
1664
1665 emit_stack_probe (last_addr);
1666 }
1667}
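
/* Illustrative sketch (assuming a STACK_CHECK_PROBE_INTERVAL of 4096 and
   no check_stack pattern or stack-check libfunc): probe_stack_range
   (0, GEN_INT (12288)) emits probes at offsets 4096 and 8192 from the
   stack pointer, followed by a final probe at offset 12288.  */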
1668\f
18ca7dab
RK
1669/* Return an rtx representing the register or memory location
1670 in which a scalar value of data type VALTYPE
1671 was returned by a function call to function FUNC.
1672 FUNC is a FUNCTION_DECL node if the precise function is known,
4dc07bd7
JJ
1673 otherwise 0.
1674 OUTGOING is 1 if on a machine with register windows this function
1675 should return the register in which the function will put its result
1676 and 0 otherwise. */
18ca7dab
RK
1677
1678rtx
4dc07bd7 1679hard_function_value (valtype, func, outgoing)
18ca7dab 1680 tree valtype;
91813b28 1681 tree func ATTRIBUTE_UNUSED;
4dc07bd7 1682 int outgoing ATTRIBUTE_UNUSED;
18ca7dab 1683{
4dc07bd7 1684 rtx val;
770ae6cc 1685
4dc07bd7
JJ
1686#ifdef FUNCTION_OUTGOING_VALUE
1687 if (outgoing)
1688 val = FUNCTION_OUTGOING_VALUE (valtype, func);
1689 else
1690#endif
1691 val = FUNCTION_VALUE (valtype, func);
770ae6cc 1692
e1a4071f
JL
1693 if (GET_CODE (val) == REG
1694 && GET_MODE (val) == BLKmode)
1695 {
770ae6cc 1696 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
e1a4071f 1697 enum machine_mode tmpmode;
770ae6cc 1698
e1a4071f 1699 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
0c61f541 1700 tmpmode != VOIDmode;
e1a4071f
JL
1701 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
1702 {
1703 /* Have we found a large enough mode? */
1704 if (GET_MODE_SIZE (tmpmode) >= bytes)
1705 break;
1706 }
1707
1708 /* No suitable mode found. */
0c61f541 1709 if (tmpmode == VOIDmode)
e1a4071f
JL
1710 abort ();
1711
1712 PUT_MODE (val, tmpmode);
1713 }
1714 return val;
18ca7dab
RK
1715}
1716
1717/* Return an rtx representing the register or memory location
1718 in which a scalar value of mode MODE was returned by a library call. */
1719
1720rtx
1721hard_libcall_value (mode)
1722 enum machine_mode mode;
1723{
1724 return LIBCALL_VALUE (mode);
1725}
0c5e217d
RS
1726
1727/* Look up the tree code for a given rtx code
1728 to provide the arithmetic operation for REAL_ARITHMETIC.
1729 The function returns an int because the caller may not know
1730 what `enum tree_code' means. */
1731
1732int
1733rtx_to_tree_code (code)
1734 enum rtx_code code;
1735{
1736 enum tree_code tcode;
1737
1738 switch (code)
1739 {
1740 case PLUS:
1741 tcode = PLUS_EXPR;
1742 break;
1743 case MINUS:
1744 tcode = MINUS_EXPR;
1745 break;
1746 case MULT:
1747 tcode = MULT_EXPR;
1748 break;
1749 case DIV:
1750 tcode = RDIV_EXPR;
1751 break;
1752 case SMIN:
1753 tcode = MIN_EXPR;
1754 break;
1755 case SMAX:
1756 tcode = MAX_EXPR;
1757 break;
1758 default:
1759 tcode = LAST_AND_UNUSED_TREE_CODE;
1760 break;
1761 }
1762 return ((int) tcode);
1763}
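
/* Illustrative sketch: rtx_to_tree_code (MULT) yields (int) MULT_EXPR,
   while a code with no tree counterpart, such as XOR, maps to
   LAST_AND_UNUSED_TREE_CODE.  */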