/* Subroutines for manipulating rtx's in semantically interesting ways.
   Copyright (C) 1987, 91, 94-97, 1998, 1999 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */


#include "config.h"
#include "system.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "flags.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"
#include "insn-flags.h"
#include "insn-codes.h"

#if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
#define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
#endif

static rtx break_out_memory_refs  PROTO((rtx));
static void emit_stack_probe      PROTO((rtx));

/* Truncate and perhaps sign-extend C as appropriate for MODE.  */
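/* For example, truncating the constant 0x123 for an 8-bit mode yields 0x23,
   while a value whose excess bits are already a sign-extension (such as -1)
   is returned unchanged.  */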

HOST_WIDE_INT
trunc_int_for_mode (c, mode)
     HOST_WIDE_INT c;
     enum machine_mode mode;
{
  int width = GET_MODE_BITSIZE (mode);

  /* We clear out all bits that don't belong in MODE, unless they and our
     sign bit are all one.  So we get either a reasonable negative
     value or a reasonable unsigned value.  */

  if (width < HOST_BITS_PER_WIDE_INT
      && ((c & ((HOST_WIDE_INT) (-1) << (width - 1)))
          != ((HOST_WIDE_INT) (-1) << (width - 1))))
    c &= ((HOST_WIDE_INT) 1 << width) - 1;

  /* If this would be an entire word for the target, but is not for
     the host, then sign-extend on the host so that the number will look
     the same way on the host that it would on the target.

     For example, when building a 64 bit alpha hosted 32 bit sparc
     targeted compiler, then we want the 32 bit unsigned value -1 to be
     represented as a 64 bit value -1, and not as 0x00000000ffffffff.
     The latter confuses the sparc backend.  */

  if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT
      && BITS_PER_WORD == width
      && (c & ((HOST_WIDE_INT) 1 << (width - 1))))
    c |= ((HOST_WIDE_INT) (-1) << width);

  return c;
}

/* Return an rtx for the sum of X and the integer C.

   This function should be used via the `plus_constant' macro.  */
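/* For instance, adding 3 to (const_int 4) yields (const_int 7), while adding
   an offset to a SYMBOL_REF or LABEL_REF produces the sum wrapped in a CONST
   so that the result is still recognized as a constant address.  */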

rtx
plus_constant_wide (x, c)
     register rtx x;
     register HOST_WIDE_INT c;
{
  register RTX_CODE code;
  register enum machine_mode mode;
  register rtx tem;
  int all_constant = 0;

  if (c == 0)
    return x;

 restart:

  code = GET_CODE (x);
  mode = GET_MODE (x);
  switch (code)
    {
    case CONST_INT:
      return GEN_INT (INTVAL (x) + c);

    case CONST_DOUBLE:
      {
        HOST_WIDE_INT l1 = CONST_DOUBLE_LOW (x);
        HOST_WIDE_INT h1 = CONST_DOUBLE_HIGH (x);
        HOST_WIDE_INT l2 = c;
        HOST_WIDE_INT h2 = c < 0 ? ~0 : 0;
        HOST_WIDE_INT lv, hv;

        add_double (l1, h1, l2, h2, &lv, &hv);

        return immed_double_const (lv, hv, VOIDmode);
      }

    case MEM:
      /* If this is a reference to the constant pool, try replacing it with
         a reference to a new constant.  If the resulting address isn't
         valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
          && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
        {
          /* Any rtl we create here must go in a saveable obstack, since
             we might have been called from within combine.  */
          push_obstacks_nochange ();
          rtl_in_saveable_obstack ();
          tem
            = force_const_mem (GET_MODE (x),
                               plus_constant (get_pool_constant (XEXP (x, 0)),
                                              c));
          pop_obstacks ();
          if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
            return tem;
        }
      break;

    case CONST:
      /* If adding to something entirely constant, set a flag
         so that we can add a CONST around the result.  */
      x = XEXP (x, 0);
      all_constant = 1;
      goto restart;

    case SYMBOL_REF:
    case LABEL_REF:
      all_constant = 1;
      break;

    case PLUS:
      /* The interesting case is adding the integer to a sum.
         Look for constant term in the sum and combine
         with C.  For an integer constant term, we make a combined
         integer.  For a constant term that is not an explicit integer,
         we cannot really combine, but group them together anyway.

         Restart or use a recursive call in case the remaining operand is
         something that we handle specially, such as a SYMBOL_REF.

         We may not immediately return from the recursive call here, lest
         all_constant gets lost.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          c += INTVAL (XEXP (x, 1));

          if (GET_MODE (x) != VOIDmode)
            c = trunc_int_for_mode (c, GET_MODE (x));

          x = XEXP (x, 0);
          goto restart;
        }
      else if (CONSTANT_P (XEXP (x, 0)))
        {
          x = gen_rtx_PLUS (mode,
                            plus_constant (XEXP (x, 0), c),
                            XEXP (x, 1));
          c = 0;
        }
      else if (CONSTANT_P (XEXP (x, 1)))
        {
          x = gen_rtx_PLUS (mode,
                            XEXP (x, 0),
                            plus_constant (XEXP (x, 1), c));
          c = 0;
        }
      break;

    default:
      break;
    }

  if (c != 0)
    x = gen_rtx_PLUS (mode, x, GEN_INT (c));

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    return gen_rtx_CONST (mode, x);
  else
    return x;
}

/* This is the same as `plus_constant', except that it handles LO_SUM.

   This function should be used via the `plus_constant_for_output' macro.  */

rtx
plus_constant_for_output_wide (x, c)
     register rtx x;
     register HOST_WIDE_INT c;
{
  register enum machine_mode mode = GET_MODE (x);

  if (GET_CODE (x) == LO_SUM)
    return gen_rtx_LO_SUM (mode, XEXP (x, 0),
                           plus_constant_for_output (XEXP (x, 1), c));

  else
    return plus_constant (x, c);
}

/* If X is a sum, return a new sum like X but lacking any constant terms.
   Add all the removed constant terms into *CONSTPTR.
   X itself is not altered.  The result != X if and only if
   it is not isomorphic to X.  */
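/* E.g. with X = (plus (reg) (const_int 4)) and *CONSTPTR = const0_rtx, the
   result is (reg) and *CONSTPTR becomes (const_int 4).  */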

rtx
eliminate_constant_term (x, constptr)
     rtx x;
     rtx *constptr;
{
  register rtx x0, x1;
  rtx tem;

  if (GET_CODE (x) != PLUS)
    return x;

  /* First handle constants appearing at this level explicitly.  */
  if (GET_CODE (XEXP (x, 1)) == CONST_INT
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
                                                XEXP (x, 1)))
      && GET_CODE (tem) == CONST_INT)
    {
      *constptr = tem;
      return eliminate_constant_term (XEXP (x, 0), constptr);
    }

  tem = const0_rtx;
  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
                                                *constptr, tem))
      && GET_CODE (tem) == CONST_INT)
    {
      *constptr = tem;
      return gen_rtx_PLUS (GET_MODE (x), x0, x1);
    }

  return x;
}

/* Returns the insn that next references REG after INSN, or 0
   if REG is clobbered before it is next referenced or we cannot find
   an insn that references REG in a straight-line piece of code.  */

rtx
find_next_ref (reg, insn)
     rtx reg;
     rtx insn;
{
  rtx next;

  for (insn = NEXT_INSN (insn); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE)
        continue;
      if (GET_CODE (insn) == CODE_LABEL
          || GET_CODE (insn) == BARRIER)
        return 0;
      if (GET_CODE (insn) == INSN
          || GET_CODE (insn) == JUMP_INSN
          || GET_CODE (insn) == CALL_INSN)
        {
          if (reg_set_p (reg, insn))
            return 0;
          if (reg_mentioned_p (reg, PATTERN (insn)))
            return insn;
          if (GET_CODE (insn) == JUMP_INSN)
            {
              if (simplejump_p (insn))
                next = JUMP_LABEL (insn);
              else
                return 0;
            }
          if (GET_CODE (insn) == CALL_INSN
              && REGNO (reg) < FIRST_PSEUDO_REGISTER
              && call_used_regs[REGNO (reg)])
            return 0;
        }
      else
        abort ();
    }
  return 0;
}

/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (exp)
     tree exp;
{
  tree size = size_in_bytes (TREE_TYPE (exp));

  if (TREE_CODE (size) != INTEGER_CST
      && contains_placeholder_p (size))
    size = build (WITH_RECORD_EXPR, sizetype, size, exp);

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype),
                      EXPAND_MEMORY_USE_BAD);
}

/* Return a copy of X in which all memory references
   and all constants that involve symbol refs
   have been replaced with new temporary registers.
   Also emit code to load the memory locations and constants
   into those registers.

   If X contains no such constants or memory references,
   X itself (not a copy) is returned.

   If a constant is found in the address that is not a legitimate constant
   in an insn, it is left alone in the hope that it might be valid in the
   address.

   X may contain no arithmetic except addition, subtraction and multiplication.
   Values returned by expand_expr with 1 for sum_ok fit this constraint.  */
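/* For example, given (plus (mem A) (reg B)), the MEM is loaded into a fresh
   pseudo register R and the result is (plus (reg R) (reg B)).  */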

static rtx
break_out_memory_refs (x)
     register rtx x;
{
  if (GET_CODE (x) == MEM
      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
          && GET_MODE (x) != VOIDmode))
    x = force_reg (GET_MODE (x), x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
           || GET_CODE (x) == MULT)
    {
      register rtx op0 = break_out_memory_refs (XEXP (x, 0));
      register rtx op1 = break_out_memory_refs (XEXP (x, 1));

      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        x = gen_rtx_fmt_ee (GET_CODE (x), Pmode, op0, op1);
    }

  return x;
}

#ifdef POINTERS_EXTEND_UNSIGNED

/* Given X, a memory address in ptr_mode, convert it to an address
   in Pmode, or vice versa (TO_MODE says which way).  We take advantage of
   the fact that pointers are not allowed to overflow by commuting arithmetic
   operations over conversions so that address arithmetic insns can be
   used.  */
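/* For instance, when widening an address from a 32-bit ptr_mode to a 64-bit
   Pmode, (plus:SI (reg:SI R) (const_int 4)) becomes
   (plus:DI (reg:DI R') (const_int 4)), where R' is R converted separately,
   instead of converting the whole sum.  */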

rtx
convert_memory_address (to_mode, x)
     enum machine_mode to_mode;
     rtx x;
{
  enum machine_mode from_mode = to_mode == ptr_mode ? Pmode : ptr_mode;
  rtx temp;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    {
    case CONST_INT:
    case CONST_DOUBLE:
      return x;

    case LABEL_REF:
      temp = gen_rtx_LABEL_REF (to_mode, XEXP (x, 0));
      LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
      return temp;

    case SYMBOL_REF:
      temp = gen_rtx_SYMBOL_REF (to_mode, XSTR (x, 0));
      SYMBOL_REF_FLAG (temp) = SYMBOL_REF_FLAG (x);
      CONSTANT_POOL_ADDRESS_P (temp) = CONSTANT_POOL_ADDRESS_P (x);
      return temp;

    case CONST:
      return gen_rtx_CONST (to_mode,
                            convert_memory_address (to_mode, XEXP (x, 0)));

    case PLUS:
    case MULT:
      /* For addition, if the second operand is a small constant, we can
         safely permute the conversion and addition operation.  We can
         always safely permute them if we are making the address narrower.
         In addition, always permute the operations if this is a constant.  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
          || (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT
              && (INTVAL (XEXP (x, 1)) + 20000 < 40000
                  || CONSTANT_P (XEXP (x, 0)))))
        return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
                               convert_memory_address (to_mode, XEXP (x, 0)),
                               convert_memory_address (to_mode, XEXP (x, 1)));
      break;

    default:
      break;
    }

  return convert_modes (to_mode, from_mode,
                        x, POINTERS_EXTEND_UNSIGNED);
}
#endif

/* Given a memory address or facsimile X, construct a new address,
   currently equivalent, that is stable: future stores won't change it.

   X must be composed of constants, register and memory references
   combined with addition, subtraction and multiplication:
   in other words, just what you can get from expand_expr if sum_ok is 1.

   Works by making copies of all regs and memory locations used
   by X and combining them the same way X does.
   You could also stabilize the reference to this address
   by copying the address to a register with copy_to_reg;
   but then you wouldn't get indexed addressing in the reference.  */

rtx
copy_all_regs (x)
     register rtx x;
{
  if (GET_CODE (x) == REG)
    {
      if (REGNO (x) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
          && REGNO (x) != HARD_FRAME_POINTER_REGNUM
#endif
          )
        x = copy_to_reg (x);
    }
  else if (GET_CODE (x) == MEM)
    x = copy_to_reg (x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
           || GET_CODE (x) == MULT)
    {
      register rtx op0 = copy_all_regs (XEXP (x, 0));
      register rtx op1 = copy_all_regs (XEXP (x, 1));
      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        x = gen_rtx_fmt_ee (GET_CODE (x), Pmode, op0, op1);
    }
  return x;
}

/* Return something equivalent to X but valid as a memory address
   for something of mode MODE.  When X is not itself valid, this
   works by copying X or subexpressions of it into registers.  */

rtx
memory_address (mode, x)
     enum machine_mode mode;
     register rtx x;
{
  register rtx oldx = x;

  if (GET_CODE (x) == ADDRESSOF)
    return x;

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (x) == ptr_mode)
    x = convert_memory_address (Pmode, x);
#endif

  /* By passing constant addresses through registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (Pmode, x);

  /* Accept a QUEUED that refers to a REG
     even though that isn't a valid address.
     On attempting to put this in an insn we will call protect_from_queue
     which will turn it into a REG, which is valid.  */
  else if (GET_CODE (x) == QUEUED
           && GET_CODE (QUEUED_VAR (x)) == REG)
    ;

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && GET_CODE (x) != REG)
        x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.  */
      GO_IF_LEGITIMATE_ADDRESS (mode, x, win);

      /* If it was valid before but breaking out memory refs invalidated it,
         use it the old way.  */
      if (memory_address_p (mode, oldx))
        goto win2;

      /* Perform machine-dependent transformations on X
         in certain cases.  This is not necessary since the code
         below can handle all possible cases, but machine-dependent
         transformations can make better code.  */
      LEGITIMIZE_ADDRESS (x, oldx, mode, win);

      /* PLUS and MULT can appear in special ways
         as the result of attempts to make an address usable for indexing.
         Usually they are dealt with by calling force_operand, below.
         But a sum containing constant terms is special
         if removing them makes the sum a valid address:
         then we generate that address in a register
         and index off of it.  We do this because it often makes
         shorter code, and because the addresses thus generated
         in registers often become common subexpressions.  */
      if (GET_CODE (x) == PLUS)
        {
          rtx constant_term = const0_rtx;
          rtx y = eliminate_constant_term (x, &constant_term);
          if (constant_term == const0_rtx
              || ! memory_address_p (mode, y))
            x = force_operand (x, NULL_RTX);
          else
            {
              y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
              if (! memory_address_p (mode, y))
                x = force_operand (x, NULL_RTX);
              else
                x = y;
            }
        }

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
        x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
         it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (GET_CODE (x) == REG)
        x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
         the register is a valid address.  */
      else
        x = force_reg (Pmode, x);

      goto done;

    win2:
      x = oldx;
    win:
      if (flag_force_addr && ! cse_not_expected && GET_CODE (x) != REG
          /* Don't copy an addr via a reg if it is one of our stack slots.  */
          && ! (GET_CODE (x) == PLUS
                && (XEXP (x, 0) == virtual_stack_vars_rtx
                    || XEXP (x, 0) == virtual_incoming_args_rtx)))
        {
          if (general_operand (x, Pmode))
            x = force_reg (Pmode, x);
          else
            x = force_operand (x, NULL_RTX);
        }
    }

 done:

  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (GET_CODE (x) == REG)
    mark_reg_pointer (x, 1);
  else if (GET_CODE (x) == PLUS
           && GET_CODE (XEXP (x, 0)) == REG
           && GET_CODE (XEXP (x, 1)) == CONST_INT)
    mark_reg_pointer (XEXP (x, 0), 1);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}

/* Like `memory_address' but pretend `flag_force_addr' is 0.  */

rtx
memory_address_noforce (mode, x)
     enum machine_mode mode;
     rtx x;
{
  int ambient_force_addr = flag_force_addr;
  rtx val;

  flag_force_addr = 0;
  val = memory_address (mode, x);
  flag_force_addr = ambient_force_addr;
  return val;
}

/* Convert a mem ref into one with a valid memory address.
   Pass through anything else unchanged.  */

rtx
validize_mem (ref)
     rtx ref;
{
  if (GET_CODE (ref) != MEM)
    return ref;
  if (memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
    return ref;
  /* Don't alter REF itself, since that is probably a stack slot.  */
  return change_address (ref, GET_MODE (ref), XEXP (ref, 0));
}

/* Return a modified copy of X with its memory address copied
   into a temporary register to protect it from side effects.
   If X is not a MEM, it is returned unchanged (and not copied).
   Perhaps even if it is a MEM, if there is no need to change it.  */

rtx
stabilize (x)
     rtx x;
{
  register rtx addr;
  if (GET_CODE (x) != MEM)
    return x;
  addr = XEXP (x, 0);
  if (rtx_unstable_p (addr))
    {
      rtx temp = copy_all_regs (addr);
      rtx mem;
      if (GET_CODE (temp) != REG)
        temp = copy_to_reg (temp);
      mem = gen_rtx_MEM (GET_MODE (x), temp);

      /* Mark returned memref with in_struct if it's in an array or
         structure.  Copy const and volatile from original memref.  */

      RTX_UNCHANGING_P (mem) = RTX_UNCHANGING_P (x);
      MEM_COPY_ATTRIBUTES (mem, x);
      if (GET_CODE (addr) == PLUS)
        MEM_SET_IN_STRUCT_P (mem, 1);

      /* Since the new MEM is just like the old X, it can alias only
         the things that X could.  */
      MEM_ALIAS_SET (mem) = MEM_ALIAS_SET (x);

      return mem;
    }
  return x;
}

/* Copy the value or contents of X to a new temp reg and return that reg.  */

rtx
copy_to_reg (x)
     rtx x;
{
  register rtx temp = gen_reg_rtx (GET_MODE (x));

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (x != temp)
    emit_move_insn (temp, x);

  return temp;
}

/* Like copy_to_reg but always give the new register mode Pmode
   in case X is a constant.  */

rtx
copy_addr_to_reg (x)
     rtx x;
{
  return copy_to_mode_reg (Pmode, x);
}

/* Like copy_to_reg but always give the new register mode MODE
   in case X is a constant.  */

rtx
copy_to_mode_reg (mode, x)
     enum machine_mode mode;
     rtx x;
{
  register rtx temp = gen_reg_rtx (mode);

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (GET_MODE (x) != mode && GET_MODE (x) != VOIDmode)
    abort ();
  if (x != temp)
    emit_move_insn (temp, x);
  return temp;
}

/* Load X into a register if it is not already one.
   Use mode MODE for the register.
   X should be valid for mode MODE, but it may be a constant which
   is valid for all integer modes; that's why the caller must specify MODE.

   The caller must not alter the value in the register we return,
   since we mark it as a "constant" register.  */

rtx
force_reg (mode, x)
     enum machine_mode mode;
     rtx x;
{
  register rtx temp, insn, set;

  if (GET_CODE (x) == REG)
    return x;

  temp = gen_reg_rtx (mode);

  if (! general_operand (x, mode))
    x = force_operand (x, NULL_RTX);

  insn = emit_move_insn (temp, x);

  /* Let optimizers know that TEMP's value never changes
     and that X can be substituted for it.  Don't get confused
     if INSN set something else (such as a SUBREG of TEMP).  */
  if (CONSTANT_P (x)
      && (set = single_set (insn)) != 0
      && SET_DEST (set) == temp)
    {
      rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);

      if (note)
        XEXP (note, 0) = x;
      else
        REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, x, REG_NOTES (insn));
    }
  return temp;
}

/* If X is a memory ref, copy its contents to a new temp reg and return
   that reg.  Otherwise, return X.  */

rtx
force_not_mem (x)
     rtx x;
{
  register rtx temp;
  if (GET_CODE (x) != MEM || GET_MODE (x) == BLKmode)
    return x;
  temp = gen_reg_rtx (GET_MODE (x));
  emit_move_insn (temp, x);
  return temp;
}

/* Copy X to TARGET (if it's nonzero and a reg)
   or to a new temp reg and return that reg.
   MODE is the mode to use for X in case it is a constant.  */

rtx
copy_to_suggested_reg (x, target, mode)
     rtx x, target;
     enum machine_mode mode;
{
  register rtx temp;

  if (target && GET_CODE (target) == REG)
    temp = target;
  else
    temp = gen_reg_rtx (mode);

  emit_move_insn (temp, x);
  return temp;
}

/* Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_CALL is non-zero if this call is promoting args for a call.  */
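/* For example, on a target whose PROMOTE_MODE widens sub-word integers, a
   HImode `short' comes back widened (typically to SImode); pointer and
   reference types come back as Pmode with *PUNSIGNEDP set from
   POINTERS_EXTEND_UNSIGNED when that macro is defined.  */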

enum machine_mode
promote_mode (type, mode, punsignedp, for_call)
     tree type;
     enum machine_mode mode;
     int *punsignedp;
     int for_call ATTRIBUTE_UNUSED;
{
  enum tree_code code = TREE_CODE (type);
  int unsignedp = *punsignedp;

#ifdef PROMOTE_FOR_CALL_ONLY
  if (! for_call)
    return mode;
#endif

  switch (code)
    {
#ifdef PROMOTE_MODE
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case CHAR_TYPE:      case REAL_TYPE:       case OFFSET_TYPE:
      PROMOTE_MODE (mode, unsignedp, type);
      break;
#endif

#ifdef POINTERS_EXTEND_UNSIGNED
    case REFERENCE_TYPE:
    case POINTER_TYPE:
      mode = Pmode;
      unsignedp = POINTERS_EXTEND_UNSIGNED;
      break;
#endif

    default:
      break;
    }

  *punsignedp = unsignedp;
  return mode;
}

/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
   This pops when ADJUST is positive.  ADJUST need not be constant.  */

void
adjust_stack (adjust)
     rtx adjust;
{
  rtx temp;
  adjust = protect_from_queue (adjust, 0);

  if (adjust == const0_rtx)
    return;

  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                       add_optab,
#else
                       sub_optab,
#endif
                       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
                       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}

/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
   This pushes when ADJUST is positive.  ADJUST need not be constant.  */

void
anti_adjust_stack (adjust)
     rtx adjust;
{
  rtx temp;
  adjust = protect_from_queue (adjust, 0);

  if (adjust == const0_rtx)
    return;

  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                       sub_optab,
#else
                       add_optab,
#endif
                       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
                       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}

/* Round the size of a block to be pushed up to the boundary required
   by this machine.  SIZE is the desired size, which need not be constant.  */

rtx
round_push (size)
     rtx size;
{
#ifdef PREFERRED_STACK_BOUNDARY
  int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  if (align == 1)
    return size;
  if (GET_CODE (size) == CONST_INT)
    {
      int new = (INTVAL (size) + align - 1) / align * align;
      if (INTVAL (size) != new)
        size = GEN_INT (new);
    }
  else
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
         but we know it can't.  So add ourselves and then do
         TRUNC_DIV_EXPR.  */
      size = expand_binop (Pmode, add_optab, size, GEN_INT (align - 1),
                           NULL_RTX, 1, OPTAB_LIB_WIDEN);
      size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, GEN_INT (align),
                            NULL_RTX, 1);
      size = expand_mult (Pmode, size, GEN_INT (align), NULL_RTX, 1);
    }
#endif /* PREFERRED_STACK_BOUNDARY */
  return size;
}

/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
   to a previously-created save area.  If no save area has been allocated,
   this function will allocate one.  If a save area is specified, it
   must be of the proper mode.

   The insns are emitted after insn AFTER, if nonzero, otherwise the insns
   are emitted at the current position.  */
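/* A typical use is emit_stack_save (SAVE_BLOCK, &save_area, NULL_RTX) before
   a variable-sized allocation, paired with a later emit_stack_restore of the
   same save area.  */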

void
emit_stack_save (save_level, psave, after)
     enum save_level save_level;
     rtx *psave;
     rtx after;
{
  rtx sa = *psave;
  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx (*fcn) PROTO ((rtx, rtx)) = gen_move_insn;
  enum machine_mode mode = STACK_SAVEAREA_MODE (save_level);

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_save_stack_block
    case SAVE_BLOCK:
      if (HAVE_save_stack_block)
        fcn = gen_save_stack_block;
      break;
#endif
#ifdef HAVE_save_stack_function
    case SAVE_FUNCTION:
      if (HAVE_save_stack_function)
        fcn = gen_save_stack_function;
      break;
#endif
#ifdef HAVE_save_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_save_stack_nonlocal)
        fcn = gen_save_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  /* If there is no save area and we have to allocate one, do so.  Otherwise
     verify the save area is the proper mode.  */

  if (sa == 0)
    {
      if (mode != VOIDmode)
        {
          if (save_level == SAVE_NONLOCAL)
            *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
          else
            *psave = sa = gen_reg_rtx (mode);
        }
    }
  else
    {
      if (mode == VOIDmode || GET_MODE (sa) != mode)
        abort ();
    }

  if (after)
    {
      rtx seq;

      start_sequence ();
      /* We must validize inside the sequence, to ensure that any instructions
         created by the validize call also get moved to the right place.  */
      if (sa != 0)
        sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
      seq = gen_sequence ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    {
      if (sa != 0)
        sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
    }
}

/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
   area made by emit_stack_save.  If it is zero, we have nothing to do.

   Put any emitted insns after insn AFTER, if nonzero, otherwise at
   current position.  */

void
emit_stack_restore (save_level, sa, after)
     enum save_level save_level;
     rtx after;
     rtx sa;
{
  /* The default is that we use a move insn.  */
  rtx (*fcn) PROTO ((rtx, rtx)) = gen_move_insn;

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_restore_stack_block
    case SAVE_BLOCK:
      if (HAVE_restore_stack_block)
        fcn = gen_restore_stack_block;
      break;
#endif
#ifdef HAVE_restore_stack_function
    case SAVE_FUNCTION:
      if (HAVE_restore_stack_function)
        fcn = gen_restore_stack_function;
      break;
#endif
#ifdef HAVE_restore_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_restore_stack_nonlocal)
        fcn = gen_restore_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  if (sa != 0)
    sa = validize_mem (sa);

  if (after)
    {
      rtx seq;

      start_sequence ();
      emit_insn (fcn (stack_pointer_rtx, sa));
      seq = gen_sequence ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    emit_insn (fcn (stack_pointer_rtx, sa));
}

#ifdef SETJMP_VIA_SAVE_AREA
/* Optimize RTL generated by allocate_dynamic_stack_space for targets
   where SETJMP_VIA_SAVE_AREA is true.  The problem is that on these
   platforms, the dynamic stack space used can corrupt the original
   frame, thus causing a crash if a longjmp unwinds to it.  */

void
optimize_save_area_alloca (insns)
     rtx insns;
{
  rtx insn;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      rtx note;

      if (GET_CODE (insn) != INSN)
        continue;

      for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
        {
          if (REG_NOTE_KIND (note) != REG_SAVE_AREA)
            continue;

          if (!current_function_calls_setjmp)
            {
              rtx pat = PATTERN (insn);

              /* If we do not see the note in a pattern matching
                 these precise characteristics, we did something
                 entirely wrong in allocate_dynamic_stack_space.

                 Note, one way this could happen is if SETJMP_VIA_SAVE_AREA
                 was defined on a machine where stacks grow towards higher
                 addresses.

                 Right now the only supported port whose stack grows upward
                 is the HPPA, and it does not define SETJMP_VIA_SAVE_AREA.  */
              if (GET_CODE (pat) != SET
                  || SET_DEST (pat) != stack_pointer_rtx
                  || GET_CODE (SET_SRC (pat)) != MINUS
                  || XEXP (SET_SRC (pat), 0) != stack_pointer_rtx)
                abort ();

              /* This will now be transformed into a (set REG REG)
                 so we can just blow away all the other notes.  */
              XEXP (SET_SRC (pat), 1) = XEXP (note, 0);
              REG_NOTES (insn) = NULL_RTX;
            }
          else
            {
              /* setjmp was called, we must remove the REG_SAVE_AREA
                 note so that later passes do not get confused by its
                 presence.  */
              if (note == REG_NOTES (insn))
                {
                  REG_NOTES (insn) = XEXP (note, 1);
                }
              else
                {
                  rtx srch;

                  for (srch = REG_NOTES (insn); srch; srch = XEXP (srch, 1))
                    if (XEXP (srch, 1) == note)
                      break;

                  if (srch == NULL_RTX)
                    abort ();

                  XEXP (srch, 1) = XEXP (note, 1);
                }
            }
          /* Once we've seen the note of interest, we need not look at
             the rest of them.  */
          break;
        }
    }
}
#endif /* SETJMP_VIA_SAVE_AREA */

/* Return an rtx representing the address of an area of memory dynamically
   pushed on the stack.  This region of memory is always aligned to
   a multiple of BIGGEST_ALIGNMENT.

   Any required stack pointer alignment is preserved.

   SIZE is an rtx representing the size of the area.
   TARGET is a place in which the address can be placed.

   KNOWN_ALIGN is the alignment (in bits) that we know SIZE has.  */
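/* This is the routine behind `alloca'-style allocations (note the
   current_function_calls_alloca flag set below); the address is returned in
   TARGET when TARGET is a suitable pseudo register, otherwise in a fresh
   Pmode pseudo.  */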

rtx
allocate_dynamic_stack_space (size, target, known_align)
     rtx size;
     rtx target;
     int known_align;
{
#ifdef SETJMP_VIA_SAVE_AREA
  rtx setjmpless_size = NULL_RTX;
#endif

  /* If we're asking for zero bytes, it doesn't matter what we point
     to since we can't dereference it.  But return a reasonable
     address anyway.  */
  if (size == const0_rtx)
    return virtual_stack_dynamic_rtx;

  /* Otherwise, show we're calling alloca or equivalent.  */
  current_function_calls_alloca = 1;

  /* Ensure the size is in the proper mode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* We will need to ensure that the address we return is aligned to
     BIGGEST_ALIGNMENT.  If STACK_DYNAMIC_OFFSET is defined, we don't
     always know its final value at this point in the compilation (it
     might depend on the size of the outgoing parameter lists, for
     example), so we must align the value to be returned in that case.
     (Note that STACK_DYNAMIC_OFFSET will have a default non-zero value if
     STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).

     We must also do an alignment operation on the returned value if
     the stack pointer alignment is less strict than BIGGEST_ALIGNMENT.

     If we have to align, we must leave space in SIZE for the hole
     that might result from the alignment operation.  */

#if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET) || ! defined (PREFERRED_STACK_BOUNDARY)
#define MUST_ALIGN 1
#else
#define MUST_ALIGN (PREFERRED_STACK_BOUNDARY < BIGGEST_ALIGNMENT)
#endif

  if (MUST_ALIGN)
    {
      if (GET_CODE (size) == CONST_INT)
        size = GEN_INT (INTVAL (size)
                        + (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1));
      else
        size = expand_binop (Pmode, add_optab, size,
                             GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
                             NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

#ifdef SETJMP_VIA_SAVE_AREA
  /* If setjmp restores regs from a save area in the stack frame,
     avoid clobbering the reg save area.  Note that the offset of
     virtual_incoming_args_rtx includes the preallocated stack args space.
     It would be no problem to clobber that, but it's on the wrong side
     of the old save area.  */
  {
    rtx dynamic_offset
      = expand_binop (Pmode, sub_optab, virtual_stack_dynamic_rtx,
                      stack_pointer_rtx, NULL_RTX, 1, OPTAB_LIB_WIDEN);

    if (!current_function_calls_setjmp)
      {
        int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

        /* See optimize_save_area_alloca to understand what is being
           set up here.  */

#if !defined(PREFERRED_STACK_BOUNDARY) || !defined(MUST_ALIGN) || (PREFERRED_STACK_BOUNDARY != BIGGEST_ALIGNMENT)
        /* If anyone creates a target with these characteristics, let them
           know that our optimization cannot work correctly in such a case.  */
        abort ();
#endif

        if (GET_CODE (size) == CONST_INT)
          {
            int new = INTVAL (size) / align * align;

            if (INTVAL (size) != new)
              setjmpless_size = GEN_INT (new);
            else
              setjmpless_size = size;
          }
        else
          {
            /* Since we know overflow is not possible, we avoid using
               CEIL_DIV_EXPR and use TRUNC_DIV_EXPR instead.  */
            setjmpless_size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size,
                                             GEN_INT (align), NULL_RTX, 1);
            setjmpless_size = expand_mult (Pmode, setjmpless_size,
                                           GEN_INT (align), NULL_RTX, 1);
          }
        /* Our optimization works based upon being able to perform a simple
           transformation of this RTL into a (set REG REG) so make sure things
           did in fact end up in a REG.  */
        if (! register_operand (setjmpless_size, Pmode))
          setjmpless_size = force_reg (Pmode, setjmpless_size);
      }

    size = expand_binop (Pmode, add_optab, size, dynamic_offset,
                         NULL_RTX, 1, OPTAB_LIB_WIDEN);
  }
#endif /* SETJMP_VIA_SAVE_AREA */

  /* Round the size to a multiple of the required stack alignment.
     Since the stack is presumed to be rounded before this allocation,
     this will maintain the required alignment.

     If the stack grows downward, we could save an insn by subtracting
     SIZE from the stack pointer and then aligning the stack pointer.
     The problem with this is that the stack pointer may be unaligned
     between the execution of the subtraction and alignment insns and
     some machines do not allow this.  Even on those that do, some
     signal handlers malfunction if a signal should occur between those
     insns.  Since this is an extremely rare event, we have no reliable
     way of knowing which systems have this problem.  So we avoid even
     momentarily mis-aligning the stack.  */

#ifdef PREFERRED_STACK_BOUNDARY
  /* If we added a variable amount to SIZE,
     we can no longer assume it is aligned.  */
#if !defined (SETJMP_VIA_SAVE_AREA)
  if (MUST_ALIGN || known_align % PREFERRED_STACK_BOUNDARY != 0)
#endif
    size = round_push (size);
#endif

  do_pending_stack_adjust ();

  /* If needed, check that we have the required amount of stack.  Take into
     account what has already been checked.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    probe_stack_range (STACK_CHECK_MAX_FRAME_SIZE + STACK_CHECK_PROTECT, size);

  /* Don't use a TARGET that isn't a pseudo.  */
  if (target == 0 || GET_CODE (target) != REG
      || REGNO (target) < FIRST_PSEUDO_REGISTER)
    target = gen_reg_rtx (Pmode);

  mark_reg_pointer (target, known_align / BITS_PER_UNIT);

  /* Perform the required allocation from the stack.  Some systems do
     this differently than simply incrementing/decrementing from the
     stack pointer, such as acquiring the space by calling malloc().  */
#ifdef HAVE_allocate_stack
  if (HAVE_allocate_stack)
    {
      enum machine_mode mode = STACK_SIZE_MODE;

      if (insn_operand_predicate[(int) CODE_FOR_allocate_stack][0]
          && ! ((*insn_operand_predicate[(int) CODE_FOR_allocate_stack][0])
                (target, Pmode)))
#ifdef POINTERS_EXTEND_UNSIGNED
        target = convert_memory_address (Pmode, target);
#else
        target = copy_to_mode_reg (Pmode, target);
#endif
      size = convert_modes (mode, ptr_mode, size, 1);
      if (insn_operand_predicate[(int) CODE_FOR_allocate_stack][1]
          && ! ((*insn_operand_predicate[(int) CODE_FOR_allocate_stack][1])
                (size, mode)))
        size = copy_to_mode_reg (mode, size);

      emit_insn (gen_allocate_stack (target, size));
    }
  else
#endif
    {
#ifndef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif
      size = convert_modes (Pmode, ptr_mode, size, 1);
      anti_adjust_stack (size);
#ifdef SETJMP_VIA_SAVE_AREA
      if (setjmpless_size != NULL_RTX)
        {
          rtx note_target = get_last_insn ();

          REG_NOTES (note_target)
            = gen_rtx_EXPR_LIST (REG_SAVE_AREA, setjmpless_size,
                                 REG_NOTES (note_target));
        }
#endif /* SETJMP_VIA_SAVE_AREA */
#ifdef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif
    }

  if (MUST_ALIGN)
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
         but we know it can't.  So add ourselves and then do
         TRUNC_DIV_EXPR.  */
      target = expand_binop (Pmode, add_optab, target,
                             GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
                             NULL_RTX, 1, OPTAB_LIB_WIDEN);
      target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
                              GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
                              NULL_RTX, 1);
      target = expand_mult (Pmode, target,
                            GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
                            NULL_RTX, 1);
    }

  /* Some systems require a particular insn to refer to the stack
     to make the pages exist.  */
#ifdef HAVE_probe
  if (HAVE_probe)
    emit_insn (gen_probe ());
#endif

  /* Record the new stack level for nonlocal gotos.  */
  if (nonlocal_goto_handler_slots != 0)
    emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);

  return target;
}

/* Emit one stack probe at ADDRESS, an address within the stack.  */

static void
emit_stack_probe (address)
     rtx address;
{
  rtx memref = gen_rtx_MEM (word_mode, address);

  MEM_VOLATILE_P (memref) = 1;

  if (STACK_CHECK_PROBE_LOAD)
    emit_move_insn (gen_reg_rtx (word_mode), memref);
  else
    emit_move_insn (memref, const0_rtx);
}

/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
   FIRST is a constant and SIZE is a Pmode RTX.  These are offsets from the
   current stack pointer.  STACK_GROWS_DOWNWARD says whether to add or
   subtract from the stack.  If SIZE is constant, this is done
   with a fixed number of probes.  Otherwise, we must make a loop.  */

#ifdef STACK_GROWS_DOWNWARD
#define STACK_GROW_OP MINUS
#else
#define STACK_GROW_OP PLUS
#endif

void
probe_stack_range (first, size)
     HOST_WIDE_INT first;
     rtx size;
{
  /* First see if we have an insn to check the stack.  Use it if so.  */
#ifdef HAVE_check_stack
  if (HAVE_check_stack)
    {
      rtx last_addr
        = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                         stack_pointer_rtx,
                                         plus_constant (size, first)),
                         NULL_RTX);

      if (insn_operand_predicate[(int) CODE_FOR_check_stack][0]
          && ! ((*insn_operand_predicate[(int) CODE_FOR_check_stack][0])
                (last_addr, Pmode)))
        last_addr = copy_to_mode_reg (Pmode, last_addr);

      emit_insn (gen_check_stack (last_addr));
      return;
    }
#endif

  /* If we have to generate explicit probes, see if we have a constant
     small number of them to generate.  If so, that's the easy case.  */
  if (GET_CODE (size) == CONST_INT
      && INTVAL (size) < 10 * STACK_CHECK_PROBE_INTERVAL)
    {
      HOST_WIDE_INT offset;

      /* Start probing at FIRST + N * STACK_CHECK_PROBE_INTERVAL
         for values of N from 1 until it exceeds LAST.  If only one
         probe is needed, this will not generate any code.  Then probe
         at LAST.  */
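      /* With FIRST == 0, SIZE == 10000 and STACK_CHECK_PROBE_INTERVAL == 4096,
         for example, this emits probes at offsets 4096, 8192 and 10000.  */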
      for (offset = first + STACK_CHECK_PROBE_INTERVAL;
           offset < INTVAL (size);
           offset = offset + STACK_CHECK_PROBE_INTERVAL)
        emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                          stack_pointer_rtx,
                                          GEN_INT (offset)));

      emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                        stack_pointer_rtx,
                                        plus_constant (size, first)));
    }

  /* In the variable case, do the same as above, but in a loop.  We emit loop
     notes so that loop optimization can be done.  */
  else
    {
      rtx test_addr
        = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                         stack_pointer_rtx,
                                         GEN_INT (first + STACK_CHECK_PROBE_INTERVAL)),
                         NULL_RTX);
      rtx last_addr
        = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                         stack_pointer_rtx,
                                         plus_constant (size, first)),
                         NULL_RTX);
      rtx incr = GEN_INT (STACK_CHECK_PROBE_INTERVAL);
      rtx loop_lab = gen_label_rtx ();
      rtx test_lab = gen_label_rtx ();
      rtx end_lab = gen_label_rtx ();
      rtx temp;

      if (GET_CODE (test_addr) != REG
          || REGNO (test_addr) < FIRST_PSEUDO_REGISTER)
        test_addr = force_reg (Pmode, test_addr);

      emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
      emit_jump (test_lab);

      emit_label (loop_lab);
      emit_stack_probe (test_addr);

      emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);

#ifdef STACK_GROWS_DOWNWARD
#define CMP_OPCODE GTU
      temp = expand_binop (Pmode, sub_optab, test_addr, incr, test_addr,
                           1, OPTAB_WIDEN);
#else
#define CMP_OPCODE LTU
      temp = expand_binop (Pmode, add_optab, test_addr, incr, test_addr,
                           1, OPTAB_WIDEN);
#endif

      if (temp != test_addr)
        abort ();

      emit_label (test_lab);
      emit_cmp_and_jump_insns (test_addr, last_addr, CMP_OPCODE,
                               NULL_RTX, Pmode, 1, 0, loop_lab);
      emit_jump (end_lab);
      emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
      emit_label (end_lab);

      /* If we will be doing stupid optimization, show that test_addr
         is still live.  */
      if (obey_regdecls)
        emit_insn (gen_rtx_USE (VOIDmode, test_addr));

      emit_stack_probe (last_addr);
    }
}

/* Return an rtx representing the register or memory location
   in which a scalar value of data type VALTYPE
   was returned by a function call to function FUNC.
   FUNC is a FUNCTION_DECL node if the precise function is known,
   otherwise 0.  */

rtx
hard_function_value (valtype, func)
     tree valtype;
     tree func ATTRIBUTE_UNUSED;
{
  rtx val = FUNCTION_VALUE (valtype, func);
  if (GET_CODE (val) == REG
      && GET_MODE (val) == BLKmode)
    {
      int bytes = int_size_in_bytes (valtype);
      enum machine_mode tmpmode;
      for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmpmode != MAX_MACHINE_MODE;
           tmpmode = GET_MODE_WIDER_MODE (tmpmode))
        {
          /* Have we found a large enough mode?  */
          if (GET_MODE_SIZE (tmpmode) >= bytes)
            break;
        }

      /* No suitable mode found.  */
      if (tmpmode == MAX_MACHINE_MODE)
        abort ();

      PUT_MODE (val, tmpmode);
    }
  return val;
}

/* Return an rtx representing the register or memory location
   in which a scalar value of mode MODE was returned by a library call.  */

rtx
hard_libcall_value (mode)
     enum machine_mode mode;
{
  return LIBCALL_VALUE (mode);
}

/* Look up the tree code for a given rtx code
   to provide the arithmetic operation for REAL_ARITHMETIC.
   The function returns an int because the caller may not know
   what `enum tree_code' means.  */

int
rtx_to_tree_code (code)
     enum rtx_code code;
{
  enum tree_code tcode;

  switch (code)
    {
    case PLUS:
      tcode = PLUS_EXPR;
      break;
    case MINUS:
      tcode = MINUS_EXPR;
      break;
    case MULT:
      tcode = MULT_EXPR;
      break;
    case DIV:
      tcode = RDIV_EXPR;
      break;
    case SMIN:
      tcode = MIN_EXPR;
      break;
    case SMAX:
      tcode = MAX_EXPR;
      break;
    default:
      tcode = LAST_AND_UNUSED_TREE_CODE;
      break;
    }
  return ((int) tcode);
}