]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/explow.c
*** empty log message ***
[thirdparty/gcc.git] / gcc / explow.c
CommitLineData
18ca7dab
RK
1/* Subroutines for manipulating rtx's in semantically interesting ways.
2 Copyright (C) 1987, 1991 Free Software Foundation, Inc.
3
4This file is part of GNU CC.
5
6GNU CC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GNU CC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GNU CC; see the file COPYING. If not, write to
18the Free Software Foundation, 675 Mass Ave, Cambridge, MA 02139, USA. */
19
20
21#include "config.h"
22#include "rtl.h"
23#include "tree.h"
24#include "flags.h"
25#include "expr.h"
26#include "hard-reg-set.h"
27#include "insn-config.h"
28#include "recog.h"
29#include "insn-flags.h"
30#include "insn-codes.h"
31
/* Return an rtx for the sum of X and the integer C.

   Folds the addition into X where possible (integer constants,
   constant-pool references, symbolic constants, sums); otherwise emits
   an explicit PLUS.  The result is shared-structure-safe: X itself is
   never modified.  */

rtx
plus_constant (x, c)
     register rtx x;
     register int c;
{
  register RTX_CODE code;
  register enum machine_mode mode;
  register rtx tem;
  /* Nonzero if X was entirely constant, so the result should be
     re-wrapped in a CONST.  */
  int all_constant = 0;

  if (c == 0)
    return x;

 restart:

  code = GET_CODE (x);
  mode = GET_MODE (x);
  switch (code)
    {
    case CONST_INT:
      return gen_rtx (CONST_INT, VOIDmode, (INTVAL (x) + c));

    case CONST_DOUBLE:
      {
	/* Double-word integer constant: sign-extend C to two words
	   and do a two-word add with carry.  */
	int l1 = CONST_DOUBLE_LOW (x);
	int h1 = CONST_DOUBLE_HIGH (x);
	int l2 = c;
	int h2 = c < 0 ? ~0 : 0;
	int lv, hv;

	add_double (l1, h1, l2, h2, &lv, &hv);

	return immed_double_const (lv, hv, VOIDmode);
      }

    case MEM:
      /* If this is a reference to the constant pool, try replacing it with
	 a reference to a new constant.  If the resulting address isn't
	 valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
	{
	  tem
	    = force_const_mem (GET_MODE (x),
			       plus_constant (get_pool_constant (XEXP (x, 0)),
					      c));
	  if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
	    return tem;
	}
      break;

    case CONST:
      /* If adding to something entirely constant, set a flag
	 so that we can add a CONST around the result.  */
      x = XEXP (x, 0);
      all_constant = 1;
      goto restart;

    case SYMBOL_REF:
    case LABEL_REF:
      all_constant = 1;
      break;

    case PLUS:
      /* The interesting case is adding the integer to a sum.
	 Look for constant term in the sum and combine
	 with C.  For an integer constant term, we make a combined
	 integer.  For a constant term that is not an explicit integer,
	 we cannot really combine, but group them together anyway.

	 Use a recursive call in case the remaining operand is something
	 that we handle specially, such as a SYMBOL_REF.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	return plus_constant (XEXP (x, 0), c + INTVAL (XEXP (x, 1)));
      else if (CONSTANT_P (XEXP (x, 0)))
	return gen_rtx (PLUS, mode,
			plus_constant (XEXP (x, 0), c),
			XEXP (x, 1));
      else if (CONSTANT_P (XEXP (x, 1)))
	return gen_rtx (PLUS, mode,
			XEXP (x, 0),
			plus_constant (XEXP (x, 1), c));
    }

  /* We reach here when X could not absorb C directly:
     append an explicit PLUS of the constant.  */
  if (c != 0)
    x = gen_rtx (PLUS, mode, x, gen_rtx (CONST_INT, VOIDmode, c));

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    return gen_rtx (CONST, mode, x);
  else
    return x;
}
129
e5671f2b 130/* This is the same as `plus_constant', except that it handles LO_SUM. */
18ca7dab
RK
131
132rtx
133plus_constant_for_output (x, c)
134 register rtx x;
135 register int c;
136{
137 register RTX_CODE code = GET_CODE (x);
138 register enum machine_mode mode = GET_MODE (x);
139 int all_constant = 0;
140
141 if (GET_CODE (x) == LO_SUM)
142 return gen_rtx (LO_SUM, mode, XEXP (x, 0),
143 plus_constant_for_output (XEXP (x, 1), c));
144
145 else
146 return plus_constant (x, c);
147}
148\f
/* If X is a sum, return a new sum like X but lacking any constant terms.
   Add all the removed constant terms into *CONSTPTR.
   X itself is not altered.  The result != X if and only if
   it is not isomorphic to X.  */

rtx
eliminate_constant_term (x, constptr)
     rtx x;
     rtx *constptr;
{
  register rtx x0, x1;
  rtx tem;

  /* Only sums can yield constant terms to strip.  */
  if (GET_CODE (x) != PLUS)
    return x;

  /* First handle constants appearing at this level explicitly.
     The fold is accepted only if simplify_binary_operation produced an
     explicit CONST_INT; otherwise the term stays in place.  */
  if (GET_CODE (XEXP (x, 1)) == CONST_INT
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
						XEXP (x, 1)))
      && GET_CODE (tem) == CONST_INT)
    {
      *constptr = tem;
      return eliminate_constant_term (XEXP (x, 0), constptr);
    }

  /* Accumulate constants stripped from both operands into TEM, then
     fold TEM into *CONSTPTR if the total is still an explicit integer.  */
  tem = const0_rtx;
  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
						*constptr, tem))
      && GET_CODE (tem) == CONST_INT)
    {
      *constptr = tem;
      return gen_rtx (PLUS, GET_MODE (x), x0, x1);
    }

  return x;
}
189
/* Returns the insn that next references REG after INSN, or 0
   if REG is clobbered before next referenced or we cannot find
   an insn that references REG in a straight-line piece of code.  */

rtx
find_next_ref (reg, insn)
     rtx reg;
     rtx insn;
{
  rtx next;

  for (insn = NEXT_INSN (insn); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      /* Notes carry no code; skip them.  */
      if (GET_CODE (insn) == NOTE)
	continue;
      /* A label means control flow may join here, a barrier means it
	 ends: either way the scan is no longer straight-line.  */
      if (GET_CODE (insn) == CODE_LABEL
	  || GET_CODE (insn) == BARRIER)
	return 0;
      if (GET_CODE (insn) == INSN
	  || GET_CODE (insn) == JUMP_INSN
	  || GET_CODE (insn) == CALL_INSN)
	{
	  if (reg_set_p (reg, insn))
	    return 0;
	  if (reg_mentioned_p (reg, PATTERN (insn)))
	    return insn;
	  if (GET_CODE (insn) == JUMP_INSN)
	    {
	      /* Follow an unconditional jump to its target; any other
		 jump ends the straight-line region.  */
	      if (simplejump_p (insn))
		next = JUMP_LABEL (insn);
	      else
		return 0;
	    }
	  /* A call clobbers all call-used hard registers.  */
	  if (GET_CODE (insn) == CALL_INSN
	      && REGNO (reg) < FIRST_PSEUDO_REGISTER
	      && call_used_regs[REGNO (reg)])
	    return 0;
	}
      else
	abort ();
    }
  return 0;
}
234
235/* Return an rtx for the size in bytes of the value of EXP. */
236
237rtx
238expr_size (exp)
239 tree exp;
240{
241 return expand_expr (size_in_bytes (TREE_TYPE (exp)),
242 0, TYPE_MODE (sizetype), 0);
243}
244\f
/* Return a copy of X in which all memory references
   and all constants that involve symbol refs
   have been replaced with new temporary registers.
   Also emit code to load the memory locations and constants
   into those registers.

   If X contains no such constants or memory references,
   X itself (not a copy) is returned.

   If a constant is found in the address that is not a legitimate constant
   in an insn, it is left alone in the hope that it might be valid in the
   address.

   X may contain no arithmetic except addition, subtraction and multiplication.
   Values returned by expand_expr with 1 for sum_ok fit this constraint.  */

static rtx
break_out_memory_refs (x)
     register rtx x;
{
  if (GET_CODE (x) == MEM
      || (CONSTANT_P (x) && LEGITIMATE_CONSTANT_P (x)
	  && GET_MODE (x) != VOIDmode))
    {
      /* Load the MEM or constant into a fresh pseudo.  The VOIDmode
	 test excludes modeless constants such as CONST_INT, which need
	 no register.  */
      register rtx temp = force_reg (GET_MODE (x), x);
      mark_reg_pointer (temp);
      x = temp;
    }
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
	   || GET_CODE (x) == MULT)
    {
      /* Recurse on both operands; rebuild the node only if one of
	 them changed, so unchanged subtrees keep sharing.  */
      register rtx op0 = break_out_memory_refs (XEXP (x, 0));
      register rtx op1 = break_out_memory_refs (XEXP (x, 1));
      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
	x = gen_rtx (GET_CODE (x), Pmode, op0, op1);
    }
  return x;
}
283
284/* Given a memory address or facsimile X, construct a new address,
285 currently equivalent, that is stable: future stores won't change it.
286
287 X must be composed of constants, register and memory references
288 combined with addition, subtraction and multiplication:
289 in other words, just what you can get from expand_expr if sum_ok is 1.
290
291 Works by making copies of all regs and memory locations used
292 by X and combining them the same way X does.
293 You could also stabilize the reference to this address
294 by copying the address to a register with copy_to_reg;
295 but then you wouldn't get indexed addressing in the reference. */
296
297rtx
298copy_all_regs (x)
299 register rtx x;
300{
301 if (GET_CODE (x) == REG)
302 {
303 if (REGNO (x) != FRAME_POINTER_REGNUM)
304 x = copy_to_reg (x);
305 }
306 else if (GET_CODE (x) == MEM)
307 x = copy_to_reg (x);
308 else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
309 || GET_CODE (x) == MULT)
310 {
311 register rtx op0 = copy_all_regs (XEXP (x, 0));
312 register rtx op1 = copy_all_regs (XEXP (x, 1));
313 if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
314 x = gen_rtx (GET_CODE (x), Pmode, op0, op1);
315 }
316 return x;
317}
318\f
/* Return something equivalent to X but valid as a memory address
   for something of mode MODE.  When X is not itself valid, this
   works by copying X or subexpressions of it into registers.  */

rtx
memory_address (mode, x)
     enum machine_mode mode;
     register rtx x;
{
  register rtx oldx;

  /* By passing constant addresses thru registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && LEGITIMATE_CONSTANT_P (x))
    return force_reg (Pmode, x);

  /* Accept a QUEUED that refers to a REG
     even though that isn't a valid address.
     On attempting to put this in an insn we will call protect_from_queue
     which will turn it into a REG, which is valid.  */
  if (GET_CODE (x) == QUEUED
      && GET_CODE (QUEUED_VAR (x)) == REG)
    return x;

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  oldx = x;
  if (! cse_not_expected && GET_CODE (x) != REG)
    x = break_out_memory_refs (x);

  /* At this point, any valid address is accepted.
     (This target macro jumps to `win' below when X is legitimate.)  */
  GO_IF_LEGITIMATE_ADDRESS (mode, x, win);

  /* If it was valid before but breaking out memory refs invalidated it,
     use it the old way.  */
  if (memory_address_p (mode, oldx))
    goto win2;

  /* Perform machine-dependent transformations on X
     in certain cases.  This is not necessary since the code
     below can handle all possible cases, but machine-dependent
     transformations can make better code.
     (This target macro jumps to `win' if it legitimizes X.)  */
  LEGITIMIZE_ADDRESS (x, oldx, mode, win);

  /* PLUS and MULT can appear in special ways
     as the result of attempts to make an address usable for indexing.
     Usually they are dealt with by calling force_operand, below.
     But a sum containing constant terms is special
     if removing them makes the sum a valid address:
     then we generate that address in a register
     and index off of it.  We do this because it often makes
     shorter code, and because the addresses thus generated
     in registers often become common subexpressions.  */
  if (GET_CODE (x) == PLUS)
    {
      rtx constant_term = const0_rtx;
      rtx y = eliminate_constant_term (x, &constant_term);
      if (constant_term == const0_rtx
	  || ! memory_address_p (mode, y))
	return force_operand (x, 0);

      y = gen_rtx (PLUS, GET_MODE (x), copy_to_reg (y), constant_term);
      if (! memory_address_p (mode, y))
	return force_operand (x, 0);
      return y;
    }
  if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
    return force_operand (x, 0);

  /* If we have a register that's an invalid address,
     it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
  if (GET_CODE (x) == REG)
    return copy_to_reg (x);

  /* Last resort: copy the value to a register, since
     the register is a valid address.  */
  return force_reg (Pmode, x);

 win2:
  x = oldx;
 win:
  if (flag_force_addr && ! cse_not_expected && GET_CODE (x) != REG
      /* Don't copy an addr via a reg if it is one of our stack slots.  */
      && ! (GET_CODE (x) == PLUS
	    && (XEXP (x, 0) == virtual_stack_vars_rtx
		|| XEXP (x, 0) == virtual_incoming_args_rtx)))
    {
      if (general_operand (x, Pmode))
	return force_reg (Pmode, x);
      else
	return force_operand (x, 0);
    }
  return x;
}
415
416/* Like `memory_address' but pretend `flag_force_addr' is 0. */
417
418rtx
419memory_address_noforce (mode, x)
420 enum machine_mode mode;
421 rtx x;
422{
423 int ambient_force_addr = flag_force_addr;
424 rtx val;
425
426 flag_force_addr = 0;
427 val = memory_address (mode, x);
428 flag_force_addr = ambient_force_addr;
429 return val;
430}
431
432/* Convert a mem ref into one with a valid memory address.
433 Pass through anything else unchanged. */
434
435rtx
436validize_mem (ref)
437 rtx ref;
438{
439 if (GET_CODE (ref) != MEM)
440 return ref;
441 if (memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
442 return ref;
443 /* Don't alter REF itself, since that is probably a stack slot. */
444 return change_address (ref, GET_MODE (ref), XEXP (ref, 0));
445}
446\f
447/* Return a modified copy of X with its memory address copied
448 into a temporary register to protect it from side effects.
449 If X is not a MEM, it is returned unchanged (and not copied).
450 Perhaps even if it is a MEM, if there is no need to change it. */
451
452rtx
453stabilize (x)
454 rtx x;
455{
456 register rtx addr;
457 if (GET_CODE (x) != MEM)
458 return x;
459 addr = XEXP (x, 0);
460 if (rtx_unstable_p (addr))
461 {
462 rtx temp = copy_all_regs (addr);
463 rtx mem;
464 if (GET_CODE (temp) != REG)
465 temp = copy_to_reg (temp);
466 mem = gen_rtx (MEM, GET_MODE (x), temp);
467
468 /* Mark returned memref with in_struct if it's in an array or
469 structure. Copy const and volatile from original memref. */
470
471 MEM_IN_STRUCT_P (mem) = MEM_IN_STRUCT_P (x) || GET_CODE (addr) == PLUS;
472 RTX_UNCHANGING_P (mem) = RTX_UNCHANGING_P (x);
473 MEM_VOLATILE_P (mem) = MEM_VOLATILE_P (x);
474 return mem;
475 }
476 return x;
477}
478\f
479/* Copy the value or contents of X to a new temp reg and return that reg. */
480
481rtx
482copy_to_reg (x)
483 rtx x;
484{
485 register rtx temp = gen_reg_rtx (GET_MODE (x));
486
487 /* If not an operand, must be an address with PLUS and MULT so
488 do the computation. */
489 if (! general_operand (x, VOIDmode))
490 x = force_operand (x, temp);
491
492 if (x != temp)
493 emit_move_insn (temp, x);
494
495 return temp;
496}
497
/* Like copy_to_reg but always give the new register mode Pmode
   in case X is a constant.  (Constants have VOIDmode, so copy_to_reg
   could not pick a register mode from them; addresses are always
   Pmode.)  */

rtx
copy_addr_to_reg (x)
     rtx x;
{
  return copy_to_mode_reg (Pmode, x);
}
507
508/* Like copy_to_reg but always give the new register mode MODE
509 in case X is a constant. */
510
511rtx
512copy_to_mode_reg (mode, x)
513 enum machine_mode mode;
514 rtx x;
515{
516 register rtx temp = gen_reg_rtx (mode);
517
518 /* If not an operand, must be an address with PLUS and MULT so
519 do the computation. */
520 if (! general_operand (x, VOIDmode))
521 x = force_operand (x, temp);
522
523 if (GET_MODE (x) != mode && GET_MODE (x) != VOIDmode)
524 abort ();
525 if (x != temp)
526 emit_move_insn (temp, x);
527 return temp;
528}
529
530/* Load X into a register if it is not already one.
531 Use mode MODE for the register.
532 X should be valid for mode MODE, but it may be a constant which
533 is valid for all integer modes; that's why caller must specify MODE.
534
535 The caller must not alter the value in the register we return,
536 since we mark it as a "constant" register. */
537
538rtx
539force_reg (mode, x)
540 enum machine_mode mode;
541 rtx x;
542{
543 register rtx temp, insn;
544
545 if (GET_CODE (x) == REG)
546 return x;
547 temp = gen_reg_rtx (mode);
548 insn = emit_move_insn (temp, x);
549 /* Let optimizers know that TEMP's value never changes
550 and that X can be substituted for it. */
551 if (CONSTANT_P (x))
552 {
553 rtx note = find_reg_note (insn, REG_EQUAL, 0);
554
555 if (note)
556 XEXP (note, 0) = x;
557 else
558 REG_NOTES (insn) = gen_rtx (EXPR_LIST, REG_EQUAL, x, REG_NOTES (insn));
559 }
560 return temp;
561}
562
563/* If X is a memory ref, copy its contents to a new temp reg and return
564 that reg. Otherwise, return X. */
565
566rtx
567force_not_mem (x)
568 rtx x;
569{
570 register rtx temp;
571 if (GET_CODE (x) != MEM || GET_MODE (x) == BLKmode)
572 return x;
573 temp = gen_reg_rtx (GET_MODE (x));
574 emit_move_insn (temp, x);
575 return temp;
576}
577
578/* Copy X to TARGET (if it's nonzero and a reg)
579 or to a new temp reg and return that reg.
580 MODE is the mode to use for X in case it is a constant. */
581
582rtx
583copy_to_suggested_reg (x, target, mode)
584 rtx x, target;
585 enum machine_mode mode;
586{
587 register rtx temp;
588
589 if (target && GET_CODE (target) == REG)
590 temp = target;
591 else
592 temp = gen_reg_rtx (mode);
593
594 emit_move_insn (temp, x);
595 return temp;
596}
597\f
598/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
599 This pops when ADJUST is positive. ADJUST need not be constant. */
600
601void
602adjust_stack (adjust)
603 rtx adjust;
604{
605 rtx temp;
606 adjust = protect_from_queue (adjust, 0);
607
608 if (adjust == const0_rtx)
609 return;
610
611 temp = expand_binop (Pmode,
612#ifdef STACK_GROWS_DOWNWARD
613 add_optab,
614#else
615 sub_optab,
616#endif
617 stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
618 OPTAB_LIB_WIDEN);
619
620 if (temp != stack_pointer_rtx)
621 emit_move_insn (stack_pointer_rtx, temp);
622}
623
624/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
625 This pushes when ADJUST is positive. ADJUST need not be constant. */
626
627void
628anti_adjust_stack (adjust)
629 rtx adjust;
630{
631 rtx temp;
632 adjust = protect_from_queue (adjust, 0);
633
634 if (adjust == const0_rtx)
635 return;
636
637 temp = expand_binop (Pmode,
638#ifdef STACK_GROWS_DOWNWARD
639 sub_optab,
640#else
641 add_optab,
642#endif
643 stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
644 OPTAB_LIB_WIDEN);
645
646 if (temp != stack_pointer_rtx)
647 emit_move_insn (stack_pointer_rtx, temp);
648}
649
650/* Round the size of a block to be pushed up to the boundary required
651 by this machine. SIZE is the desired size, which need not be constant. */
652
653rtx
654round_push (size)
655 rtx size;
656{
657#ifdef STACK_BOUNDARY
658 int align = STACK_BOUNDARY / BITS_PER_UNIT;
659 if (align == 1)
660 return size;
661 if (GET_CODE (size) == CONST_INT)
662 {
663 int new = (INTVAL (size) + align - 1) / align * align;
664 if (INTVAL (size) != new)
665 size = gen_rtx (CONST_INT, VOIDmode, new);
666 }
667 else
668 {
669 size = expand_divmod (0, CEIL_DIV_EXPR, Pmode, size,
670 gen_rtx (CONST_INT, VOIDmode, align),
671 0, 1);
672 size = expand_mult (Pmode, size,
673 gen_rtx (CONST_INT, VOIDmode, align),
674 0, 1);
675 }
676#endif /* STACK_BOUNDARY */
677 return size;
678}
679\f
/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
   to a previously-created save area.  If no save area has been allocated,
   this function will allocate one.  If a save area is specified, it
   must be of the proper mode.

   The insns are emitted after insn AFTER, if nonzero, otherwise the insns
   are emitted at the current position.  */

void
emit_stack_save (save_level, psave, after)
     enum save_level save_level;
     rtx *psave;
     rtx after;
{
  rtx sa = *psave;
  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx (*fcn) () = gen_move_insn;
  enum machine_mode mode = Pmode;

  /* See if this machine has anything special to do for this kind of save.
     If so, the save area's mode comes from the pattern's operand 0.  */
  switch (save_level)
    {
#ifdef HAVE_save_stack_block
    case SAVE_BLOCK:
      if (HAVE_save_stack_block)
	{
	  fcn = gen_save_stack_block;
	  mode = insn_operand_mode[CODE_FOR_save_stack_block][0];
	}
      break;
#endif
#ifdef HAVE_save_stack_function
    case SAVE_FUNCTION:
      if (HAVE_save_stack_function)
	{
	  fcn = gen_save_stack_function;
	  mode = insn_operand_mode[CODE_FOR_save_stack_function][0];
	}
      break;
#endif
#ifdef HAVE_save_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_save_stack_nonlocal)
	{
	  fcn = gen_save_stack_nonlocal;
	  mode = insn_operand_mode[CODE_FOR_save_stack_nonlocal][0];
	}
      break;
#endif
    }

  /* If there is no save area and we have to allocate one, do so.  Otherwise
     verify the save area is the proper mode.  */

  if (sa == 0)
    {
      if (mode != VOIDmode)
	{
	  /* Nonlocal saves must survive across function calls, so they
	     live in the stack frame; others can be pseudos.  */
	  if (save_level == SAVE_NONLOCAL)
	    *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
	  else
	    *psave = sa = gen_reg_rtx (mode);
	}
    }
  else
    {
      if (mode == VOIDmode || GET_MODE (sa) != mode)
	abort ();
    }

  /* Make sure a MEM save area has a valid address before use.  */
  if (sa != 0)
    sa = validize_mem (sa);

  if (after)
    {
      /* Collect the save insns into a SEQUENCE so they can be placed
	 after AFTER as a unit.  */
      rtx seq;

      start_sequence ();
      emit_insn (fcn (sa, stack_pointer_rtx));
      seq = gen_sequence ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    emit_insn (fcn (sa, stack_pointer_rtx));
}
766
767/* Restore the stack pointer for the purpose in SAVE_LEVEL. SA is the save
768 area made by emit_stack_save. If it is zero, we have nothing to do.
769
770 Put any emitted insns after insn AFTER, if nonzero, otherwise at
771 current position. */
772
773void
774emit_stack_restore (save_level, sa, after)
775 enum save_level save_level;
776 rtx after;
777 rtx sa;
778{
779 /* The default is that we use a move insn. */
780 rtx (*fcn) () = gen_move_insn;
781
782 /* See if this machine has anything special to do for this kind of save. */
783 switch (save_level)
784 {
785#ifdef HAVE_restore_stack_block
786 case SAVE_BLOCK:
787 if (HAVE_restore_stack_block)
788 fcn = gen_restore_stack_block;
789 break;
790#endif
791#ifdef HAVE_restore_stack_function
792 case SAVE_FUNCTION:
793 if (HAVE_restore_stack_function)
794 fcn = gen_restore_stack_function;
795 break;
796#endif
797#ifdef HAVE_restore_stack_nonlocal
798
799 case SAVE_NONLOCAL:
800 if (HAVE_restore_stack_nonlocal)
801 fcn = gen_restore_stack_nonlocal;
802 break;
803#endif
804 }
805
d072107f
RK
806 if (sa != 0)
807 sa = validize_mem (sa);
808
59257ff7 809 if (after)
700f6f98
RK
810 {
811 rtx seq;
812
813 start_sequence ();
d072107f 814 emit_insn (fcn (stack_pointer_rtx, sa));
700f6f98
RK
815 seq = gen_sequence ();
816 end_sequence ();
817 emit_insn_after (seq, after);
818 }
59257ff7 819 else
d072107f 820 emit_insn (fcn (stack_pointer_rtx, sa));
59257ff7
RK
821}
822\f
18ca7dab
RK
823/* Return an rtx representing the address of an area of memory dynamically
824 pushed on the stack. This region of memory is always aligned to
825 a multiple of BIGGEST_ALIGNMENT.
826
827 Any required stack pointer alignment is preserved.
828
829 SIZE is an rtx representing the size of the area.
091ad0b9
RK
830 TARGET is a place in which the address can be placed.
831
832 KNOWN_ALIGN is the alignment (in bits) that we know SIZE has. */
18ca7dab
RK
833
834rtx
091ad0b9 835allocate_dynamic_stack_space (size, target, known_align)
18ca7dab
RK
836 rtx size;
837 rtx target;
091ad0b9 838 int known_align;
18ca7dab
RK
839{
840 /* Ensure the size is in the proper mode. */
841 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
842 size = convert_to_mode (Pmode, size, 1);
843
844 /* We will need to ensure that the address we return is aligned to
845 BIGGEST_ALIGNMENT. If STACK_DYNAMIC_OFFSET is defined, we don't
846 always know its final value at this point in the compilation (it
847 might depend on the size of the outgoing parameter lists, for
848 example), so we must align the value to be returned in that case.
849 (Note that STACK_DYNAMIC_OFFSET will have a default non-zero value if
850 STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).
851 We must also do an alignment operation on the returned value if
852 the stack pointer alignment is less strict that BIGGEST_ALIGNMENT.
853
854 If we have to align, we must leave space in SIZE for the hole
855 that might result from the alignment operation. */
856
857#if defined (STACK_DYNAMIC_OFFSET) || defined(STACK_POINTER_OFFSET) || defined (ALLOCATE_OUTGOING_ARGS)
858#define MUST_ALIGN
859#endif
860
861#if ! defined (MUST_ALIGN) && (!defined(STACK_BOUNDARY) || STACK_BOUNDARY < BIGGEST_ALIGNMENT)
862#define MUST_ALIGN
863#endif
864
865#ifdef MUST_ALIGN
866
3b998c11
RK
867 if (known_align % BIGGEST_ALIGNMENT != 0)
868 {
869 if (GET_CODE (size) == CONST_INT)
870 size = gen_rtx (CONST_INT, VOIDmode,
871 (INTVAL (size)
872 + (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1)));
873 else
874 size = expand_binop (Pmode, add_optab, size,
875 gen_rtx (CONST_INT, VOIDmode,
876 BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
877 0, 1, OPTAB_LIB_WIDEN);
878 }
18ca7dab
RK
879#endif
880
881#ifdef SETJMP_VIA_SAVE_AREA
882 /* If setjmp restores regs from a save area in the stack frame,
883 avoid clobbering the reg save area. Note that the offset of
884 virtual_incoming_args_rtx includes the preallocated stack args space.
885 It would be no problem to clobber that, but it's on the wrong side
886 of the old save area. */
887 {
888 rtx dynamic_offset
889 = expand_binop (Pmode, sub_optab, virtual_stack_dynamic_rtx,
890 stack_pointer_rtx, 0, 1, OPTAB_LIB_WIDEN);
891 size = expand_binop (Pmode, add_optab, size, dynamic_offset,
892 0, 1, OPTAB_LIB_WIDEN);
893 }
894#endif /* SETJMP_VIA_SAVE_AREA */
895
896 /* Round the size to a multiple of the required stack alignment.
897 Since the stack if presumed to be rounded before this allocation,
898 this will maintain the required alignment.
899
900 If the stack grows downward, we could save an insn by subtracting
901 SIZE from the stack pointer and then aligning the stack pointer.
902 The problem with this is that the stack pointer may be unaligned
903 between the execution of the subtraction and alignment insns and
904 some machines do not allow this. Even on those that do, some
905 signal handlers malfunction if a signal should occur between those
906 insns. Since this is an extremely rare event, we have no reliable
907 way of knowing which systems have this problem. So we avoid even
908 momentarily mis-aligning the stack. */
909
89d825c9 910#ifdef STACK_BOUNDARY
091ad0b9
RK
911 if (known_align % STACK_BOUNDARY != 0)
912 size = round_push (size);
89d825c9 913#endif
18ca7dab
RK
914
915 do_pending_stack_adjust ();
916
091ad0b9
RK
917 /* Don't use a TARGET that isn't a pseudo. */
918 if (target == 0 || GET_CODE (target) != REG
919 || REGNO (target) < FIRST_PSEUDO_REGISTER)
18ca7dab
RK
920 target = gen_reg_rtx (Pmode);
921
3ad69266
RS
922 mark_reg_pointer (target);
923
18ca7dab
RK
924#ifndef STACK_GROWS_DOWNWARD
925 emit_move_insn (target, virtual_stack_dynamic_rtx);
926#endif
927
928 /* Perform the required allocation from the stack. Some systems do
929 this differently than simply incrementing/decrementing from the
930 stack pointer. */
931#ifdef HAVE_allocate_stack
932 if (HAVE_allocate_stack)
933 {
934 enum machine_mode mode
935 = insn_operand_mode[(int) CODE_FOR_allocate_stack][0];
936
937 if (insn_operand_predicate[(int) CODE_FOR_allocate_stack][0]
938 && ! ((*insn_operand_predicate[(int) CODE_FOR_allocate_stack][0])
939 (size, mode)))
940 size = copy_to_mode_reg (mode, size);
941
942 emit_insn (gen_allocate_stack (size));
943 }
944 else
945#endif
946 anti_adjust_stack (size);
947
948#ifdef STACK_GROWS_DOWNWARD
949 emit_move_insn (target, virtual_stack_dynamic_rtx);
950#endif
951
952#ifdef MUST_ALIGN
091ad0b9
RK
953 if (known_align % BIGGEST_ALIGNMENT != 0)
954 {
955 target = expand_divmod (0, CEIL_DIV_EXPR, Pmode, target,
956 gen_rtx (CONST_INT, VOIDmode,
957 BIGGEST_ALIGNMENT / BITS_PER_UNIT),
958 0, 1);
959
960 target = expand_mult (Pmode, target,
961 gen_rtx (CONST_INT, VOIDmode,
962 BIGGEST_ALIGNMENT / BITS_PER_UNIT),
963 0, 1);
964 }
18ca7dab
RK
965#endif
966
967 /* Some systems require a particular insn to refer to the stack
968 to make the pages exist. */
969#ifdef HAVE_probe
970 if (HAVE_probe)
971 emit_insn (gen_probe ());
972#endif
973
974 return target;
975}
976\f
/* Return an rtx representing the register or memory location
   in which a scalar value of data type VALTYPE
   was returned by a function call to function FUNC.
   FUNC is a FUNCTION_DECL node if the precise function is known,
   otherwise 0.

   Simply delegates to the target's FUNCTION_VALUE macro.  */

rtx
hard_function_value (valtype, func)
     tree valtype;
     tree func;
{
  return FUNCTION_VALUE (valtype, func);
}
990
/* Return an rtx representing the register or memory location
   in which a scalar value of mode MODE was returned by a library call.

   Simply delegates to the target's LIBCALL_VALUE macro.  */

rtx
hard_libcall_value (mode)
     enum machine_mode mode;
{
  return LIBCALL_VALUE (mode);
}