]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/explow.c
Merge in gcc2-ss-010999
[thirdparty/gcc.git] / gcc / explow.c
1 /* Subroutines for manipulating rtx's in semantically interesting ways.
2 Copyright (C) 1987, 91, 94-97, 1998, 1999 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 #include "config.h"
23 #include "system.h"
24 #include "toplev.h"
25 #include "rtl.h"
26 #include "tree.h"
27 #include "flags.h"
28 #include "function.h"
29 #include "expr.h"
30 #include "hard-reg-set.h"
31 #include "insn-config.h"
32 #include "recog.h"
33 #include "insn-flags.h"
34 #include "insn-codes.h"
35
36 #if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
37 #define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
38 #endif
39
40 static rtx break_out_memory_refs PROTO((rtx));
41 static void emit_stack_probe PROTO((rtx));
42
43
44 /* Truncate and perhaps sign-extend C as appropriate for MODE. */
45
46 HOST_WIDE_INT
47 trunc_int_for_mode (c, mode)
48 HOST_WIDE_INT c;
49 enum machine_mode mode;
50 {
51 int width = GET_MODE_BITSIZE (mode);
52
53 /* We clear out all bits that don't belong in MODE, unless they and our
54 sign bit are all one. So we get either a reasonable negative
55 value or a reasonable unsigned value. */
56
57 if (width < HOST_BITS_PER_WIDE_INT
58 && ((c & ((HOST_WIDE_INT) (-1) << (width - 1)))
59 != ((HOST_WIDE_INT) (-1) << (width - 1))))
60 c &= ((HOST_WIDE_INT) 1 << width) - 1;
61
62 /* If this would be an entire word for the target, but is not for
63 the host, then sign-extend on the host so that the number will look
64 the same way on the host that it would on the target.
65
66 For example, when building a 64 bit alpha hosted 32 bit sparc
67 targeted compiler, then we want the 32 bit unsigned value -1 to be
68 represented as a 64 bit value -1, and not as 0x00000000ffffffff.
69 The later confuses the sparc backend. */
70
71 if (BITS_PER_WORD < HOST_BITS_PER_WIDE_INT
72 && BITS_PER_WORD == width
73 && (c & ((HOST_WIDE_INT) 1 << (width - 1))))
74 c |= ((HOST_WIDE_INT) (-1) << width);
75
76 return c;
77 }
78
/* Return an rtx for the sum of X and the integer C.

   This function should be used via the `plus_constant' macro.  */

rtx
plus_constant_wide (x, c)
     register rtx x;
     register HOST_WIDE_INT c;
{
  register RTX_CODE code;
  register enum machine_mode mode;
  register rtx tem;
  /* Set when X is built entirely from constants, so the result should be
     rewrapped in a CONST at the end.  */
  int all_constant = 0;

  if (c == 0)
    return x;

 restart:

  code = GET_CODE (x);
  mode = GET_MODE (x);
  switch (code)
    {
    case CONST_INT:
      /* Adding to an integer constant just folds.  */
      return GEN_INT (INTVAL (x) + c);

    case CONST_DOUBLE:
      /* Do the addition in double-word host arithmetic, sign-extending C
	 into the high word.  */
      {
	HOST_WIDE_INT l1 = CONST_DOUBLE_LOW (x);
	HOST_WIDE_INT h1 = CONST_DOUBLE_HIGH (x);
	HOST_WIDE_INT l2 = c;
	HOST_WIDE_INT h2 = c < 0 ? ~0 : 0;
	HOST_WIDE_INT lv, hv;

	add_double (l1, h1, l2, h2, &lv, &hv);

	return immed_double_const (lv, hv, VOIDmode);
      }

    case MEM:
      /* If this is a reference to the constant pool, try replacing it with
	 a reference to a new constant.  If the resulting address isn't
	 valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
	{
	  /* Any rtl we create here must go in a saveable obstack, since
	     we might have been called from within combine.  */
	  push_obstacks_nochange ();
	  rtl_in_saveable_obstack ();
	  tem
	    = force_const_mem (GET_MODE (x),
			       plus_constant (get_pool_constant (XEXP (x, 0)),
					      c));
	  pop_obstacks ();
	  if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
	    return tem;
	}
      break;

    case CONST:
      /* If adding to something entirely constant, set a flag
	 so that we can add a CONST around the result.  */
      x = XEXP (x, 0);
      all_constant = 1;
      goto restart;

    case SYMBOL_REF:
    case LABEL_REF:
      all_constant = 1;
      break;

    case PLUS:
      /* The interesting case is adding the integer to a sum.
	 Look for constant term in the sum and combine
	 with C.  For an integer constant term, we make a combined
	 integer.  For a constant term that is not an explicit integer,
	 we cannot really combine, but group them together anyway.

	 Restart or use a recursive call in case the remaining operand is
	 something that we handle specially, such as a SYMBOL_REF.

	 We may not immediately return from the recursive call here, lest
	 all_constant gets lost.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	{
	  c += INTVAL (XEXP (x, 1));

	  /* Keep the combined constant representable in X's mode.  */
	  if (GET_MODE (x) != VOIDmode)
	    c = trunc_int_for_mode (c, GET_MODE (x));

	  x = XEXP (x, 0);
	  goto restart;
	}
      else if (CONSTANT_P (XEXP (x, 0)))
	{
	  x = gen_rtx_PLUS (mode,
			    plus_constant (XEXP (x, 0), c),
			    XEXP (x, 1));
	  c = 0;
	}
      else if (CONSTANT_P (XEXP (x, 1)))
	{
	  x = gen_rtx_PLUS (mode,
			    XEXP (x, 0),
			    plus_constant (XEXP (x, 1), c));
	  c = 0;
	}
      break;

    default:
      break;
    }

  /* Whatever is left of C becomes an explicit addend.  */
  if (c != 0)
    x = gen_rtx_PLUS (mode, x, GEN_INT (c));

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    return gen_rtx_CONST (mode, x);
  else
    return x;
}
204
205 /* This is the same as `plus_constant', except that it handles LO_SUM.
206
207 This function should be used via the `plus_constant_for_output' macro. */
208
209 rtx
210 plus_constant_for_output_wide (x, c)
211 register rtx x;
212 register HOST_WIDE_INT c;
213 {
214 register enum machine_mode mode = GET_MODE (x);
215
216 if (GET_CODE (x) == LO_SUM)
217 return gen_rtx_LO_SUM (mode, XEXP (x, 0),
218 plus_constant_for_output (XEXP (x, 1), c));
219
220 else
221 return plus_constant (x, c);
222 }
223 \f
224 /* If X is a sum, return a new sum like X but lacking any constant terms.
225 Add all the removed constant terms into *CONSTPTR.
226 X itself is not altered. The result != X if and only if
227 it is not isomorphic to X. */
228
229 rtx
230 eliminate_constant_term (x, constptr)
231 rtx x;
232 rtx *constptr;
233 {
234 register rtx x0, x1;
235 rtx tem;
236
237 if (GET_CODE (x) != PLUS)
238 return x;
239
240 /* First handle constants appearing at this level explicitly. */
241 if (GET_CODE (XEXP (x, 1)) == CONST_INT
242 && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
243 XEXP (x, 1)))
244 && GET_CODE (tem) == CONST_INT)
245 {
246 *constptr = tem;
247 return eliminate_constant_term (XEXP (x, 0), constptr);
248 }
249
250 tem = const0_rtx;
251 x0 = eliminate_constant_term (XEXP (x, 0), &tem);
252 x1 = eliminate_constant_term (XEXP (x, 1), &tem);
253 if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
254 && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
255 *constptr, tem))
256 && GET_CODE (tem) == CONST_INT)
257 {
258 *constptr = tem;
259 return gen_rtx_PLUS (GET_MODE (x), x0, x1);
260 }
261
262 return x;
263 }
264
/* Returns the insn that next references REG after INSN, or 0
   if REG is clobbered before next referenced or we cannot find
   an insn that references REG in a straight-line piece of code.  */

rtx
find_next_ref (reg, insn)
     rtx reg;
     rtx insn;
{
  rtx next;

  for (insn = NEXT_INSN (insn); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      /* Notes carry no code; skip them.  */
      if (GET_CODE (insn) == NOTE)
	continue;
      /* A label or barrier ends the straight-line region we can scan:
	 control may arrive from elsewhere, or never fall through.  */
      if (GET_CODE (insn) == CODE_LABEL
	  || GET_CODE (insn) == BARRIER)
	return 0;
      if (GET_CODE (insn) == INSN
	  || GET_CODE (insn) == JUMP_INSN
	  || GET_CODE (insn) == CALL_INSN)
	{
	  /* A store into REG before any use counts as a clobber.  */
	  if (reg_set_p (reg, insn))
	    return 0;
	  if (reg_mentioned_p (reg, PATTERN (insn)))
	    return insn;
	  if (GET_CODE (insn) == JUMP_INSN)
	    {
	      /* An unconditional jump can be followed to its target;
		 anything fancier ends the scan.  */
	      if (simplejump_p (insn))
		next = JUMP_LABEL (insn);
	      else
		return 0;
	    }
	  /* A call clobbers every call-used hard register.  */
	  if (GET_CODE (insn) == CALL_INSN
	      && REGNO (reg) < FIRST_PSEUDO_REGISTER
	      && call_used_regs[REGNO (reg)])
	    return 0;
	}
      else
	abort ();
    }
  return 0;
}
309
310 /* Return an rtx for the size in bytes of the value of EXP. */
311
312 rtx
313 expr_size (exp)
314 tree exp;
315 {
316 tree size = size_in_bytes (TREE_TYPE (exp));
317
318 if (TREE_CODE (size) != INTEGER_CST
319 && contains_placeholder_p (size))
320 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
321
322 return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype),
323 EXPAND_MEMORY_USE_BAD);
324 }
325 \f
326 /* Return a copy of X in which all memory references
327 and all constants that involve symbol refs
328 have been replaced with new temporary registers.
329 Also emit code to load the memory locations and constants
330 into those registers.
331
332 If X contains no such constants or memory references,
333 X itself (not a copy) is returned.
334
335 If a constant is found in the address that is not a legitimate constant
336 in an insn, it is left alone in the hope that it might be valid in the
337 address.
338
339 X may contain no arithmetic except addition, subtraction and multiplication.
340 Values returned by expand_expr with 1 for sum_ok fit this constraint. */
341
342 static rtx
343 break_out_memory_refs (x)
344 register rtx x;
345 {
346 if (GET_CODE (x) == MEM
347 || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
348 && GET_MODE (x) != VOIDmode))
349 x = force_reg (GET_MODE (x), x);
350 else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
351 || GET_CODE (x) == MULT)
352 {
353 register rtx op0 = break_out_memory_refs (XEXP (x, 0));
354 register rtx op1 = break_out_memory_refs (XEXP (x, 1));
355
356 if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
357 x = gen_rtx_fmt_ee (GET_CODE (x), Pmode, op0, op1);
358 }
359
360 return x;
361 }
362
#ifdef POINTERS_EXTEND_UNSIGNED

/* Given X, a memory address in ptr_mode, convert it to an address
   in Pmode, or vice versa (TO_MODE says which way).  We take advantage of
   the fact that pointers are not allowed to overflow by commuting arithmetic
   operations over conversions so that address arithmetic insns can be
   used.  */

rtx
convert_memory_address (to_mode, x)
     enum machine_mode to_mode;
     rtx x;
{
  /* We only ever convert between ptr_mode and Pmode, so the source mode
     is whichever of the two TO_MODE is not.  */
  enum machine_mode from_mode = to_mode == ptr_mode ? Pmode : ptr_mode;
  rtx temp;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    {
    case CONST_INT:
    case CONST_DOUBLE:
      /* Integer constants carry no mode; return them unchanged.  */
      return x;

    case LABEL_REF:
      /* Rebuild the reference in the new mode, preserving its flag.  */
      temp = gen_rtx_LABEL_REF (to_mode, XEXP (x, 0));
      LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
      return temp;

    case SYMBOL_REF:
      /* Likewise for a symbol, copying both of its flag bits.  */
      temp = gen_rtx_SYMBOL_REF (to_mode, XSTR (x, 0));
      SYMBOL_REF_FLAG (temp) = SYMBOL_REF_FLAG (x);
      CONSTANT_POOL_ADDRESS_P (temp) = CONSTANT_POOL_ADDRESS_P (x);
      return temp;

    case CONST:
      /* Convert whatever is inside the CONST wrapper.  */
      return gen_rtx_CONST (to_mode,
			    convert_memory_address (to_mode, XEXP (x, 0)));

    case PLUS:
    case MULT:
      /* For addition the second operand is a small constant, we can safely
	 permute the conversion and addition operation.  We can always safely
	 permute them if we are making the address narrower.  In addition,
	 always permute the operations if this is a constant.  */
      /* NOTE(review): the range test below only bounds INTVAL from above
	 (INTVAL + 20000 < 40000 reduces to INTVAL < 20000, which also
	 admits arbitrarily negative constants); presumably a check for
	 |INTVAL| < 20000 was intended -- confirm against later
	 revisions.  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
	  || (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT
	      && (INTVAL (XEXP (x, 1)) + 20000 < 40000
		  || CONSTANT_P (XEXP (x, 0)))))
	return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
			       convert_memory_address (to_mode, XEXP (x, 0)),
			       convert_memory_address (to_mode, XEXP (x, 1)));
      break;

    default:
      break;
    }

  /* The general case: emit an actual extension or truncation.  */
  return convert_modes (to_mode, from_mode,
			x, POINTERS_EXTEND_UNSIGNED);
}
#endif
425
/* Given a memory address or facsimile X, construct a new address,
   currently equivalent, that is stable: future stores won't change it.

   X must be composed of constants, register and memory references
   combined with addition, subtraction and multiplication:
   in other words, just what you can get from expand_expr if sum_ok is 1.

   Works by making copies of all regs and memory locations used
   by X and combining them the same way X does.
   You could also stabilize the reference to this address
   by copying the address to a register with copy_to_reg;
   but then you wouldn't get indexed addressing in the reference.  */

rtx
copy_all_regs (x)
     register rtx x;
{
  if (GET_CODE (x) == REG)
    {
      /* The frame pointer (and, where distinct, the hard frame pointer)
	 never changes during a function, so it is already stable and
	 need not be copied.  */
      if (REGNO (x) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
	  && REGNO (x) != HARD_FRAME_POINTER_REGNUM
#endif
	  )
	x = copy_to_reg (x);
    }
  else if (GET_CODE (x) == MEM)
    x = copy_to_reg (x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
	   || GET_CODE (x) == MULT)
    {
      /* Stabilize both operands recursively; rebuild the expression
	 only when one of them actually changed.  */
      register rtx op0 = copy_all_regs (XEXP (x, 0));
      register rtx op1 = copy_all_regs (XEXP (x, 1));
      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
	x = gen_rtx_fmt_ee (GET_CODE (x), Pmode, op0, op1);
    }
  return x;
}
464 \f
/* Return something equivalent to X but valid as a memory address
   for something of mode MODE.  When X is not itself valid, this
   works by copying X or subexpressions of it into registers.  */

rtx
memory_address (mode, x)
     enum machine_mode mode;
     register rtx x;
{
  register rtx oldx = x;

  /* An ADDRESSOF is acceptable as-is; leave it for later passes.  */
  if (GET_CODE (x) == ADDRESSOF)
    return x;

#ifdef POINTERS_EXTEND_UNSIGNED
  /* Addresses are always manipulated in Pmode; widen/narrow first.  */
  if (GET_MODE (x) == ptr_mode)
    x = convert_memory_address (Pmode, x);
#endif

  /* By passing constant addresses thru registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (Pmode, x);

  /* Accept a QUEUED that refers to a REG
     even though that isn't a valid address.
     On attempting to put this in an insn we will call protect_from_queue
     which will turn it into a REG, which is valid.  */
  else if (GET_CODE (x) == QUEUED
      && GET_CODE (QUEUED_VAR (x)) == REG)
    ;

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && GET_CODE (x) != REG)
	x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.  The target macro
	 jumps to the `win' label below when X is already legitimate.  */
      GO_IF_LEGITIMATE_ADDRESS (mode, x, win);

      /* If it was valid before but breaking out memory refs invalidated it,
	 use it the old way.  */
      if (memory_address_p (mode, oldx))
	goto win2;

      /* Perform machine-dependent transformations on X
	 in certain cases.  This is not necessary since the code
	 below can handle all possible cases, but machine-dependent
	 transformations can make better code.  */
      LEGITIMIZE_ADDRESS (x, oldx, mode, win);

      /* PLUS and MULT can appear in special ways
	 as the result of attempts to make an address usable for indexing.
	 Usually they are dealt with by calling force_operand, below.
	 But a sum containing constant terms is special
	 if removing them makes the sum a valid address:
	 then we generate that address in a register
	 and index off of it.  We do this because it often makes
	 shorter code, and because the addresses thus generated
	 in registers often become common subexpressions.  */
      if (GET_CODE (x) == PLUS)
	{
	  rtx constant_term = const0_rtx;
	  rtx y = eliminate_constant_term (x, &constant_term);
	  if (constant_term == const0_rtx
	      || ! memory_address_p (mode, y))
	    x = force_operand (x, NULL_RTX);
	  else
	    {
	      y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
	      if (! memory_address_p (mode, y))
		x = force_operand (x, NULL_RTX);
	      else
		x = y;
	    }
	}

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
	x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
	 it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (GET_CODE (x) == REG)
	x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
	 the register is a valid address.  */
      else
	x = force_reg (Pmode, x);

      goto done;

    win2:
      x = oldx;
    win:
      if (flag_force_addr && ! cse_not_expected && GET_CODE (x) != REG
	  /* Don't copy an addr via a reg if it is one of our stack slots.  */
	  && ! (GET_CODE (x) == PLUS
		&& (XEXP (x, 0) == virtual_stack_vars_rtx
		    || XEXP (x, 0) == virtual_incoming_args_rtx)))
	{
	  if (general_operand (x, Pmode))
	    x = force_reg (Pmode, x);
	  else
	    x = force_operand (x, NULL_RTX);
	}
    }

 done:

  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (GET_CODE (x) == REG)
    mark_reg_pointer (x, 1);
  else if (GET_CODE (x) == PLUS
	   && GET_CODE (XEXP (x, 0)) == REG
	   && GET_CODE (XEXP (x, 1)) == CONST_INT)
    mark_reg_pointer (XEXP (x, 0), 1);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}
596
597 /* Like `memory_address' but pretend `flag_force_addr' is 0. */
598
599 rtx
600 memory_address_noforce (mode, x)
601 enum machine_mode mode;
602 rtx x;
603 {
604 int ambient_force_addr = flag_force_addr;
605 rtx val;
606
607 flag_force_addr = 0;
608 val = memory_address (mode, x);
609 flag_force_addr = ambient_force_addr;
610 return val;
611 }
612
613 /* Convert a mem ref into one with a valid memory address.
614 Pass through anything else unchanged. */
615
616 rtx
617 validize_mem (ref)
618 rtx ref;
619 {
620 if (GET_CODE (ref) != MEM)
621 return ref;
622 if (memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
623 return ref;
624 /* Don't alter REF itself, since that is probably a stack slot. */
625 return change_address (ref, GET_MODE (ref), XEXP (ref, 0));
626 }
627 \f
628 /* Return a modified copy of X with its memory address copied
629 into a temporary register to protect it from side effects.
630 If X is not a MEM, it is returned unchanged (and not copied).
631 Perhaps even if it is a MEM, if there is no need to change it. */
632
633 rtx
634 stabilize (x)
635 rtx x;
636 {
637 register rtx addr;
638 if (GET_CODE (x) != MEM)
639 return x;
640 addr = XEXP (x, 0);
641 if (rtx_unstable_p (addr))
642 {
643 rtx temp = copy_all_regs (addr);
644 rtx mem;
645 if (GET_CODE (temp) != REG)
646 temp = copy_to_reg (temp);
647 mem = gen_rtx_MEM (GET_MODE (x), temp);
648
649 /* Mark returned memref with in_struct if it's in an array or
650 structure. Copy const and volatile from original memref. */
651
652 RTX_UNCHANGING_P (mem) = RTX_UNCHANGING_P (x);
653 MEM_COPY_ATTRIBUTES (mem, x);
654 if (GET_CODE (addr) == PLUS)
655 MEM_SET_IN_STRUCT_P (mem, 1);
656
657 /* Since the new MEM is just like the old X, it can alias only
658 the things that X could. */
659 MEM_ALIAS_SET (mem) = MEM_ALIAS_SET (x);
660
661 return mem;
662 }
663 return x;
664 }
665 \f
666 /* Copy the value or contents of X to a new temp reg and return that reg. */
667
668 rtx
669 copy_to_reg (x)
670 rtx x;
671 {
672 register rtx temp = gen_reg_rtx (GET_MODE (x));
673
674 /* If not an operand, must be an address with PLUS and MULT so
675 do the computation. */
676 if (! general_operand (x, VOIDmode))
677 x = force_operand (x, temp);
678
679 if (x != temp)
680 emit_move_insn (temp, x);
681
682 return temp;
683 }
684
/* Like copy_to_reg but always give the new register mode Pmode
   in case X is a constant.  */

rtx
copy_addr_to_reg (x)
     rtx x;
{
  /* Addresses always have mode Pmode, so this is just copy_to_mode_reg
     with that mode supplied.  */
  return copy_to_mode_reg (Pmode, x);
}
694
695 /* Like copy_to_reg but always give the new register mode MODE
696 in case X is a constant. */
697
698 rtx
699 copy_to_mode_reg (mode, x)
700 enum machine_mode mode;
701 rtx x;
702 {
703 register rtx temp = gen_reg_rtx (mode);
704
705 /* If not an operand, must be an address with PLUS and MULT so
706 do the computation. */
707 if (! general_operand (x, VOIDmode))
708 x = force_operand (x, temp);
709
710 if (GET_MODE (x) != mode && GET_MODE (x) != VOIDmode)
711 abort ();
712 if (x != temp)
713 emit_move_insn (temp, x);
714 return temp;
715 }
716
717 /* Load X into a register if it is not already one.
718 Use mode MODE for the register.
719 X should be valid for mode MODE, but it may be a constant which
720 is valid for all integer modes; that's why caller must specify MODE.
721
722 The caller must not alter the value in the register we return,
723 since we mark it as a "constant" register. */
724
725 rtx
726 force_reg (mode, x)
727 enum machine_mode mode;
728 rtx x;
729 {
730 register rtx temp, insn, set;
731
732 if (GET_CODE (x) == REG)
733 return x;
734
735 temp = gen_reg_rtx (mode);
736
737 if (! general_operand (x, mode))
738 x = force_operand (x, NULL_RTX);
739
740 insn = emit_move_insn (temp, x);
741
742 /* Let optimizers know that TEMP's value never changes
743 and that X can be substituted for it. Don't get confused
744 if INSN set something else (such as a SUBREG of TEMP). */
745 if (CONSTANT_P (x)
746 && (set = single_set (insn)) != 0
747 && SET_DEST (set) == temp)
748 {
749 rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
750
751 if (note)
752 XEXP (note, 0) = x;
753 else
754 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, x, REG_NOTES (insn));
755 }
756 return temp;
757 }
758
759 /* If X is a memory ref, copy its contents to a new temp reg and return
760 that reg. Otherwise, return X. */
761
762 rtx
763 force_not_mem (x)
764 rtx x;
765 {
766 register rtx temp;
767 if (GET_CODE (x) != MEM || GET_MODE (x) == BLKmode)
768 return x;
769 temp = gen_reg_rtx (GET_MODE (x));
770 emit_move_insn (temp, x);
771 return temp;
772 }
773
774 /* Copy X to TARGET (if it's nonzero and a reg)
775 or to a new temp reg and return that reg.
776 MODE is the mode to use for X in case it is a constant. */
777
778 rtx
779 copy_to_suggested_reg (x, target, mode)
780 rtx x, target;
781 enum machine_mode mode;
782 {
783 register rtx temp;
784
785 if (target && GET_CODE (target) == REG)
786 temp = target;
787 else
788 temp = gen_reg_rtx (mode);
789
790 emit_move_insn (temp, x);
791 return temp;
792 }
793 \f
/* Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_CALL is non-zero if this call is promoting args for a call.  */

enum machine_mode
promote_mode (type, mode, punsignedp, for_call)
     tree type;
     enum machine_mode mode;
     int *punsignedp;
     int for_call ATTRIBUTE_UNUSED;
{
  enum tree_code code = TREE_CODE (type);
  int unsignedp = *punsignedp;

#ifdef PROMOTE_FOR_CALL_ONLY
  /* Some targets promote only call arguments; leave everything else
     in its declared mode.  */
  if (! for_call)
    return mode;
#endif

  switch (code)
    {
#ifdef PROMOTE_MODE
    /* Integral and real scalars are promoted however the target's
       PROMOTE_MODE macro says (it may rewrite MODE and UNSIGNEDP).  */
    case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
    case CHAR_TYPE: case REAL_TYPE: case OFFSET_TYPE:
      PROMOTE_MODE (mode, unsignedp, type);
      break;
#endif

#ifdef POINTERS_EXTEND_UNSIGNED
    /* Pointers live in Pmode, extended with the target's pointer
       signedness.  */
    case REFERENCE_TYPE:
    case POINTER_TYPE:
      mode = Pmode;
      unsignedp = POINTERS_EXTEND_UNSIGNED;
      break;
#endif

    default:
      break;
    }

  *punsignedp = unsignedp;
  return mode;
}
839 \f
840 /* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
841 This pops when ADJUST is positive. ADJUST need not be constant. */
842
843 void
844 adjust_stack (adjust)
845 rtx adjust;
846 {
847 rtx temp;
848 adjust = protect_from_queue (adjust, 0);
849
850 if (adjust == const0_rtx)
851 return;
852
853 temp = expand_binop (Pmode,
854 #ifdef STACK_GROWS_DOWNWARD
855 add_optab,
856 #else
857 sub_optab,
858 #endif
859 stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
860 OPTAB_LIB_WIDEN);
861
862 if (temp != stack_pointer_rtx)
863 emit_move_insn (stack_pointer_rtx, temp);
864 }
865
866 /* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
867 This pushes when ADJUST is positive. ADJUST need not be constant. */
868
869 void
870 anti_adjust_stack (adjust)
871 rtx adjust;
872 {
873 rtx temp;
874 adjust = protect_from_queue (adjust, 0);
875
876 if (adjust == const0_rtx)
877 return;
878
879 temp = expand_binop (Pmode,
880 #ifdef STACK_GROWS_DOWNWARD
881 sub_optab,
882 #else
883 add_optab,
884 #endif
885 stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
886 OPTAB_LIB_WIDEN);
887
888 if (temp != stack_pointer_rtx)
889 emit_move_insn (stack_pointer_rtx, temp);
890 }
891
892 /* Round the size of a block to be pushed up to the boundary required
893 by this machine. SIZE is the desired size, which need not be constant. */
894
895 rtx
896 round_push (size)
897 rtx size;
898 {
899 #ifdef PREFERRED_STACK_BOUNDARY
900 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
901 if (align == 1)
902 return size;
903 if (GET_CODE (size) == CONST_INT)
904 {
905 int new = (INTVAL (size) + align - 1) / align * align;
906 if (INTVAL (size) != new)
907 size = GEN_INT (new);
908 }
909 else
910 {
911 /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
912 but we know it can't. So add ourselves and then do
913 TRUNC_DIV_EXPR. */
914 size = expand_binop (Pmode, add_optab, size, GEN_INT (align - 1),
915 NULL_RTX, 1, OPTAB_LIB_WIDEN);
916 size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, GEN_INT (align),
917 NULL_RTX, 1);
918 size = expand_mult (Pmode, size, GEN_INT (align), NULL_RTX, 1);
919 }
920 #endif /* PREFERRED_STACK_BOUNDARY */
921 return size;
922 }
923 \f
/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
   to a previously-created save area.  If no save area has been allocated,
   this function will allocate one.  If a save area is specified, it
   must be of the proper mode.

   The insns are emitted after insn AFTER, if nonzero, otherwise the insns
   are emitted at the current position.  */

void
emit_stack_save (save_level, psave, after)
     enum save_level save_level;
     rtx *psave;
     rtx after;
{
  rtx sa = *psave;
  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx (*fcn) PROTO ((rtx, rtx)) = gen_move_insn;
  enum machine_mode mode = STACK_SAVEAREA_MODE (save_level);

  /* See if this machine has anything special to do for this kind of save.
     Each HAVE_* pattern, when present, supplies its own generator.  */
  switch (save_level)
    {
#ifdef HAVE_save_stack_block
    case SAVE_BLOCK:
      if (HAVE_save_stack_block)
	fcn = gen_save_stack_block;
      break;
#endif
#ifdef HAVE_save_stack_function
    case SAVE_FUNCTION:
      if (HAVE_save_stack_function)
	fcn = gen_save_stack_function;
      break;
#endif
#ifdef HAVE_save_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_save_stack_nonlocal)
	fcn = gen_save_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  /* If there is no save area and we have to allocate one, do so.  Otherwise
     verify the save area is the proper mode.  */

  if (sa == 0)
    {
      if (mode != VOIDmode)
	{
	  /* A nonlocal save area must live in memory (it may be read by
	     another function); other saves can use a pseudo.  */
	  if (save_level == SAVE_NONLOCAL)
	    *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
	  else
	    *psave = sa = gen_reg_rtx (mode);
	}
    }
  else
    {
      if (mode == VOIDmode || GET_MODE (sa) != mode)
	abort ();
    }

  if (after)
    {
      rtx seq;

      start_sequence ();
      /* We must validize inside the sequence, to ensure that any instructions
	 created by the validize call also get moved to the right place.  */
      if (sa != 0)
	sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
      seq = gen_sequence ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    {
      if (sa != 0)
	sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
    }
}
1008
/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
   area made by emit_stack_save.  If it is zero, we have nothing to do.

   Put any emitted insns after insn AFTER, if nonzero, otherwise at
   current position.  */

void
emit_stack_restore (save_level, sa, after)
     enum save_level save_level;
     /* Note: these K&R declarations are listed in the opposite order from
	the parameter list above; that is harmless because K&R parameter
	declarations are matched by name, not position, and both are rtx.  */
     rtx after;
     rtx sa;
{
  /* The default is that we use a move insn.  */
  rtx (*fcn) PROTO ((rtx, rtx)) = gen_move_insn;

  /* See if this machine has anything special to do for this kind of save.
     Each HAVE_* pattern, when present, supplies its own generator.  */
  switch (save_level)
    {
#ifdef HAVE_restore_stack_block
    case SAVE_BLOCK:
      if (HAVE_restore_stack_block)
	fcn = gen_restore_stack_block;
      break;
#endif
#ifdef HAVE_restore_stack_function
    case SAVE_FUNCTION:
      if (HAVE_restore_stack_function)
	fcn = gen_restore_stack_function;
      break;
#endif
#ifdef HAVE_restore_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_restore_stack_nonlocal)
	fcn = gen_restore_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  /* Make sure the save area, if in memory, has a valid address.  */
  if (sa != 0)
    sa = validize_mem (sa);

  if (after)
    {
      rtx seq;

      start_sequence ();
      emit_insn (fcn (stack_pointer_rtx, sa));
      seq = gen_sequence ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    emit_insn (fcn (stack_pointer_rtx, sa));
}
1065 \f
#ifdef SETJMP_VIA_SAVE_AREA
/* Optimize RTL generated by allocate_dynamic_stack_space for targets
   where SETJMP_VIA_SAVE_AREA is true.  The problem is that on these
   platforms, the dynamic stack space used can corrupt the original
   frame, thus causing a crash if a longjmp unwinds to it.  */

void
optimize_save_area_alloca (insns)
     rtx insns;
{
  rtx insn;

  for (insn = insns; insn; insn = NEXT_INSN(insn))
    {
      rtx note;

      if (GET_CODE (insn) != INSN)
	continue;

      /* Scan this insn's notes for the REG_SAVE_AREA marker left by
	 allocate_dynamic_stack_space.  */
      for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	{
	  if (REG_NOTE_KIND (note) != REG_SAVE_AREA)
	    continue;

	  if (!current_function_calls_setjmp)
	    {
	      rtx pat = PATTERN (insn);

	      /* If we do not see the note in a pattern matching
		 these precise characteristics, we did something
		 entirely wrong in allocate_dynamic_stack_space.

		 Note, one way this could happen is if SETJMP_VIA_SAVE_AREA
		 was defined on a machine where stacks grow towards higher
		 addresses.

		 Right now only supported port with stack that grow upward
		 is the HPPA and it does not define SETJMP_VIA_SAVE_AREA.  */
	      if (GET_CODE (pat) != SET
		  || SET_DEST (pat) != stack_pointer_rtx
		  || GET_CODE (SET_SRC (pat)) != MINUS
		  || XEXP (SET_SRC (pat), 0) != stack_pointer_rtx)
		abort ();

	      /* This will now be transformed into a (set REG REG)
		 so we can just blow away all the other notes.  */
	      XEXP (SET_SRC (pat), 1) = XEXP (note, 0);
	      REG_NOTES (insn) = NULL_RTX;
	    }
	  else
	    {
	      /* setjmp was called, we must remove the REG_SAVE_AREA
		 note so that later passes do not get confused by its
		 presence.  */
	      if (note == REG_NOTES (insn))
		{
		  /* The note is first in the list: unlink the head.  */
		  REG_NOTES (insn) = XEXP (note, 1);
		}
	      else
		{
		  /* Otherwise find the note's predecessor and splice
		     the note out of the singly-linked list.  */
		  rtx srch;

		  for (srch = REG_NOTES (insn); srch; srch = XEXP (srch, 1))
		    if (XEXP (srch, 1) == note)
		      break;

		  if (srch == NULL_RTX)
		    abort();

		  XEXP (srch, 1) = XEXP (note, 1);
		}
	    }
	  /* Once we've seen the note of interest, we need not look at
	     the rest of them.  */
	  break;
	}
    }
}
#endif /* SETJMP_VIA_SAVE_AREA */
1145
/* Return an rtx representing the address of an area of memory dynamically
   pushed on the stack.  This region of memory is always aligned to
   a multiple of BIGGEST_ALIGNMENT.

   Any required stack pointer alignment is preserved.

   SIZE is an rtx representing the size of the area.
   TARGET is a place in which the address can be placed.

   KNOWN_ALIGN is the alignment (in bits) that we know SIZE has.  */

rtx
allocate_dynamic_stack_space (size, target, known_align)
     rtx size;
     rtx target;
     int known_align;
{
#ifdef SETJMP_VIA_SAVE_AREA
  /* When non-null, a pseudo holding SIZE rounded down to a stack-boundary
     multiple; recorded in a REG_SAVE_AREA note for
     optimize_save_area_alloca to exploit later.  */
  rtx setjmpless_size = NULL_RTX;
#endif

  /* If we're asking for zero bytes, it doesn't matter what we point
     to since we can't dereference it.  But return a reasonable
     address anyway.  */
  if (size == const0_rtx)
    return virtual_stack_dynamic_rtx;

  /* Otherwise, show we're calling alloca or equivalent.  */
  current_function_calls_alloca = 1;

  /* Ensure the size is in the proper mode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* We will need to ensure that the address we return is aligned to
     BIGGEST_ALIGNMENT.  If STACK_DYNAMIC_OFFSET is defined, we don't
     always know its final value at this point in the compilation (it
     might depend on the size of the outgoing parameter lists, for
     example), so we must align the value to be returned in that case.
     (Note that STACK_DYNAMIC_OFFSET will have a default non-zero value if
     STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).
     We must also do an alignment operation on the returned value if
     the stack pointer alignment is less strict than BIGGEST_ALIGNMENT.

     If we have to align, we must leave space in SIZE for the hole
     that might result from the alignment operation.  */

#if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET) || ! defined (PREFERRED_STACK_BOUNDARY)
#define MUST_ALIGN 1
#else
#define MUST_ALIGN (PREFERRED_STACK_BOUNDARY < BIGGEST_ALIGNMENT)
#endif

  if (MUST_ALIGN)
    {
      /* Pad SIZE so the full request still fits after the returned
	 address is rounded up to BIGGEST_ALIGNMENT below.  */
      if (GET_CODE (size) == CONST_INT)
	size = GEN_INT (INTVAL (size)
			+ (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1));
      else
	size = expand_binop (Pmode, add_optab, size,
			     GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
			     NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

#ifdef SETJMP_VIA_SAVE_AREA
  /* If setjmp restores regs from a save area in the stack frame,
     avoid clobbering the reg save area.  Note that the offset of
     virtual_incoming_args_rtx includes the preallocated stack args space.
     It would be no problem to clobber that, but it's on the wrong side
     of the old save area.  */
  {
    rtx dynamic_offset
      = expand_binop (Pmode, sub_optab, virtual_stack_dynamic_rtx,
		      stack_pointer_rtx, NULL_RTX, 1, OPTAB_LIB_WIDEN);

    if (!current_function_calls_setjmp)
      {
	int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

	/* See optimize_save_area_alloca to understand what is being
	   set up here.  */

#if !defined(PREFERRED_STACK_BOUNDARY) || !defined(MUST_ALIGN) || (PREFERRED_STACK_BOUNDARY != BIGGEST_ALIGNMENT)
	/* If anyone creates a target with these characteristics, let them
	   know that our optimization cannot work correctly in such a case.  */
	abort();
#endif

	if (GET_CODE (size) == CONST_INT)
	  {
	    /* Round the constant size down to a multiple of ALIGN;
	       reuse SIZE itself when it is already a multiple.  */
	    int new = INTVAL (size) / align * align;

	    if (INTVAL (size) != new)
	      setjmpless_size = GEN_INT (new);
	    else
	      setjmpless_size = size;
	  }
	else
	  {
	    /* Since we know overflow is not possible, we avoid using
	       CEIL_DIV_EXPR and use TRUNC_DIV_EXPR instead.  */
	    setjmpless_size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size,
					     GEN_INT (align), NULL_RTX, 1);
	    setjmpless_size = expand_mult (Pmode, setjmpless_size,
					   GEN_INT (align), NULL_RTX, 1);
	  }
	/* Our optimization works based upon being able to perform a simple
	   transformation of this RTL into a (set REG REG) so make sure things
	   did in fact end up in a REG.  */
	if (!register_operand (setjmpless_size, Pmode))
	  setjmpless_size = force_reg (Pmode, setjmpless_size);
      }

    size = expand_binop (Pmode, add_optab, size, dynamic_offset,
			 NULL_RTX, 1, OPTAB_LIB_WIDEN);
  }
#endif /* SETJMP_VIA_SAVE_AREA */

  /* Round the size to a multiple of the required stack alignment.
     Since the stack is presumed to be rounded before this allocation,
     this will maintain the required alignment.

     If the stack grows downward, we could save an insn by subtracting
     SIZE from the stack pointer and then aligning the stack pointer.
     The problem with this is that the stack pointer may be unaligned
     between the execution of the subtraction and alignment insns and
     some machines do not allow this.  Even on those that do, some
     signal handlers malfunction if a signal should occur between those
     insns.  Since this is an extremely rare event, we have no reliable
     way of knowing which systems have this problem.  So we avoid even
     momentarily mis-aligning the stack.  */

#ifdef PREFERRED_STACK_BOUNDARY
  /* If we added a variable amount to SIZE,
     we can no longer assume it is aligned.  */
#if !defined (SETJMP_VIA_SAVE_AREA)
  if (MUST_ALIGN || known_align % PREFERRED_STACK_BOUNDARY != 0)
#endif
    size = round_push (size);
#endif

  do_pending_stack_adjust ();

  /* If needed, check that we have the required amount of stack.  Take into
     account what has already been checked.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    probe_stack_range (STACK_CHECK_MAX_FRAME_SIZE + STACK_CHECK_PROTECT, size);

  /* Don't use a TARGET that isn't a pseudo.  */
  if (target == 0 || GET_CODE (target) != REG
      || REGNO (target) < FIRST_PSEUDO_REGISTER)
    target = gen_reg_rtx (Pmode);

  mark_reg_pointer (target, known_align / BITS_PER_UNIT);

  /* Perform the required allocation from the stack.  Some systems do
     this differently than simply incrementing/decrementing from the
     stack pointer, such as acquiring the space by calling malloc().  */
#ifdef HAVE_allocate_stack
  if (HAVE_allocate_stack)
    {
      enum machine_mode mode = STACK_SIZE_MODE;

      /* Make TARGET acceptable to the allocate_stack insn's first
	 operand predicate.  */
      if (insn_operand_predicate[(int) CODE_FOR_allocate_stack][0]
	  && ! ((*insn_operand_predicate[(int) CODE_FOR_allocate_stack][0])
		(target, Pmode)))
#ifdef POINTERS_EXTEND_UNSIGNED
	target = convert_memory_address (Pmode, target);
#else
	target = copy_to_mode_reg (Pmode, target);
#endif

      if (mode == VOIDmode)
	mode = Pmode;

      size = convert_modes (mode, ptr_mode, size, 1);
      if (insn_operand_predicate[(int) CODE_FOR_allocate_stack][1]
	  && ! ((*insn_operand_predicate[(int) CODE_FOR_allocate_stack][1])
		(size, mode)))
	size = copy_to_mode_reg (mode, size);

      emit_insn (gen_allocate_stack (target, size));
    }
  else
#endif
    {
#ifndef STACK_GROWS_DOWNWARD
      /* Stack grows upward: the new space begins at the current stack
	 pointer, so record the address before adjusting.  */
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif
      size = convert_modes (Pmode, ptr_mode, size, 1);
      anti_adjust_stack (size);
#ifdef SETJMP_VIA_SAVE_AREA
      if (setjmpless_size != NULL_RTX)
	{
	  /* Tag the stack adjustment just emitted so that
	     optimize_save_area_alloca can rewrite it if setjmp turns
	     out never to be called.  */
	  rtx note_target = get_last_insn ();

	  REG_NOTES (note_target)
	    = gen_rtx_EXPR_LIST (REG_SAVE_AREA, setjmpless_size,
				 REG_NOTES (note_target));
	}
#endif /* SETJMP_VIA_SAVE_AREA */
#ifdef STACK_GROWS_DOWNWARD
      /* Stack grows downward: the new space begins at the adjusted
	 stack pointer.  */
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif
    }

  if (MUST_ALIGN)
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
	 but we know it can't.  So add ourselves and then do
	 TRUNC_DIV_EXPR.  */
      target = expand_binop (Pmode, add_optab, target,
			     GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
			     NULL_RTX, 1, OPTAB_LIB_WIDEN);
      target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
			      GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
			      NULL_RTX, 1);
      target = expand_mult (Pmode, target,
			    GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
			    NULL_RTX, 1);
    }

  /* Some systems require a particular insn to refer to the stack
     to make the pages exist.  */
#ifdef HAVE_probe
  if (HAVE_probe)
    emit_insn (gen_probe ());
#endif

  /* Record the new stack level for nonlocal gotos.  */
  if (nonlocal_goto_handler_slots != 0)
    emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);

  return target;
}
1381 \f
1382 /* Emit one stack probe at ADDRESS, an address within the stack. */
1383
1384 static void
1385 emit_stack_probe (address)
1386 rtx address;
1387 {
1388 rtx memref = gen_rtx_MEM (word_mode, address);
1389
1390 MEM_VOLATILE_P (memref) = 1;
1391
1392 if (STACK_CHECK_PROBE_LOAD)
1393 emit_move_insn (gen_reg_rtx (word_mode), memref);
1394 else
1395 emit_move_insn (memref, const0_rtx);
1396 }
1397
1398 /* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
1399 FIRST is a constant and size is a Pmode RTX. These are offsets from the
1400 current stack pointer. STACK_GROWS_DOWNWARD says whether to add or
1401 subtract from the stack. If SIZE is constant, this is done
1402 with a fixed number of probes. Otherwise, we must make a loop. */
1403
1404 #ifdef STACK_GROWS_DOWNWARD
1405 #define STACK_GROW_OP MINUS
1406 #else
1407 #define STACK_GROW_OP PLUS
1408 #endif
1409
1410 void
1411 probe_stack_range (first, size)
1412 HOST_WIDE_INT first;
1413 rtx size;
1414 {
1415 /* First see if we have an insn to check the stack. Use it if so. */
1416 #ifdef HAVE_check_stack
1417 if (HAVE_check_stack)
1418 {
1419 rtx last_addr
1420 = force_operand (gen_rtx_STACK_GROW_OP (Pmode,
1421 stack_pointer_rtx,
1422 plus_constant (size, first)),
1423 NULL_RTX);
1424
1425 if (insn_operand_predicate[(int) CODE_FOR_check_stack][0]
1426 && ! ((*insn_operand_predicate[(int) CODE_FOR_check_stack][0])
1427 (last_addr, Pmode)))
1428 last_addr = copy_to_mode_reg (Pmode, last_addr);
1429
1430 emit_insn (gen_check_stack (last_addr));
1431 return;
1432 }
1433 #endif
1434
1435 /* If we have to generate explicit probes, see if we have a constant
1436 small number of them to generate. If so, that's the easy case. */
1437 if (GET_CODE (size) == CONST_INT
1438 && INTVAL (size) < 10 * STACK_CHECK_PROBE_INTERVAL)
1439 {
1440 HOST_WIDE_INT offset;
1441
1442 /* Start probing at FIRST + N * STACK_CHECK_PROBE_INTERVAL
1443 for values of N from 1 until it exceeds LAST. If only one
1444 probe is needed, this will not generate any code. Then probe
1445 at LAST. */
1446 for (offset = first + STACK_CHECK_PROBE_INTERVAL;
1447 offset < INTVAL (size);
1448 offset = offset + STACK_CHECK_PROBE_INTERVAL)
1449 emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1450 stack_pointer_rtx,
1451 GEN_INT (offset)));
1452
1453 emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1454 stack_pointer_rtx,
1455 plus_constant (size, first)));
1456 }
1457
1458 /* In the variable case, do the same as above, but in a loop. We emit loop
1459 notes so that loop optimization can be done. */
1460 else
1461 {
1462 rtx test_addr
1463 = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1464 stack_pointer_rtx,
1465 GEN_INT (first + STACK_CHECK_PROBE_INTERVAL)),
1466 NULL_RTX);
1467 rtx last_addr
1468 = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1469 stack_pointer_rtx,
1470 plus_constant (size, first)),
1471 NULL_RTX);
1472 rtx incr = GEN_INT (STACK_CHECK_PROBE_INTERVAL);
1473 rtx loop_lab = gen_label_rtx ();
1474 rtx test_lab = gen_label_rtx ();
1475 rtx end_lab = gen_label_rtx ();
1476 rtx temp;
1477
1478 if (GET_CODE (test_addr) != REG
1479 || REGNO (test_addr) < FIRST_PSEUDO_REGISTER)
1480 test_addr = force_reg (Pmode, test_addr);
1481
1482 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
1483 emit_jump (test_lab);
1484
1485 emit_label (loop_lab);
1486 emit_stack_probe (test_addr);
1487
1488 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
1489
1490 #ifdef STACK_GROWS_DOWNWARD
1491 #define CMP_OPCODE GTU
1492 temp = expand_binop (Pmode, sub_optab, test_addr, incr, test_addr,
1493 1, OPTAB_WIDEN);
1494 #else
1495 #define CMP_OPCODE LTU
1496 temp = expand_binop (Pmode, add_optab, test_addr, incr, test_addr,
1497 1, OPTAB_WIDEN);
1498 #endif
1499
1500 if (temp != test_addr)
1501 abort ();
1502
1503 emit_label (test_lab);
1504 emit_cmp_and_jump_insns (test_addr, last_addr, CMP_OPCODE,
1505 NULL_RTX, Pmode, 1, 0, loop_lab);
1506 emit_jump (end_lab);
1507 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
1508 emit_label (end_lab);
1509
1510 /* If will be doing stupid optimization, show test_addr is still live. */
1511 if (obey_regdecls)
1512 emit_insn (gen_rtx_USE (VOIDmode, test_addr));
1513
1514 emit_stack_probe (last_addr);
1515 }
1516 }
1517 \f
1518 /* Return an rtx representing the register or memory location
1519 in which a scalar value of data type VALTYPE
1520 was returned by a function call to function FUNC.
1521 FUNC is a FUNCTION_DECL node if the precise function is known,
1522 otherwise 0. */
1523
1524 rtx
1525 hard_function_value (valtype, func)
1526 tree valtype;
1527 tree func ATTRIBUTE_UNUSED;
1528 {
1529 rtx val = FUNCTION_VALUE (valtype, func);
1530 if (GET_CODE (val) == REG
1531 && GET_MODE (val) == BLKmode)
1532 {
1533 int bytes = int_size_in_bytes (valtype);
1534 enum machine_mode tmpmode;
1535 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1536 tmpmode != VOIDmode;
1537 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
1538 {
1539 /* Have we found a large enough mode? */
1540 if (GET_MODE_SIZE (tmpmode) >= bytes)
1541 break;
1542 }
1543
1544 /* No suitable mode found. */
1545 if (tmpmode == VOIDmode)
1546 abort ();
1547
1548 PUT_MODE (val, tmpmode);
1549 }
1550 return val;
1551 }
1552
1553 /* Return an rtx representing the register or memory location
1554 in which a scalar value of mode MODE was returned by a library call. */
1555
1556 rtx
1557 hard_libcall_value (mode)
1558 enum machine_mode mode;
1559 {
1560 return LIBCALL_VALUE (mode);
1561 }
1562
1563 /* Look up the tree code for a given rtx code
1564 to provide the arithmetic operation for REAL_ARITHMETIC.
1565 The function returns an int because the caller may not know
1566 what `enum tree_code' means. */
1567
1568 int
1569 rtx_to_tree_code (code)
1570 enum rtx_code code;
1571 {
1572 enum tree_code tcode;
1573
1574 switch (code)
1575 {
1576 case PLUS:
1577 tcode = PLUS_EXPR;
1578 break;
1579 case MINUS:
1580 tcode = MINUS_EXPR;
1581 break;
1582 case MULT:
1583 tcode = MULT_EXPR;
1584 break;
1585 case DIV:
1586 tcode = RDIV_EXPR;
1587 break;
1588 case SMIN:
1589 tcode = MIN_EXPR;
1590 break;
1591 case SMAX:
1592 tcode = MAX_EXPR;
1593 break;
1594 default:
1595 tcode = LAST_AND_UNUSED_TREE_CODE;
1596 break;
1597 }
1598 return ((int) tcode);
1599 }