]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/explow.c
explow.c (plus_constant_wide): Don't immediately return with result of recursive...
[thirdparty/gcc.git] / gcc / explow.c
1 /* Subroutines for manipulating rtx's in semantically interesting ways.
2 Copyright (C) 1987, 91, 94-97, 1998 Free Software Foundation, Inc.
3
4 This file is part of GNU CC.
5
6 GNU CC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 2, or (at your option)
9 any later version.
10
11 GNU CC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GNU CC; see the file COPYING. If not, write to
18 the Free Software Foundation, 59 Temple Place - Suite 330,
19 Boston, MA 02111-1307, USA. */
20
21
22 #include "config.h"
23 #include "system.h"
24 #include "rtl.h"
25 #include "tree.h"
26 #include "flags.h"
27 #include "expr.h"
28 #include "hard-reg-set.h"
29 #include "insn-config.h"
30 #include "recog.h"
31 #include "insn-flags.h"
32 #include "insn-codes.h"
33
34 #if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
35 #define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
36 #endif
37
38 static rtx break_out_memory_refs PROTO((rtx));
39 static void emit_stack_probe PROTO((rtx));
/* Return an rtx for the sum of X and the integer C.

   This function should be used via the `plus_constant' macro.  */

rtx
plus_constant_wide (x, c)
     register rtx x;
     register HOST_WIDE_INT c;
{
  register RTX_CODE code;
  register enum machine_mode mode;
  register rtx tem;
  int all_constant = 0;

  /* Adding zero is a no-op.  */
  if (c == 0)
    return x;

 restart:

  code = GET_CODE (x);
  mode = GET_MODE (x);
  switch (code)
    {
    case CONST_INT:
      return GEN_INT (INTVAL (x) + c);

    case CONST_DOUBLE:
      {
	/* Do the addition in double-word arithmetic so constants wider
	   than a host word are handled.  H2 is the sign-extension of C.  */
	HOST_WIDE_INT l1 = CONST_DOUBLE_LOW (x);
	HOST_WIDE_INT h1 = CONST_DOUBLE_HIGH (x);
	HOST_WIDE_INT l2 = c;
	HOST_WIDE_INT h2 = c < 0 ? ~0 : 0;
	HOST_WIDE_INT lv, hv;

	add_double (l1, h1, l2, h2, &lv, &hv);

	return immed_double_const (lv, hv, VOIDmode);
      }

    case MEM:
      /* If this is a reference to the constant pool, try replacing it with
	 a reference to a new constant.  If the resulting address isn't
	 valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
	  && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
	{
	  /* Any rtl we create here must go in a saveable obstack, since
	     we might have been called from within combine.  */
	  push_obstacks_nochange ();
	  rtl_in_saveable_obstack ();
	  tem
	    = force_const_mem (GET_MODE (x),
			       plus_constant (get_pool_constant (XEXP (x, 0)),
					      c));
	  pop_obstacks ();
	  if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
	    return tem;
	}
      break;

    case CONST:
      /* If adding to something entirely constant, set a flag
	 so that we can add a CONST around the result.  */
      x = XEXP (x, 0);
      all_constant = 1;
      goto restart;

    case SYMBOL_REF:
    case LABEL_REF:
      all_constant = 1;
      break;

    case PLUS:
      /* The interesting case is adding the integer to a sum.
	 Look for constant term in the sum and combine
	 with C.  For an integer constant term, we make a combined
	 integer.  For a constant term that is not an explicit integer,
	 we cannot really combine, but group them together anyway.

	 Restart or use a recursive call in case the remaining operand is
	 something that we handle specially, such as a SYMBOL_REF.

	 We may not immediately return from the recursive call here, lest
	 all_constant gets lost.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
	{
	  c += INTVAL (XEXP (x, 1));
	  x = XEXP (x, 0);
	  goto restart;
	}
      else if (CONSTANT_P (XEXP (x, 0)))
	{
	  x = gen_rtx_PLUS (mode,
			    plus_constant (XEXP (x, 0), c),
			    XEXP (x, 1));
	  c = 0;
	}
      else if (CONSTANT_P (XEXP (x, 1)))
	{
	  x = gen_rtx_PLUS (mode,
			    XEXP (x, 0),
			    plus_constant (XEXP (x, 1), c));
	  c = 0;
	}
      break;

    default:
      break;
    }

  /* Any leftover constant becomes an explicit PLUS term.  */
  if (c != 0)
    x = gen_rtx_PLUS (mode, x, GEN_INT (c));

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    /* Everything folded to a constant expression; mark it with CONST.  */
    return gen_rtx_CONST (mode, x);
  else
    return x;
}
161
162 /* This is the same as `plus_constant', except that it handles LO_SUM.
163
164 This function should be used via the `plus_constant_for_output' macro. */
165
166 rtx
167 plus_constant_for_output_wide (x, c)
168 register rtx x;
169 register HOST_WIDE_INT c;
170 {
171 register enum machine_mode mode = GET_MODE (x);
172
173 if (GET_CODE (x) == LO_SUM)
174 return gen_rtx_LO_SUM (mode, XEXP (x, 0),
175 plus_constant_for_output (XEXP (x, 1), c));
176
177 else
178 return plus_constant (x, c);
179 }
180 \f
181 /* If X is a sum, return a new sum like X but lacking any constant terms.
182 Add all the removed constant terms into *CONSTPTR.
183 X itself is not altered. The result != X if and only if
184 it is not isomorphic to X. */
185
186 rtx
187 eliminate_constant_term (x, constptr)
188 rtx x;
189 rtx *constptr;
190 {
191 register rtx x0, x1;
192 rtx tem;
193
194 if (GET_CODE (x) != PLUS)
195 return x;
196
197 /* First handle constants appearing at this level explicitly. */
198 if (GET_CODE (XEXP (x, 1)) == CONST_INT
199 && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
200 XEXP (x, 1)))
201 && GET_CODE (tem) == CONST_INT)
202 {
203 *constptr = tem;
204 return eliminate_constant_term (XEXP (x, 0), constptr);
205 }
206
207 tem = const0_rtx;
208 x0 = eliminate_constant_term (XEXP (x, 0), &tem);
209 x1 = eliminate_constant_term (XEXP (x, 1), &tem);
210 if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
211 && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
212 *constptr, tem))
213 && GET_CODE (tem) == CONST_INT)
214 {
215 *constptr = tem;
216 return gen_rtx_PLUS (GET_MODE (x), x0, x1);
217 }
218
219 return x;
220 }
221
/* Returns the insn that next references REG after INSN, or 0
   if REG is clobbered before next referenced or we cannot find
   an insn that references REG in a straight-line piece of code.  */

rtx
find_next_ref (reg, insn)
     rtx reg;
     rtx insn;
{
  rtx next;

  for (insn = NEXT_INSN (insn); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE)
	continue;
      /* A label or barrier means other control flow may join or the
	 straight-line region ends; give up.  */
      if (GET_CODE (insn) == CODE_LABEL
	  || GET_CODE (insn) == BARRIER)
	return 0;
      if (GET_CODE (insn) == INSN
	  || GET_CODE (insn) == JUMP_INSN
	  || GET_CODE (insn) == CALL_INSN)
	{
	  /* REG is clobbered before any use: no next reference.  */
	  if (reg_set_p (reg, insn))
	    return 0;
	  if (reg_mentioned_p (reg, PATTERN (insn)))
	    return insn;
	  if (GET_CODE (insn) == JUMP_INSN)
	    {
	      /* Follow an unconditional jump to its target; any other
		 jump ends the straight-line scan.  */
	      if (simplejump_p (insn))
		next = JUMP_LABEL (insn);
	      else
		return 0;
	    }
	  /* A call clobbers call-used hard registers.  */
	  if (GET_CODE (insn) == CALL_INSN
	      && REGNO (reg) < FIRST_PSEUDO_REGISTER
	      && call_used_regs[REGNO (reg)])
	    return 0;
	}
      else
	abort ();
    }
  return 0;
}
266
267 /* Return an rtx for the size in bytes of the value of EXP. */
268
269 rtx
270 expr_size (exp)
271 tree exp;
272 {
273 tree size = size_in_bytes (TREE_TYPE (exp));
274
275 if (TREE_CODE (size) != INTEGER_CST
276 && contains_placeholder_p (size))
277 size = build (WITH_RECORD_EXPR, sizetype, size, exp);
278
279 return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype),
280 EXPAND_MEMORY_USE_BAD);
281 }
282 \f
283 /* Return a copy of X in which all memory references
284 and all constants that involve symbol refs
285 have been replaced with new temporary registers.
286 Also emit code to load the memory locations and constants
287 into those registers.
288
289 If X contains no such constants or memory references,
290 X itself (not a copy) is returned.
291
292 If a constant is found in the address that is not a legitimate constant
293 in an insn, it is left alone in the hope that it might be valid in the
294 address.
295
296 X may contain no arithmetic except addition, subtraction and multiplication.
297 Values returned by expand_expr with 1 for sum_ok fit this constraint. */
298
299 static rtx
300 break_out_memory_refs (x)
301 register rtx x;
302 {
303 if (GET_CODE (x) == MEM
304 || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
305 && GET_MODE (x) != VOIDmode))
306 x = force_reg (GET_MODE (x), x);
307 else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
308 || GET_CODE (x) == MULT)
309 {
310 register rtx op0 = break_out_memory_refs (XEXP (x, 0));
311 register rtx op1 = break_out_memory_refs (XEXP (x, 1));
312
313 if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
314 x = gen_rtx_fmt_ee (GET_CODE (x), Pmode, op0, op1);
315 }
316
317 return x;
318 }
319
#ifdef POINTERS_EXTEND_UNSIGNED

/* Given X, a memory address in ptr_mode, convert it to an address
   in Pmode, or vice versa (TO_MODE says which way).  We take advantage of
   the fact that pointers are not allowed to overflow by commuting arithmetic
   operations over conversions so that address arithmetic insns can be
   used.  */

rtx
convert_memory_address (to_mode, x)
     enum machine_mode to_mode;
     rtx x;
{
  enum machine_mode from_mode = to_mode == ptr_mode ? Pmode : ptr_mode;
  rtx temp;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    {
    case CONST_INT:
    case CONST_DOUBLE:
      /* Integer constants need no conversion.  */
      return x;

    case LABEL_REF:
      /* Rebuild the LABEL_REF in the new mode, preserving its flag.  */
      temp = gen_rtx_LABEL_REF (to_mode, XEXP (x, 0));
      LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
      return temp;

    case SYMBOL_REF:
      /* Likewise for a SYMBOL_REF, copying its flags.  */
      temp = gen_rtx_SYMBOL_REF (to_mode, XSTR (x, 0));
      SYMBOL_REF_FLAG (temp) = SYMBOL_REF_FLAG (x);
      CONSTANT_POOL_ADDRESS_P (temp) = CONSTANT_POOL_ADDRESS_P (x);
      return temp;

    case CONST:
      /* Convert inside the CONST wrapper.  */
      return gen_rtx_CONST (to_mode,
			    convert_memory_address (to_mode, XEXP (x, 0)));

    case PLUS:
    case MULT:
      /* If for addition the second operand is a small constant, we can
	 safely permute the conversion and addition operation.  We can
	 always safely permute them if we are making the address narrower.
	 In addition, always permute the operations if this is a constant.
	 (The INTVAL + 20000 < 40000 test accepts any CONST_INT roughly
	 in the range (-20000, 20000), i.e. "small".)  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
	  || (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT
	      && (INTVAL (XEXP (x, 1)) + 20000 < 40000
		  || CONSTANT_P (XEXP (x, 0)))))
	return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
			       convert_memory_address (to_mode, XEXP (x, 0)),
			       convert_memory_address (to_mode, XEXP (x, 1)));
      break;

    default:
      break;
    }

  /* Default: extend or truncate the address like an ordinary value.  */
  return convert_modes (to_mode, from_mode,
			x, POINTERS_EXTEND_UNSIGNED);
}
#endif
382
383 /* Given a memory address or facsimile X, construct a new address,
384 currently equivalent, that is stable: future stores won't change it.
385
386 X must be composed of constants, register and memory references
387 combined with addition, subtraction and multiplication:
388 in other words, just what you can get from expand_expr if sum_ok is 1.
389
390 Works by making copies of all regs and memory locations used
391 by X and combining them the same way X does.
392 You could also stabilize the reference to this address
393 by copying the address to a register with copy_to_reg;
394 but then you wouldn't get indexed addressing in the reference. */
395
396 rtx
397 copy_all_regs (x)
398 register rtx x;
399 {
400 if (GET_CODE (x) == REG)
401 {
402 if (REGNO (x) != FRAME_POINTER_REGNUM
403 #if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
404 && REGNO (x) != HARD_FRAME_POINTER_REGNUM
405 #endif
406 )
407 x = copy_to_reg (x);
408 }
409 else if (GET_CODE (x) == MEM)
410 x = copy_to_reg (x);
411 else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
412 || GET_CODE (x) == MULT)
413 {
414 register rtx op0 = copy_all_regs (XEXP (x, 0));
415 register rtx op1 = copy_all_regs (XEXP (x, 1));
416 if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
417 x = gen_rtx_fmt_ee (GET_CODE (x), Pmode, op0, op1);
418 }
419 return x;
420 }
421 \f
/* Return something equivalent to X but valid as a memory address
   for something of mode MODE.  When X is not itself valid, this
   works by copying X or subexpressions of it into registers.  */

rtx
memory_address (mode, x)
     enum machine_mode mode;
     register rtx x;
{
  register rtx oldx = x;

  /* An ADDRESSOF placeholder is acceptable as-is.  */
  if (GET_CODE (x) == ADDRESSOF)
    return x;

#ifdef POINTERS_EXTEND_UNSIGNED
  /* Addresses are manipulated in Pmode; convert from ptr_mode first.  */
  if (GET_MODE (x) == ptr_mode)
    x = convert_memory_address (Pmode, x);
#endif

  /* By passing constant addresses thru registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (Pmode, x);

  /* Accept a QUEUED that refers to a REG
     even though that isn't a valid address.
     On attempting to put this in an insn we will call protect_from_queue
     which will turn it into a REG, which is valid.  */
  else if (GET_CODE (x) == QUEUED
	   && GET_CODE (QUEUED_VAR (x)) == REG)
    ;

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && GET_CODE (x) != REG)
	x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.
	 This macro jumps to the `win' label below when X is valid.  */
      GO_IF_LEGITIMATE_ADDRESS (mode, x, win);

      /* If it was valid before but breaking out memory refs invalidated it,
	 use it the old way.  */
      if (memory_address_p (mode, oldx))
	goto win2;

      /* Perform machine-dependent transformations on X
	 in certain cases.  This is not necessary since the code
	 below can handle all possible cases, but machine-dependent
	 transformations can make better code.  This macro may also
	 jump to `win' when it succeeds.  */
      LEGITIMIZE_ADDRESS (x, oldx, mode, win);

      /* PLUS and MULT can appear in special ways
	 as the result of attempts to make an address usable for indexing.
	 Usually they are dealt with by calling force_operand, below.
	 But a sum containing constant terms is special
	 if removing them makes the sum a valid address:
	 then we generate that address in a register
	 and index off of it.  We do this because it often makes
	 shorter code, and because the addresses thus generated
	 in registers often become common subexpressions.  */
      if (GET_CODE (x) == PLUS)
	{
	  rtx constant_term = const0_rtx;
	  rtx y = eliminate_constant_term (x, &constant_term);
	  if (constant_term == const0_rtx
	      || ! memory_address_p (mode, y))
	    x = force_operand (x, NULL_RTX);
	  else
	    {
	      y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
	      if (! memory_address_p (mode, y))
		x = force_operand (x, NULL_RTX);
	      else
		x = y;
	    }
	}

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
	x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
	 it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (GET_CODE (x) == REG)
	x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
	 the register is a valid address.  */
      else
	x = force_reg (Pmode, x);

      goto done;

    win2:
      x = oldx;
    win:
      if (flag_force_addr && ! cse_not_expected && GET_CODE (x) != REG
	  /* Don't copy an addr via a reg if it is one of our stack slots.  */
	  && ! (GET_CODE (x) == PLUS
		&& (XEXP (x, 0) == virtual_stack_vars_rtx
		    || XEXP (x, 0) == virtual_incoming_args_rtx)))
	{
	  if (general_operand (x, Pmode))
	    x = force_reg (Pmode, x);
	  else
	    x = force_operand (x, NULL_RTX);
	}
    }

 done:

  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (GET_CODE (x) == REG)
    mark_reg_pointer (x, 1);
  else if (GET_CODE (x) == PLUS
	   && GET_CODE (XEXP (x, 0)) == REG
	   && GET_CODE (XEXP (x, 1)) == CONST_INT)
    mark_reg_pointer (XEXP (x, 0), 1);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}
553
554 /* Like `memory_address' but pretend `flag_force_addr' is 0. */
555
556 rtx
557 memory_address_noforce (mode, x)
558 enum machine_mode mode;
559 rtx x;
560 {
561 int ambient_force_addr = flag_force_addr;
562 rtx val;
563
564 flag_force_addr = 0;
565 val = memory_address (mode, x);
566 flag_force_addr = ambient_force_addr;
567 return val;
568 }
569
570 /* Convert a mem ref into one with a valid memory address.
571 Pass through anything else unchanged. */
572
573 rtx
574 validize_mem (ref)
575 rtx ref;
576 {
577 if (GET_CODE (ref) != MEM)
578 return ref;
579 if (memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
580 return ref;
581 /* Don't alter REF itself, since that is probably a stack slot. */
582 return change_address (ref, GET_MODE (ref), XEXP (ref, 0));
583 }
584 \f
585 /* Return a modified copy of X with its memory address copied
586 into a temporary register to protect it from side effects.
587 If X is not a MEM, it is returned unchanged (and not copied).
588 Perhaps even if it is a MEM, if there is no need to change it. */
589
590 rtx
591 stabilize (x)
592 rtx x;
593 {
594 register rtx addr;
595 if (GET_CODE (x) != MEM)
596 return x;
597 addr = XEXP (x, 0);
598 if (rtx_unstable_p (addr))
599 {
600 rtx temp = copy_all_regs (addr);
601 rtx mem;
602 if (GET_CODE (temp) != REG)
603 temp = copy_to_reg (temp);
604 mem = gen_rtx_MEM (GET_MODE (x), temp);
605
606 /* Mark returned memref with in_struct if it's in an array or
607 structure. Copy const and volatile from original memref. */
608
609 MEM_IN_STRUCT_P (mem) = MEM_IN_STRUCT_P (x) || GET_CODE (addr) == PLUS;
610 RTX_UNCHANGING_P (mem) = RTX_UNCHANGING_P (x);
611 MEM_VOLATILE_P (mem) = MEM_VOLATILE_P (x);
612
613 /* Since the new MEM is just like the old X, it can alias only
614 the things that X could. */
615 MEM_ALIAS_SET (mem) = MEM_ALIAS_SET (x);
616
617 return mem;
618 }
619 return x;
620 }
621 \f
622 /* Copy the value or contents of X to a new temp reg and return that reg. */
623
624 rtx
625 copy_to_reg (x)
626 rtx x;
627 {
628 register rtx temp = gen_reg_rtx (GET_MODE (x));
629
630 /* If not an operand, must be an address with PLUS and MULT so
631 do the computation. */
632 if (! general_operand (x, VOIDmode))
633 x = force_operand (x, temp);
634
635 if (x != temp)
636 emit_move_insn (temp, x);
637
638 return temp;
639 }
640
/* Like copy_to_reg but always give the new register mode Pmode
   in case X is a constant.  */

rtx
copy_addr_to_reg (x)
     rtx x;
{
  /* Addresses are always Pmode values.  */
  return copy_to_mode_reg (Pmode, x);
}
650
651 /* Like copy_to_reg but always give the new register mode MODE
652 in case X is a constant. */
653
654 rtx
655 copy_to_mode_reg (mode, x)
656 enum machine_mode mode;
657 rtx x;
658 {
659 register rtx temp = gen_reg_rtx (mode);
660
661 /* If not an operand, must be an address with PLUS and MULT so
662 do the computation. */
663 if (! general_operand (x, VOIDmode))
664 x = force_operand (x, temp);
665
666 if (GET_MODE (x) != mode && GET_MODE (x) != VOIDmode)
667 abort ();
668 if (x != temp)
669 emit_move_insn (temp, x);
670 return temp;
671 }
672
673 /* Load X into a register if it is not already one.
674 Use mode MODE for the register.
675 X should be valid for mode MODE, but it may be a constant which
676 is valid for all integer modes; that's why caller must specify MODE.
677
678 The caller must not alter the value in the register we return,
679 since we mark it as a "constant" register. */
680
681 rtx
682 force_reg (mode, x)
683 enum machine_mode mode;
684 rtx x;
685 {
686 register rtx temp, insn, set;
687
688 if (GET_CODE (x) == REG)
689 return x;
690 temp = gen_reg_rtx (mode);
691 insn = emit_move_insn (temp, x);
692
693 /* Let optimizers know that TEMP's value never changes
694 and that X can be substituted for it. Don't get confused
695 if INSN set something else (such as a SUBREG of TEMP). */
696 if (CONSTANT_P (x)
697 && (set = single_set (insn)) != 0
698 && SET_DEST (set) == temp)
699 {
700 rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);
701
702 if (note)
703 XEXP (note, 0) = x;
704 else
705 REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, x, REG_NOTES (insn));
706 }
707 return temp;
708 }
709
710 /* If X is a memory ref, copy its contents to a new temp reg and return
711 that reg. Otherwise, return X. */
712
713 rtx
714 force_not_mem (x)
715 rtx x;
716 {
717 register rtx temp;
718 if (GET_CODE (x) != MEM || GET_MODE (x) == BLKmode)
719 return x;
720 temp = gen_reg_rtx (GET_MODE (x));
721 emit_move_insn (temp, x);
722 return temp;
723 }
724
725 /* Copy X to TARGET (if it's nonzero and a reg)
726 or to a new temp reg and return that reg.
727 MODE is the mode to use for X in case it is a constant. */
728
729 rtx
730 copy_to_suggested_reg (x, target, mode)
731 rtx x, target;
732 enum machine_mode mode;
733 {
734 register rtx temp;
735
736 if (target && GET_CODE (target) == REG)
737 temp = target;
738 else
739 temp = gen_reg_rtx (mode);
740
741 emit_move_insn (temp, x);
742 return temp;
743 }
744 \f
/* Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_CALL is non-zero if this call is promoting args for a call.  */

enum machine_mode
promote_mode (type, mode, punsignedp, for_call)
     tree type;
     enum machine_mode mode;
     int *punsignedp;
     int for_call ATTRIBUTE_UNUSED;
{
  enum tree_code code = TREE_CODE (type);
  int unsignedp = *punsignedp;

#ifdef PROMOTE_FOR_CALL_ONLY
  /* This target promotes only call arguments; leave others alone.  */
  if (! for_call)
    return mode;
#endif

  switch (code)
    {
#ifdef PROMOTE_MODE
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case CHAR_TYPE:      case REAL_TYPE:       case OFFSET_TYPE:
      /* Let the target widen MODE and adjust the signedness.  */
      PROMOTE_MODE (mode, unsignedp, type);
      break;
#endif

#ifdef POINTERS_EXTEND_UNSIGNED
    case REFERENCE_TYPE:
    case POINTER_TYPE:
      /* Pointers are held in Pmode, extended per the target's rule.  */
      mode = Pmode;
      unsignedp = POINTERS_EXTEND_UNSIGNED;
      break;
#endif

    default:
      break;
    }

  *punsignedp = unsignedp;
  return mode;
}
790 \f
/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
   This pops when ADJUST is positive.  ADJUST need not be constant.  */

void
adjust_stack (adjust)
     rtx adjust;
{
  rtx temp;
  adjust = protect_from_queue (adjust, 0);

  /* Popping zero bytes needs no code.  */
  if (adjust == const0_rtx)
    return;

  /* Popping means moving the stack pointer back toward where the stack
     grows from: add on a downward-growing stack, subtract otherwise.  */
  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
		       add_optab,
#else
		       sub_optab,
#endif
		       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
		       OPTAB_LIB_WIDEN);

  /* expand_binop may deliver the result somewhere other than the
     requested target; copy it into the stack pointer if so.  */
  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}
816
/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
   This pushes when ADJUST is positive.  ADJUST need not be constant.  */

void
anti_adjust_stack (adjust)
     rtx adjust;
{
  rtx temp;
  adjust = protect_from_queue (adjust, 0);

  /* Pushing zero bytes needs no code.  */
  if (adjust == const0_rtx)
    return;

  /* Pushing means moving the stack pointer in the growth direction:
     subtract on a downward-growing stack, add otherwise.  */
  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
		       sub_optab,
#else
		       add_optab,
#endif
		       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
		       OPTAB_LIB_WIDEN);

  /* expand_binop may deliver the result somewhere other than the
     requested target; copy it into the stack pointer if so.  */
  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}
842
/* Round the size of a block to be pushed up to the boundary required
   by this machine.  SIZE is the desired size, which need not be constant.  */

rtx
round_push (size)
     rtx size;
{
#ifdef PREFERRED_STACK_BOUNDARY
  int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  /* Byte alignment means nothing to round to.  */
  if (align == 1)
    return size;
  if (GET_CODE (size) == CONST_INT)
    {
      /* A compile-time constant is rounded up at compile time.  */
      int new = (INTVAL (size) + align - 1) / align * align;
      if (INTVAL (size) != new)
	size = GEN_INT (new);
    }
  else
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
	 but we know it can't.  So add ourselves and then do
	 TRUNC_DIV_EXPR.  */
      size = expand_binop (Pmode, add_optab, size, GEN_INT (align - 1),
			   NULL_RTX, 1, OPTAB_LIB_WIDEN);
      size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, GEN_INT (align),
			    NULL_RTX, 1);
      size = expand_mult (Pmode, size, GEN_INT (align), NULL_RTX, 1);
    }
#endif /* PREFERRED_STACK_BOUNDARY */
  return size;
}
874 \f
/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
   to a previously-created save area.  If no save area has been allocated,
   this function will allocate one.  If a save area is specified, it
   must be of the proper mode.

   The insns are emitted after insn AFTER, if nonzero, otherwise the insns
   are emitted at the current position.  */

void
emit_stack_save (save_level, psave, after)
     enum save_level save_level;
     rtx *psave;
     rtx after;
{
  rtx sa = *psave;
  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx (*fcn) PROTO ((rtx, rtx)) = gen_move_insn;
  enum machine_mode mode = STACK_SAVEAREA_MODE (save_level);

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_save_stack_block
    case SAVE_BLOCK:
      if (HAVE_save_stack_block)
	fcn = gen_save_stack_block;
      break;
#endif
#ifdef HAVE_save_stack_function
    case SAVE_FUNCTION:
      if (HAVE_save_stack_function)
	fcn = gen_save_stack_function;
      break;
#endif
#ifdef HAVE_save_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_save_stack_nonlocal)
	fcn = gen_save_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  /* If there is no save area and we have to allocate one, do so.  Otherwise
     verify the save area is the proper mode.  */

  if (sa == 0)
    {
      if (mode != VOIDmode)
	{
	  /* A nonlocal save must live in memory so other frames can reach
	     it; other kinds can use a pseudo register.  */
	  if (save_level == SAVE_NONLOCAL)
	    *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
	  else
	    *psave = sa = gen_reg_rtx (mode);
	}
    }
  else
    {
      if (mode == VOIDmode || GET_MODE (sa) != mode)
	abort ();
    }

  if (after)
    {
      rtx seq;

      start_sequence ();
      /* We must validize inside the sequence, to ensure that any instructions
	 created by the validize call also get moved to the right place.  */
      if (sa != 0)
	sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
      seq = gen_sequence ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    {
      if (sa != 0)
	sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
    }
}
959
/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
   area made by emit_stack_save.  If it is zero, we have nothing to do.

   Put any emitted insns after insn AFTER, if nonzero, otherwise at
   current position.  */

void
emit_stack_restore (save_level, sa, after)
     enum save_level save_level;
     /* NOTE(review): these K&R declarations are listed in the opposite
	order from the parameter list; K&R matches by name, and both are
	rtx, so this is harmless -- but confirm before reordering.  */
     rtx after;
     rtx sa;
{
  /* The default is that we use a move insn.  */
  rtx (*fcn) PROTO ((rtx, rtx)) = gen_move_insn;

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_restore_stack_block
    case SAVE_BLOCK:
      if (HAVE_restore_stack_block)
	fcn = gen_restore_stack_block;
      break;
#endif
#ifdef HAVE_restore_stack_function
    case SAVE_FUNCTION:
      if (HAVE_restore_stack_function)
	fcn = gen_restore_stack_function;
      break;
#endif
#ifdef HAVE_restore_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_restore_stack_nonlocal)
	fcn = gen_restore_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  /* Make sure the save area's address is legitimate before using it.  */
  if (sa != 0)
    sa = validize_mem (sa);

  if (after)
    {
      /* Build the restore insns in a sequence so they can be placed
	 after AFTER as a unit.  */
      rtx seq;

      start_sequence ();
      emit_insn (fcn (stack_pointer_rtx, sa));
      seq = gen_sequence ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    emit_insn (fcn (stack_pointer_rtx, sa));
}
1016 \f
#ifdef SETJMP_VIA_SAVE_AREA
/* Optimize RTL generated by allocate_dynamic_stack_space for targets
   where SETJMP_VIA_SAVE_AREA is true.  The problem is that on these
   platforms, the dynamic stack space used can corrupt the original
   frame, thus causing a crash if a longjmp unwinds to it.  */

void
optimize_save_area_alloca (insns)
     rtx insns;
{
  rtx insn;

  for (insn = insns; insn; insn = NEXT_INSN(insn))
    {
      rtx note;

      if (GET_CODE (insn) != INSN)
	continue;

      /* Look for the REG_SAVE_AREA note this pass is about.  */
      for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
	{
	  if (REG_NOTE_KIND (note) != REG_SAVE_AREA)
	    continue;

	  if (!current_function_calls_setjmp)
	    {
	      rtx pat = PATTERN (insn);

	      /* If we do not see the note in a pattern matching
		 these precise characteristics, we did something
		 entirely wrong in allocate_dynamic_stack_space.

		 Note, one way this could happen is if SETJMP_VIA_SAVE_AREA
		 was defined on a machine where stacks grow towards higher
		 addresses.

		 Right now only supported port with stack that grow upward
		 is the HPPA and it does not define SETJMP_VIA_SAVE_AREA.  */
	      if (GET_CODE (pat) != SET
		  || SET_DEST (pat) != stack_pointer_rtx
		  || GET_CODE (SET_SRC (pat)) != MINUS
		  || XEXP (SET_SRC (pat), 0) != stack_pointer_rtx)
		abort ();

	      /* This will now be transformed into a (set REG REG)
		 so we can just blow away all the other notes.  */
	      XEXP (SET_SRC (pat), 1) = XEXP (note, 0);
	      REG_NOTES (insn) = NULL_RTX;
	    }
	  else
	    {
	      /* setjmp was called, we must remove the REG_SAVE_AREA
		 note so that later passes do not get confused by its
		 presence.  */
	      if (note == REG_NOTES (insn))
		{
		  /* The note heads the list: just drop the head.  */
		  REG_NOTES (insn) = XEXP (note, 1);
		}
	      else
		{
		  /* Find the note's predecessor in the chain and
		     splice the note out.  */
		  rtx srch;

		  for (srch = REG_NOTES (insn); srch; srch = XEXP (srch, 1))
		    if (XEXP (srch, 1) == note)
		      break;

		  if (srch == NULL_RTX)
		    abort();

		  XEXP (srch, 1) = XEXP (note, 1);
		}
	    }
	  /* Once we've seen the note of interest, we need not look at
	     the rest of them.  */
	  break;
	}
    }
}
#endif /* SETJMP_VIA_SAVE_AREA */
1096
1097 /* Return an rtx representing the address of an area of memory dynamically
1098 pushed on the stack. This region of memory is always aligned to
1099 a multiple of BIGGEST_ALIGNMENT.
1100
1101 Any required stack pointer alignment is preserved.
1102
1103 SIZE is an rtx representing the size of the area.
1104 TARGET is a place in which the address can be placed.
1105
1106 KNOWN_ALIGN is the alignment (in bits) that we know SIZE has. */
1107
rtx
allocate_dynamic_stack_space (size, target, known_align)
     rtx size;
     rtx target;
     int known_align;
{
#ifdef SETJMP_VIA_SAVE_AREA
  /* Rounded-down size recorded in a REG_SAVE_AREA note so that
     optimize_save_area_alloca can rewrite the adjustment later if
     setjmp turns out not to be called.  */
  rtx setjmpless_size = NULL_RTX;
#endif

  /* If we're asking for zero bytes, it doesn't matter what we point
     to since we can't dereference it.  But return a reasonable
     address anyway.  */
  if (size == const0_rtx)
    return virtual_stack_dynamic_rtx;

  /* Otherwise, show we're calling alloca or equivalent.  */
  current_function_calls_alloca = 1;

  /* Ensure the size is in the proper mode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* We will need to ensure that the address we return is aligned to
     BIGGEST_ALIGNMENT.  If STACK_DYNAMIC_OFFSET is defined, we don't
     always know its final value at this point in the compilation (it
     might depend on the size of the outgoing parameter lists, for
     example), so we must align the value to be returned in that case.
     (Note that STACK_DYNAMIC_OFFSET will have a default non-zero value if
     STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).
     We must also do an alignment operation on the returned value if
     the stack pointer alignment is less strict than BIGGEST_ALIGNMENT.

     If we have to align, we must leave space in SIZE for the hole
     that might result from the alignment operation.  */

#if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET) || ! defined (PREFERRED_STACK_BOUNDARY)
#define MUST_ALIGN 1
#else
#define MUST_ALIGN (PREFERRED_STACK_BOUNDARY < BIGGEST_ALIGNMENT)
#endif

  if (MUST_ALIGN)
    {
      /* Pad SIZE so the block is still at least SIZE bytes after the
	 returned address is rounded up to BIGGEST_ALIGNMENT below.  */
      if (GET_CODE (size) == CONST_INT)
	size = GEN_INT (INTVAL (size)
			+ (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1));
      else
	size = expand_binop (Pmode, add_optab, size,
			     GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
			     NULL_RTX, 1, OPTAB_LIB_WIDEN);
    }

#ifdef SETJMP_VIA_SAVE_AREA
  /* If setjmp restores regs from a save area in the stack frame,
     avoid clobbering the reg save area.  Note that the offset of
     virtual_incoming_args_rtx includes the preallocated stack args space.
     It would be no problem to clobber that, but it's on the wrong side
     of the old save area.  */
  {
    rtx dynamic_offset
      = expand_binop (Pmode, sub_optab, virtual_stack_dynamic_rtx,
		      stack_pointer_rtx, NULL_RTX, 1, OPTAB_LIB_WIDEN);

    if (!current_function_calls_setjmp)
      {
	int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

	/* See optimize_save_area_alloca to understand what is being
	   set up here.  */

	/* NOTE(review): MUST_ALIGN is unconditionally #defined just
	   above, so the !defined(MUST_ALIGN) disjunct can never be
	   true; the condition reduces to the PREFERRED_STACK_BOUNDARY
	   checks — confirm this is the intended guard.  */
#if !defined(PREFERRED_STACK_BOUNDARY) || !defined(MUST_ALIGN) || (PREFERRED_STACK_BOUNDARY != BIGGEST_ALIGNMENT)
	/* If anyone creates a target with these characteristics, let them
	   know that our optimization cannot work correctly in such a case.  */
	abort();
#endif

	if (GET_CODE (size) == CONST_INT)
	  {
	    /* Round the constant size down to a multiple of the stack
	       boundary.  */
	    int new = INTVAL (size) / align * align;

	    if (INTVAL (size) != new)
	      setjmpless_size = GEN_INT (new);
	    else
	      setjmpless_size = size;
	  }
	else
	  {
	    /* Since we know overflow is not possible, we avoid using
	       CEIL_DIV_EXPR and use TRUNC_DIV_EXPR instead.  */
	    setjmpless_size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size,
					     GEN_INT (align), NULL_RTX, 1);
	    setjmpless_size = expand_mult (Pmode, setjmpless_size,
					   GEN_INT (align), NULL_RTX, 1);
	  }
	/* Our optimization works based upon being able to perform a simple
	   transformation of this RTL into a (set REG REG) so make sure things
	   did in fact end up in a REG.  */
	if (!arith_operand (setjmpless_size, Pmode))
	  setjmpless_size = force_reg (Pmode, setjmpless_size);
      }

    size = expand_binop (Pmode, add_optab, size, dynamic_offset,
			 NULL_RTX, 1, OPTAB_LIB_WIDEN);
  }
#endif /* SETJMP_VIA_SAVE_AREA */

  /* Round the size to a multiple of the required stack alignment.
     Since the stack is presumed to be rounded before this allocation,
     this will maintain the required alignment.

     If the stack grows downward, we could save an insn by subtracting
     SIZE from the stack pointer and then aligning the stack pointer.
     The problem with this is that the stack pointer may be unaligned
     between the execution of the subtraction and alignment insns and
     some machines do not allow this.  Even on those that do, some
     signal handlers malfunction if a signal should occur between those
     insns.  Since this is an extremely rare event, we have no reliable
     way of knowing which systems have this problem.  So we avoid even
     momentarily mis-aligning the stack.  */

#ifdef PREFERRED_STACK_BOUNDARY
  /* If we added a variable amount to SIZE,
     we can no longer assume it is aligned.  */
#if !defined (SETJMP_VIA_SAVE_AREA)
  if (MUST_ALIGN || known_align % PREFERRED_STACK_BOUNDARY != 0)
#endif
    size = round_push (size);
#endif

  do_pending_stack_adjust ();

  /* If needed, check that we have the required amount of stack.  Take into
     account what has already been checked.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    probe_stack_range (STACK_CHECK_MAX_FRAME_SIZE + STACK_CHECK_PROTECT, size);

  /* Don't use a TARGET that isn't a pseudo.  */
  if (target == 0 || GET_CODE (target) != REG
      || REGNO (target) < FIRST_PSEUDO_REGISTER)
    target = gen_reg_rtx (Pmode);

  mark_reg_pointer (target, known_align / BITS_PER_UNIT);

  /* Perform the required allocation from the stack.  Some systems do
     this differently than simply incrementing/decrementing from the
     stack pointer, such as acquiring the space by calling malloc().  */
#ifdef HAVE_allocate_stack
  if (HAVE_allocate_stack)
    {
      enum machine_mode mode = STACK_SIZE_MODE;

      /* Copy the operands into registers if the allocate_stack insn's
	 operand predicates reject them as-is.  */
      if (insn_operand_predicate[(int) CODE_FOR_allocate_stack][0]
	  && ! ((*insn_operand_predicate[(int) CODE_FOR_allocate_stack][0])
		(target, Pmode)))
	target = copy_to_mode_reg (Pmode, target);
      size = convert_modes (mode, ptr_mode, size, 1);
      if (insn_operand_predicate[(int) CODE_FOR_allocate_stack][1]
	  && ! ((*insn_operand_predicate[(int) CODE_FOR_allocate_stack][1])
		(size, mode)))
	size = copy_to_mode_reg (mode, size);

      emit_insn (gen_allocate_stack (target, size));
    }
  else
#endif
    {
#ifndef STACK_GROWS_DOWNWARD
      /* Stack grows upward: the pre-adjustment pointer is the address
	 of the new block, so record it before adjusting.  */
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif
      size = convert_modes (Pmode, ptr_mode, size, 1);
      anti_adjust_stack (size);
#ifdef SETJMP_VIA_SAVE_AREA
      if (setjmpless_size != NULL_RTX)
	{
	  /* Tag the stack adjustment just emitted so that
	     optimize_save_area_alloca can find and rewrite it.  */
	  rtx note_target = get_last_insn ();

	  REG_NOTES (note_target)
	    = gen_rtx_EXPR_LIST (REG_SAVE_AREA, setjmpless_size,
				 REG_NOTES (note_target));
	}
#endif /* SETJMP_VIA_SAVE_AREA */
#ifdef STACK_GROWS_DOWNWARD
      /* Stack grows downward: the post-adjustment pointer is the
	 address of the new block.  */
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif
    }

  if (MUST_ALIGN)
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
	 but we know it can't.  So add ourselves and then do
	 TRUNC_DIV_EXPR.  */
      target = expand_binop (Pmode, add_optab, target,
			     GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
			     NULL_RTX, 1, OPTAB_LIB_WIDEN);
      target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
			      GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
			      NULL_RTX, 1);
      target = expand_mult (Pmode, target,
			    GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
			    NULL_RTX, 1);
    }

  /* Some systems require a particular insn to refer to the stack
     to make the pages exist.  */
#ifdef HAVE_probe
  if (HAVE_probe)
    emit_insn (gen_probe ());
#endif

  /* Record the new stack level for nonlocal gotos.  */
  if (nonlocal_goto_handler_slots != 0)
    emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);

  return target;
}
1324 \f
1325 /* Emit one stack probe at ADDRESS, an address within the stack. */
1326
1327 static void
1328 emit_stack_probe (address)
1329 rtx address;
1330 {
1331 rtx memref = gen_rtx_MEM (word_mode, address);
1332
1333 MEM_VOLATILE_P (memref) = 1;
1334
1335 if (STACK_CHECK_PROBE_LOAD)
1336 emit_move_insn (gen_reg_rtx (word_mode), memref);
1337 else
1338 emit_move_insn (memref, const0_rtx);
1339 }
1340
1341 /* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
1342 FIRST is a constant and size is a Pmode RTX. These are offsets from the
1343 current stack pointer. STACK_GROWS_DOWNWARD says whether to add or
1344 subtract from the stack. If SIZE is constant, this is done
1345 with a fixed number of probes. Otherwise, we must make a loop. */
1346
1347 #ifdef STACK_GROWS_DOWNWARD
1348 #define STACK_GROW_OP MINUS
1349 #else
1350 #define STACK_GROW_OP PLUS
1351 #endif
1352
1353 void
1354 probe_stack_range (first, size)
1355 HOST_WIDE_INT first;
1356 rtx size;
1357 {
1358 /* First see if we have an insn to check the stack. Use it if so. */
1359 #ifdef HAVE_check_stack
1360 if (HAVE_check_stack)
1361 {
1362 rtx last_addr
1363 = force_operand (gen_rtx_STACK_GROW_OP (Pmode,
1364 stack_pointer_rtx,
1365 plus_constant (size, first)),
1366 NULL_RTX);
1367
1368 if (insn_operand_predicate[(int) CODE_FOR_check_stack][0]
1369 && ! ((*insn_operand_predicate[(int) CODE_FOR_check_stack][0])
1370 (last_address, Pmode)))
1371 last_address = copy_to_mode_reg (Pmode, last_address);
1372
1373 emit_insn (gen_check_stack (last_address));
1374 return;
1375 }
1376 #endif
1377
1378 /* If we have to generate explicit probes, see if we have a constant
1379 small number of them to generate. If so, that's the easy case. */
1380 if (GET_CODE (size) == CONST_INT
1381 && INTVAL (size) < 10 * STACK_CHECK_PROBE_INTERVAL)
1382 {
1383 HOST_WIDE_INT offset;
1384
1385 /* Start probing at FIRST + N * STACK_CHECK_PROBE_INTERVAL
1386 for values of N from 1 until it exceeds LAST. If only one
1387 probe is needed, this will not generate any code. Then probe
1388 at LAST. */
1389 for (offset = first + STACK_CHECK_PROBE_INTERVAL;
1390 offset < INTVAL (size);
1391 offset = offset + STACK_CHECK_PROBE_INTERVAL)
1392 emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1393 stack_pointer_rtx,
1394 GEN_INT (offset)));
1395
1396 emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1397 stack_pointer_rtx,
1398 plus_constant (size, first)));
1399 }
1400
1401 /* In the variable case, do the same as above, but in a loop. We emit loop
1402 notes so that loop optimization can be done. */
1403 else
1404 {
1405 rtx test_addr
1406 = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1407 stack_pointer_rtx,
1408 GEN_INT (first + STACK_CHECK_PROBE_INTERVAL)),
1409 NULL_RTX);
1410 rtx last_addr
1411 = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1412 stack_pointer_rtx,
1413 plus_constant (size, first)),
1414 NULL_RTX);
1415 rtx incr = GEN_INT (STACK_CHECK_PROBE_INTERVAL);
1416 rtx loop_lab = gen_label_rtx ();
1417 rtx test_lab = gen_label_rtx ();
1418 rtx end_lab = gen_label_rtx ();
1419 rtx temp;
1420
1421 if (GET_CODE (test_addr) != REG
1422 || REGNO (test_addr) < FIRST_PSEUDO_REGISTER)
1423 test_addr = force_reg (Pmode, test_addr);
1424
1425 emit_note (NULL_PTR, NOTE_INSN_LOOP_BEG);
1426 emit_jump (test_lab);
1427
1428 emit_label (loop_lab);
1429 emit_stack_probe (test_addr);
1430
1431 emit_note (NULL_PTR, NOTE_INSN_LOOP_CONT);
1432
1433 #ifdef STACK_GROWS_DOWNWARD
1434 #define CMP_OPCODE GTU
1435 temp = expand_binop (Pmode, sub_optab, test_addr, incr, test_addr,
1436 1, OPTAB_WIDEN);
1437 #else
1438 #define CMP_OPCODE LTU
1439 temp = expand_binop (Pmode, add_optab, test_addr, incr, test_addr,
1440 1, OPTAB_WIDEN);
1441 #endif
1442
1443 if (temp != test_addr)
1444 abort ();
1445
1446 emit_label (test_lab);
1447 emit_cmp_insn (test_addr, last_addr, CMP_OPCODE, NULL_RTX, Pmode, 1, 0);
1448 emit_jump_insn ((*bcc_gen_fctn[(int) CMP_OPCODE]) (loop_lab));
1449 emit_jump (end_lab);
1450 emit_note (NULL_PTR, NOTE_INSN_LOOP_END);
1451 emit_label (end_lab);
1452
1453 /* If will be doing stupid optimization, show test_addr is still live. */
1454 if (obey_regdecls)
1455 emit_insn (gen_rtx_USE (VOIDmode, test_addr));
1456
1457 emit_stack_probe (last_addr);
1458 }
1459 }
1460 \f
1461 /* Return an rtx representing the register or memory location
1462 in which a scalar value of data type VALTYPE
1463 was returned by a function call to function FUNC.
1464 FUNC is a FUNCTION_DECL node if the precise function is known,
1465 otherwise 0. */
1466
1467 rtx
1468 hard_function_value (valtype, func)
1469 tree valtype;
1470 tree func;
1471 {
1472 rtx val = FUNCTION_VALUE (valtype, func);
1473 if (GET_CODE (val) == REG
1474 && GET_MODE (val) == BLKmode)
1475 {
1476 int bytes = int_size_in_bytes (valtype);
1477 enum machine_mode tmpmode;
1478 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1479 tmpmode != MAX_MACHINE_MODE;
1480 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
1481 {
1482 /* Have we found a large enough mode? */
1483 if (GET_MODE_SIZE (tmpmode) >= bytes)
1484 break;
1485 }
1486
1487 /* No suitable mode found. */
1488 if (tmpmode == MAX_MACHINE_MODE)
1489 abort ();
1490
1491 PUT_MODE (val, tmpmode);
1492 }
1493 return val;
1494 }
1495
1496 /* Return an rtx representing the register or memory location
1497 in which a scalar value of mode MODE was returned by a library call. */
1498
rtx
hard_libcall_value (mode)
     enum machine_mode mode;
{
  /* Delegate entirely to the target's LIBCALL_VALUE macro.  */
  return LIBCALL_VALUE (mode);
}
1505
1506 /* Look up the tree code for a given rtx code
1507 to provide the arithmetic operation for REAL_ARITHMETIC.
1508 The function returns an int because the caller may not know
1509 what `enum tree_code' means. */
1510
1511 int
1512 rtx_to_tree_code (code)
1513 enum rtx_code code;
1514 {
1515 enum tree_code tcode;
1516
1517 switch (code)
1518 {
1519 case PLUS:
1520 tcode = PLUS_EXPR;
1521 break;
1522 case MINUS:
1523 tcode = MINUS_EXPR;
1524 break;
1525 case MULT:
1526 tcode = MULT_EXPR;
1527 break;
1528 case DIV:
1529 tcode = RDIV_EXPR;
1530 break;
1531 case SMIN:
1532 tcode = MIN_EXPR;
1533 break;
1534 case SMAX:
1535 tcode = MAX_EXPR;
1536 break;
1537 default:
1538 tcode = LAST_AND_UNUSED_TREE_CODE;
1539 break;
1540 }
1541 return ((int) tcode);
1542 }