/* Subroutines for manipulating rtx's in semantically interesting ways.
   Copyright (C) 1987, 1991, 1994, 1995, 1996, 1997, 1998,
   1999, 2000, 2001 Free Software Foundation, Inc.

This file is part of GNU CC.

GNU CC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GNU CC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GNU CC; see the file COPYING.  If not, write to
the Free Software Foundation, 59 Temple Place - Suite 330,
Boston, MA 02111-1307, USA.  */

#include "config.h"
#include "system.h"
#include "toplev.h"
#include "rtl.h"
#include "tree.h"
#include "tm_p.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "hard-reg-set.h"
#include "insn-config.h"
#include "recog.h"

#if !defined PREFERRED_STACK_BOUNDARY && defined STACK_BOUNDARY
#define PREFERRED_STACK_BOUNDARY STACK_BOUNDARY
#endif

static rtx break_out_memory_refs PARAMS ((rtx));
static void emit_stack_probe PARAMS ((rtx));

/* Truncate and perhaps sign-extend C as appropriate for MODE.  */

HOST_WIDE_INT
trunc_int_for_mode (c, mode)
     HOST_WIDE_INT c;
     enum machine_mode mode;
{
  int width = GET_MODE_BITSIZE (mode);

  /* Canonicalize BImode to 0 and STORE_FLAG_VALUE.  */
  if (mode == BImode)
    return c & 1 ? STORE_FLAG_VALUE : 0;

  /* Sign-extend for the requested mode.  */

  if (width < HOST_BITS_PER_WIDE_INT)
    {
      HOST_WIDE_INT sign = 1;
      sign <<= width - 1;
      c &= (sign << 1) - 1;
      c ^= sign;
      c -= sign;
    }

  return c;
}
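
/* A worked example of the sign-extension trick above (illustrative
   only; values assume a 64-bit HOST_WIDE_INT and an 8-bit QImode):

       c = 0x1ff, width = 8
       sign             = 1 << 7         = 0x80
       c &= (sign << 1) - 1              ->  c = 0xff
       c ^= sign                         ->  c = 0x7f
       c -= sign                         ->  c = -1

   which is exactly 0x1ff truncated to 8 bits and reinterpreted as a
   signed value.  The xor/subtract pair maps the unsigned range
   [0, 0xff] onto the signed range [-0x80, 0x7f].  */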

/* Return an rtx for the sum of X and the integer C.

   This function should be used via the `plus_constant' macro.  */

rtx
plus_constant_wide (x, c)
     register rtx x;
     register HOST_WIDE_INT c;
{
  register RTX_CODE code;
  register enum machine_mode mode;
  register rtx tem;
  int all_constant = 0;

  if (c == 0)
    return x;

 restart:

  code = GET_CODE (x);
  mode = GET_MODE (x);
  switch (code)
    {
    case CONST_INT:
      return GEN_INT (INTVAL (x) + c);

    case CONST_DOUBLE:
      {
        unsigned HOST_WIDE_INT l1 = CONST_DOUBLE_LOW (x);
        HOST_WIDE_INT h1 = CONST_DOUBLE_HIGH (x);
        unsigned HOST_WIDE_INT l2 = c;
        HOST_WIDE_INT h2 = c < 0 ? ~0 : 0;
        unsigned HOST_WIDE_INT lv;
        HOST_WIDE_INT hv;

        add_double (l1, h1, l2, h2, &lv, &hv);

        return immed_double_const (lv, hv, VOIDmode);
      }

    case MEM:
      /* If this is a reference to the constant pool, try replacing it with
         a reference to a new constant.  If the resulting address isn't
         valid, don't return it because we have no way to validize it.  */
      if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
          && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
        {
          tem
            = force_const_mem (GET_MODE (x),
                               plus_constant (get_pool_constant (XEXP (x, 0)),
                                              c));
          if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
            return tem;
        }
      break;

    case CONST:
      /* If adding to something entirely constant, set a flag
         so that we can add a CONST around the result.  */
      x = XEXP (x, 0);
      all_constant = 1;
      goto restart;

    case SYMBOL_REF:
    case LABEL_REF:
      all_constant = 1;
      break;

    case PLUS:
      /* The interesting case is adding the integer to a sum.
         Look for a constant term in the sum and combine it
         with C.  For an integer constant term, we make a combined
         integer.  For a constant term that is not an explicit integer,
         we cannot really combine, but group them together anyway.

         Restart or use a recursive call in case the remaining operand is
         something that we handle specially, such as a SYMBOL_REF.

         We may not immediately return from the recursive call here, lest
         all_constant get lost.  */

      if (GET_CODE (XEXP (x, 1)) == CONST_INT)
        {
          c += INTVAL (XEXP (x, 1));

          if (GET_MODE (x) != VOIDmode)
            c = trunc_int_for_mode (c, GET_MODE (x));

          x = XEXP (x, 0);
          goto restart;
        }
      else if (CONSTANT_P (XEXP (x, 0)))
        {
          x = gen_rtx_PLUS (mode,
                            plus_constant (XEXP (x, 0), c),
                            XEXP (x, 1));
          c = 0;
        }
      else if (CONSTANT_P (XEXP (x, 1)))
        {
          x = gen_rtx_PLUS (mode,
                            XEXP (x, 0),
                            plus_constant (XEXP (x, 1), c));
          c = 0;
        }
      break;

    case LO_SUM:
      return gen_rtx_LO_SUM (mode, XEXP (x, 0),
                             plus_constant (XEXP (x, 1), c));

    default:
      break;
    }

  if (c != 0)
    x = gen_rtx_PLUS (mode, x, GEN_INT (c));

  if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
    return x;
  else if (all_constant)
    return gen_rtx_CONST (mode, x);
  else
    return x;
}
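
/* For illustration, some transformations plus_constant performs
   (hypothetical RTL, written in the usual dump notation):

       plus_constant ((const_int 4), 3)
         => (const_int 7)

       plus_constant ((plus (reg 100) (const_int 4)), -4)
         => (reg 100)                 ; constants fold away entirely

       plus_constant ((symbol_ref "x"), 8)
         => (const (plus (symbol_ref "x") (const_int 8)))
                                      ; all_constant wraps a CONST

   The restart loop means the folding happens even when the constant
   is buried one level down inside a PLUS or a CONST.  */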
\f
/* If X is a sum, return a new sum like X but lacking any constant terms.
   Add all the removed constant terms into *CONSTPTR.
   X itself is not altered.  The result != X if and only if
   it is not isomorphic to X.  */

rtx
eliminate_constant_term (x, constptr)
     rtx x;
     rtx *constptr;
{
  register rtx x0, x1;
  rtx tem;

  if (GET_CODE (x) != PLUS)
    return x;

  /* First handle constants appearing at this level explicitly.  */
  if (GET_CODE (XEXP (x, 1)) == CONST_INT
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
                                                XEXP (x, 1)))
      && GET_CODE (tem) == CONST_INT)
    {
      *constptr = tem;
      return eliminate_constant_term (XEXP (x, 0), constptr);
    }

  tem = const0_rtx;
  x0 = eliminate_constant_term (XEXP (x, 0), &tem);
  x1 = eliminate_constant_term (XEXP (x, 1), &tem);
  if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
      && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
                                                *constptr, tem))
      && GET_CODE (tem) == CONST_INT)
    {
      *constptr = tem;
      return gen_rtx_PLUS (GET_MODE (x), x0, x1);
    }

  return x;
}
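
/* For illustration, with *CONSTPTR initially (const_int 0) and the
   hypothetical input

       x = (plus (plus (reg 100) (const_int 4)) (const_int 8))

   eliminate_constant_term returns (reg 100) and leaves *CONSTPTR
   pointing at (const_int 12): the outer constant is folded into
   *CONSTPTR first, then the recursion strips the inner one.  */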

/* Returns the insn that next references REG after INSN, or 0
   if REG is clobbered before it is next referenced or we cannot find
   an insn that references REG in a straight-line piece of code.  */

rtx
find_next_ref (reg, insn)
     rtx reg;
     rtx insn;
{
  rtx next;

  for (insn = NEXT_INSN (insn); insn; insn = next)
    {
      next = NEXT_INSN (insn);
      if (GET_CODE (insn) == NOTE)
        continue;
      if (GET_CODE (insn) == CODE_LABEL
          || GET_CODE (insn) == BARRIER)
        return 0;
      if (GET_CODE (insn) == INSN
          || GET_CODE (insn) == JUMP_INSN
          || GET_CODE (insn) == CALL_INSN)
        {
          if (reg_set_p (reg, insn))
            return 0;
          if (reg_mentioned_p (reg, PATTERN (insn)))
            return insn;
          if (GET_CODE (insn) == JUMP_INSN)
            {
              if (any_uncondjump_p (insn))
                next = JUMP_LABEL (insn);
              else
                return 0;
            }
          if (GET_CODE (insn) == CALL_INSN
              && REGNO (reg) < FIRST_PSEUDO_REGISTER
              && call_used_regs[REGNO (reg)])
            return 0;
        }
      else
        abort ();
    }
  return 0;
}

/* Return an rtx for the size in bytes of the value of EXP.  */

rtx
expr_size (exp)
     tree exp;
{
  tree size;

  if (TREE_CODE_CLASS (TREE_CODE (exp)) == 'd'
      && DECL_SIZE_UNIT (exp) != 0)
    size = DECL_SIZE_UNIT (exp);
  else
    size = size_in_bytes (TREE_TYPE (exp));

  if (TREE_CODE (size) != INTEGER_CST
      && contains_placeholder_p (size))
    size = build (WITH_RECORD_EXPR, sizetype, size, exp);

  return expand_expr (size, NULL_RTX, TYPE_MODE (sizetype),
                      EXPAND_MEMORY_USE_BAD);
}
\f
/* Return a copy of X in which all memory references
   and all constants that involve symbol refs
   have been replaced with new temporary registers.
   Also emit code to load the memory locations and constants
   into those registers.

   If X contains no such constants or memory references,
   X itself (not a copy) is returned.

   If a constant is found in the address that is not a legitimate constant
   in an insn, it is left alone in the hope that it might be valid in the
   address.

   X may contain no arithmetic except addition, subtraction and
   multiplication.  Values returned by expand_expr with 1 for sum_ok fit
   this constraint.  */

static rtx
break_out_memory_refs (x)
     register rtx x;
{
  if (GET_CODE (x) == MEM
      || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
          && GET_MODE (x) != VOIDmode))
    x = force_reg (GET_MODE (x), x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
           || GET_CODE (x) == MULT)
    {
      register rtx op0 = break_out_memory_refs (XEXP (x, 0));
      register rtx op1 = break_out_memory_refs (XEXP (x, 1));

      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        x = gen_rtx_fmt_ee (GET_CODE (x), Pmode, op0, op1);
    }

  return x;
}
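
/* For illustration, given the hypothetical address expression

       (plus (mem (reg 100)) (const_int 4))

   break_out_memory_refs emits a load of the MEM into a fresh pseudo,
   say (reg 101), and returns

       (plus (reg 101) (const_int 4))

   The CONST_INT is left in place: its mode is VOIDmode, so it fails
   the GET_MODE (x) != VOIDmode test and is not forced into a
   register.  */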

#ifdef POINTERS_EXTEND_UNSIGNED

/* Given X, a memory address in ptr_mode, convert it to an address
   in Pmode, or vice versa (TO_MODE says which way).  We take advantage of
   the fact that pointers are not allowed to overflow by commuting arithmetic
   operations over conversions so that address arithmetic insns can be
   used.  */

rtx
convert_memory_address (to_mode, x)
     enum machine_mode to_mode;
     rtx x;
{
  enum machine_mode from_mode = to_mode == ptr_mode ? Pmode : ptr_mode;
  rtx temp;

  /* Here we handle some special cases.  If none of them apply, fall through
     to the default case.  */
  switch (GET_CODE (x))
    {
    case CONST_INT:
    case CONST_DOUBLE:
      return x;

    case SUBREG:
      if (GET_MODE (SUBREG_REG (x)) == to_mode)
        return SUBREG_REG (x);
      break;

    case LABEL_REF:
      temp = gen_rtx_LABEL_REF (to_mode, XEXP (x, 0));
      LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
      return temp;

    case SYMBOL_REF:
      temp = gen_rtx_SYMBOL_REF (to_mode, XSTR (x, 0));
      SYMBOL_REF_FLAG (temp) = SYMBOL_REF_FLAG (x);
      CONSTANT_POOL_ADDRESS_P (temp) = CONSTANT_POOL_ADDRESS_P (x);
      STRING_POOL_ADDRESS_P (temp) = STRING_POOL_ADDRESS_P (x);
      return temp;

    case CONST:
      return gen_rtx_CONST (to_mode,
                            convert_memory_address (to_mode, XEXP (x, 0)));

    case PLUS:
    case MULT:
      /* For addition, if the second operand is a small constant, we can
         safely permute the conversion and the addition operation.  We
         can always safely permute them if we are making the address
         narrower.  In addition, always permute the operations if this
         is a constant.  */
      if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
          || (GET_CODE (x) == PLUS && GET_CODE (XEXP (x, 1)) == CONST_INT
              && (INTVAL (XEXP (x, 1)) + 20000 < 40000
                  || CONSTANT_P (XEXP (x, 0)))))
        return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
                               convert_memory_address (to_mode, XEXP (x, 0)),
                               convert_memory_address (to_mode, XEXP (x, 1)));
      break;

    default:
      break;
    }

  return convert_modes (to_mode, from_mode,
                        x, POINTERS_EXTEND_UNSIGNED);
}
#endif
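
/* For illustration, on a hypothetical target where ptr_mode is SImode
   and Pmode is DImode, converting

       (plus:SI (reg:SI 100) (const_int 4))

   to Pmode takes the PLUS case above: 4 falls within the +/-20000
   window, so the conversion is pushed down to the operands, giving
   roughly

       (plus:DI <extension of (reg:SI 100)> (const_int 4))

   rather than an extension of the whole sum, which lets ordinary
   DImode address arithmetic patterns match.  */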

/* Given a memory address or facsimile X, construct a new address,
   currently equivalent, that is stable: future stores won't change it.

   X must be composed of constants, register and memory references
   combined with addition, subtraction and multiplication:
   in other words, just what you can get from expand_expr if sum_ok is 1.

   Works by making copies of all regs and memory locations used
   by X and combining them the same way X does.
   You could also stabilize the reference to this address
   by copying the address to a register with copy_to_reg;
   but then you wouldn't get indexed addressing in the reference.  */

rtx
copy_all_regs (x)
     register rtx x;
{
  if (GET_CODE (x) == REG)
    {
      if (REGNO (x) != FRAME_POINTER_REGNUM
#if HARD_FRAME_POINTER_REGNUM != FRAME_POINTER_REGNUM
          && REGNO (x) != HARD_FRAME_POINTER_REGNUM
#endif
          )
        x = copy_to_reg (x);
    }
  else if (GET_CODE (x) == MEM)
    x = copy_to_reg (x);
  else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
           || GET_CODE (x) == MULT)
    {
      register rtx op0 = copy_all_regs (XEXP (x, 0));
      register rtx op1 = copy_all_regs (XEXP (x, 1));
      if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
        x = gen_rtx_fmt_ee (GET_CODE (x), Pmode, op0, op1);
    }
  return x;
}
\f
/* Return something equivalent to X but valid as a memory address
   for something of mode MODE.  When X is not itself valid, this
   works by copying X or subexpressions of it into registers.  */

rtx
memory_address (mode, x)
     enum machine_mode mode;
     register rtx x;
{
  register rtx oldx = x;

  if (GET_CODE (x) == ADDRESSOF)
    return x;

#ifdef POINTERS_EXTEND_UNSIGNED
  if (GET_MODE (x) == ptr_mode)
    x = convert_memory_address (Pmode, x);
#endif

  /* By passing constant addresses through registers
     we get a chance to cse them.  */
  if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
    x = force_reg (Pmode, x);

  /* Accept a QUEUED that refers to a REG
     even though that isn't a valid address.
     On attempting to put this in an insn we will call protect_from_queue
     which will turn it into a REG, which is valid.  */
  else if (GET_CODE (x) == QUEUED
           && GET_CODE (QUEUED_VAR (x)) == REG)
    ;

  /* We get better cse by rejecting indirect addressing at this stage.
     Let the combiner create indirect addresses where appropriate.
     For now, generate the code so that the subexpressions useful to share
     are visible.  But not if cse won't be done!  */
  else
    {
      if (! cse_not_expected && GET_CODE (x) != REG)
        x = break_out_memory_refs (x);

      /* At this point, any valid address is accepted.  */
      GO_IF_LEGITIMATE_ADDRESS (mode, x, win);

      /* If it was valid before but breaking out memory refs invalidated it,
         use it the old way.  */
      if (memory_address_p (mode, oldx))
        goto win2;

      /* Perform machine-dependent transformations on X
         in certain cases.  This is not necessary since the code
         below can handle all possible cases, but machine-dependent
         transformations can make better code.  */
      LEGITIMIZE_ADDRESS (x, oldx, mode, win);

      /* PLUS and MULT can appear in special ways
         as the result of attempts to make an address usable for indexing.
         Usually they are dealt with by calling force_operand, below.
         But a sum containing constant terms is special
         if removing them makes the sum a valid address:
         then we generate that address in a register
         and index off of it.  We do this because it often makes
         shorter code, and because the addresses thus generated
         in registers often become common subexpressions.  */
      if (GET_CODE (x) == PLUS)
        {
          rtx constant_term = const0_rtx;
          rtx y = eliminate_constant_term (x, &constant_term);
          if (constant_term == const0_rtx
              || ! memory_address_p (mode, y))
            x = force_operand (x, NULL_RTX);
          else
            {
              y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
              if (! memory_address_p (mode, y))
                x = force_operand (x, NULL_RTX);
              else
                x = y;
            }
        }

      else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
        x = force_operand (x, NULL_RTX);

      /* If we have a register that's an invalid address,
         it must be a hard reg of the wrong class.  Copy it to a pseudo.  */
      else if (GET_CODE (x) == REG)
        x = copy_to_reg (x);

      /* Last resort: copy the value to a register, since
         the register is a valid address.  */
      else
        x = force_reg (Pmode, x);

      goto done;

    win2:
      x = oldx;
    win:
      if (flag_force_addr && ! cse_not_expected && GET_CODE (x) != REG
          /* Don't copy an addr via a reg if it is one of our stack slots.  */
          && ! (GET_CODE (x) == PLUS
                && (XEXP (x, 0) == virtual_stack_vars_rtx
                    || XEXP (x, 0) == virtual_incoming_args_rtx)))
        {
          if (general_operand (x, Pmode))
            x = force_reg (Pmode, x);
          else
            x = force_operand (x, NULL_RTX);
        }
    }

 done:

  /* If we didn't change the address, we are done.  Otherwise, mark
     a reg as a pointer if we have REG or REG + CONST_INT.  */
  if (oldx == x)
    return x;
  else if (GET_CODE (x) == REG)
    mark_reg_pointer (x, BITS_PER_UNIT);
  else if (GET_CODE (x) == PLUS
           && GET_CODE (XEXP (x, 0)) == REG
           && GET_CODE (XEXP (x, 1)) == CONST_INT)
    mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);

  /* OLDX may have been the address on a temporary.  Update the address
     to indicate that X is now used.  */
  update_temp_slot_address (oldx, x);

  return x;
}
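
/* For illustration, consider a hypothetical target that accepts
   reg+reg and reg+const addresses but not reg+reg+const, and the
   invalid address

       (plus (plus (reg 100) (reg 101)) (const_int 4))

   eliminate_constant_term splits off (const_int 4); the remaining sum
   (plus (reg 100) (reg 101)) is itself a valid address, so it is
   copied to a fresh pseudo, say (reg 102), and the result is

       (plus (reg 102) (const_int 4))

   which is valid, often shorter, and leaves (reg 102) available as a
   common subexpression for neighboring references.  */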

/* Like `memory_address' but pretend `flag_force_addr' is 0.  */

rtx
memory_address_noforce (mode, x)
     enum machine_mode mode;
     rtx x;
{
  int ambient_force_addr = flag_force_addr;
  rtx val;

  flag_force_addr = 0;
  val = memory_address (mode, x);
  flag_force_addr = ambient_force_addr;
  return val;
}

/* Convert a mem ref into one with a valid memory address.
   Pass through anything else unchanged.  */

rtx
validize_mem (ref)
     rtx ref;
{
  if (GET_CODE (ref) != MEM)
    return ref;
  if (memory_address_p (GET_MODE (ref), XEXP (ref, 0)))
    return ref;
  /* Don't alter REF itself, since that is probably a stack slot.  */
  return change_address (ref, GET_MODE (ref), XEXP (ref, 0));
}
\f
/* Given REF, either a MEM or a REG, and T, either the type of REF or
   the expression corresponding to REF, set RTX_UNCHANGING_P if
   appropriate.  */

void
maybe_set_unchanging (ref, t)
     rtx ref;
     tree t;
{
  /* We can set RTX_UNCHANGING_P from TREE_READONLY for decls whose
     initialization is only executed once, or whose initializer always
     has the same value.  Currently we simplify this to PARM_DECLs in the
     first case, and decls with TREE_CONSTANT initializers in the second.  */
  if ((TREE_READONLY (t) && DECL_P (t)
       && (TREE_CODE (t) == PARM_DECL
           || DECL_INITIAL (t) == NULL_TREE
           || TREE_CONSTANT (DECL_INITIAL (t))))
      || TREE_CODE_CLASS (TREE_CODE (t)) == 'c')
    RTX_UNCHANGING_P (ref) = 1;
}

/* Given REF, a MEM, and T, either the type of REF or the expression
   corresponding to REF, set the memory attributes.  OBJECTP is nonzero
   if we are making a new object of this type.  */

void
set_mem_attributes (ref, t, objectp)
     rtx ref;
     tree t;
     int objectp;
{
  tree type;

  /* It can happen that type_for_mode was given a mode for which there
     is no language-level type, in which case it returns NULL, which
     we can see here.  */
  if (t == NULL_TREE)
    return;

  type = TYPE_P (t) ? t : TREE_TYPE (t);

  /* Get the alias set from the expression or type (perhaps using a
     front-end routine) and then copy bits from the type.  */

  /* It is incorrect to set RTX_UNCHANGING_P from TREE_READONLY (type)
     here, because, in C and C++, the fact that a location is accessed
     through a const expression does not mean that the value there can
     never change.  */
  MEM_ALIAS_SET (ref) = get_alias_set (t);
  MEM_VOLATILE_P (ref) = TYPE_VOLATILE (type);
  MEM_IN_STRUCT_P (ref) = AGGREGATE_TYPE_P (type);

  /* If we are making an object of this type, we know that it is a scalar if
     the type is not an aggregate.  */
  if (objectp && ! AGGREGATE_TYPE_P (type))
    MEM_SCALAR_P (ref) = 1;

  /* If T is a type, this is all we can do.  Otherwise, we may be able
     to deduce some more information about the expression.  */
  if (TYPE_P (t))
    return;

  maybe_set_unchanging (ref, t);
  if (TREE_THIS_VOLATILE (t))
    MEM_VOLATILE_P (ref) = 1;

  /* Now see if we can say more about whether it's an aggregate or
     scalar.  If we already know it's an aggregate, don't bother.  */
  if (MEM_IN_STRUCT_P (ref))
    return;

  /* Now remove any NOPs: they don't change what the underlying object is.
     Likewise for SAVE_EXPR.  */
  while (TREE_CODE (t) == NOP_EXPR || TREE_CODE (t) == CONVERT_EXPR
         || TREE_CODE (t) == NON_LVALUE_EXPR || TREE_CODE (t) == SAVE_EXPR)
    t = TREE_OPERAND (t, 0);

  /* Since we already know the type isn't an aggregate, if this is a decl,
     it must be a scalar.  Or if it is a reference into an aggregate,
     this is part of an aggregate.  Otherwise we don't know.  */
  if (DECL_P (t))
    MEM_SCALAR_P (ref) = 1;
  else if (TREE_CODE (t) == COMPONENT_REF || TREE_CODE (t) == ARRAY_REF
           || TREE_CODE (t) == ARRAY_RANGE_REF
           || TREE_CODE (t) == BIT_FIELD_REF)
    MEM_IN_STRUCT_P (ref) = 1;
}
\f
/* Return a modified copy of X with its memory address copied
   into a temporary register to protect it from side effects.
   If X is not a MEM, it is returned unchanged (and not copied).
   Perhaps even if it is a MEM, if there is no need to change it.  */

rtx
stabilize (x)
     rtx x;
{
  register rtx addr;

  if (GET_CODE (x) != MEM)
    return x;

  addr = XEXP (x, 0);
  if (rtx_unstable_p (addr))
    {
      rtx temp = force_reg (Pmode, copy_all_regs (addr));
      rtx mem = gen_rtx_MEM (GET_MODE (x), temp);

      MEM_COPY_ATTRIBUTES (mem, x);
      return mem;
    }
  return x;
}
\f
/* Copy the value or contents of X to a new temp reg and return that reg.  */

rtx
copy_to_reg (x)
     rtx x;
{
  register rtx temp = gen_reg_rtx (GET_MODE (x));

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (x != temp)
    emit_move_insn (temp, x);

  return temp;
}

/* Like copy_to_reg but always give the new register mode Pmode
   in case X is a constant.  */

rtx
copy_addr_to_reg (x)
     rtx x;
{
  return copy_to_mode_reg (Pmode, x);
}

/* Like copy_to_reg but always give the new register mode MODE
   in case X is a constant.  */

rtx
copy_to_mode_reg (mode, x)
     enum machine_mode mode;
     rtx x;
{
  register rtx temp = gen_reg_rtx (mode);

  /* If not an operand, must be an address with PLUS and MULT so
     do the computation.  */
  if (! general_operand (x, VOIDmode))
    x = force_operand (x, temp);

  if (GET_MODE (x) != mode && GET_MODE (x) != VOIDmode)
    abort ();
  if (x != temp)
    emit_move_insn (temp, x);
  return temp;
}

/* Load X into a register if it is not already one.
   Use mode MODE for the register.
   X should be valid for mode MODE, but it may be a constant which
   is valid for all integer modes; that's why caller must specify MODE.

   The caller must not alter the value in the register we return,
   since we mark it as a "constant" register.  */

rtx
force_reg (mode, x)
     enum machine_mode mode;
     rtx x;
{
  register rtx temp, insn, set;

  if (GET_CODE (x) == REG)
    return x;

  temp = gen_reg_rtx (mode);

  if (! general_operand (x, mode))
    x = force_operand (x, NULL_RTX);

  insn = emit_move_insn (temp, x);

  /* Let optimizers know that TEMP's value never changes
     and that X can be substituted for it.  Don't get confused
     if INSN set something else (such as a SUBREG of TEMP).  */
  if (CONSTANT_P (x)
      && (set = single_set (insn)) != 0
      && SET_DEST (set) == temp)
    {
      rtx note = find_reg_note (insn, REG_EQUAL, NULL_RTX);

      if (note)
        XEXP (note, 0) = x;
      else
        REG_NOTES (insn) = gen_rtx_EXPR_LIST (REG_EQUAL, x, REG_NOTES (insn));
    }
  return temp;
}

/* If X is a memory ref, copy its contents to a new temp reg and return
   that reg.  Otherwise, return X.  */

rtx
force_not_mem (x)
     rtx x;
{
  register rtx temp;

  if (GET_CODE (x) != MEM || GET_MODE (x) == BLKmode)
    return x;

  temp = gen_reg_rtx (GET_MODE (x));
  emit_move_insn (temp, x);
  return temp;
}

/* Copy X to TARGET (if it's nonzero and a reg)
   or to a new temp reg and return that reg.
   MODE is the mode to use for X in case it is a constant.  */

rtx
copy_to_suggested_reg (x, target, mode)
     rtx x, target;
     enum machine_mode mode;
{
  register rtx temp;

  if (target && GET_CODE (target) == REG)
    temp = target;
  else
    temp = gen_reg_rtx (mode);

  emit_move_insn (temp, x);
  return temp;
}
\f
/* Return the mode to use to store a scalar of TYPE and MODE.
   PUNSIGNEDP points to the signedness of the type and may be adjusted
   to show what signedness to use on extension operations.

   FOR_CALL is non-zero if this call is promoting args for a call.  */

enum machine_mode
promote_mode (type, mode, punsignedp, for_call)
     tree type;
     enum machine_mode mode;
     int *punsignedp;
     int for_call ATTRIBUTE_UNUSED;
{
  enum tree_code code = TREE_CODE (type);
  int unsignedp = *punsignedp;

#ifdef PROMOTE_FOR_CALL_ONLY
  if (! for_call)
    return mode;
#endif

  switch (code)
    {
#ifdef PROMOTE_MODE
    case INTEGER_TYPE:   case ENUMERAL_TYPE:   case BOOLEAN_TYPE:
    case CHAR_TYPE:      case REAL_TYPE:       case OFFSET_TYPE:
      PROMOTE_MODE (mode, unsignedp, type);
      break;
#endif

#ifdef POINTERS_EXTEND_UNSIGNED
    case REFERENCE_TYPE:
    case POINTER_TYPE:
      mode = Pmode;
      unsignedp = POINTERS_EXTEND_UNSIGNED;
      break;
#endif

    default:
      break;
    }

  *punsignedp = unsignedp;
  return mode;
}
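
/* For illustration, on a target whose PROMOTE_MODE widens integers
   narrower than SImode (a hypothetical but typical definition):

       int unsignedp = 0;
       enum machine_mode mode
         = promote_mode (char_type_node, QImode, &unsignedp, 0);

   would yield SImode, with unsignedp updated to whatever extension
   the target prefers for char.  Pointer types always promote to
   Pmode when POINTERS_EXTEND_UNSIGNED is defined.  */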
\f
/* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
   This pops when ADJUST is positive.  ADJUST need not be constant.  */

void
adjust_stack (adjust)
     rtx adjust;
{
  rtx temp;
  adjust = protect_from_queue (adjust, 0);

  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be a multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (GET_CODE (adjust) == CONST_INT)
    stack_pointer_delta -= INTVAL (adjust);

  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                       add_optab,
#else
                       sub_optab,
#endif
                       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
                       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}

/* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
   This pushes when ADJUST is positive.  ADJUST need not be constant.  */

void
anti_adjust_stack (adjust)
     rtx adjust;
{
  rtx temp;
  adjust = protect_from_queue (adjust, 0);

  if (adjust == const0_rtx)
    return;

  /* We expect all variable sized adjustments to be a multiple of
     PREFERRED_STACK_BOUNDARY.  */
  if (GET_CODE (adjust) == CONST_INT)
    stack_pointer_delta += INTVAL (adjust);

  temp = expand_binop (Pmode,
#ifdef STACK_GROWS_DOWNWARD
                       sub_optab,
#else
                       add_optab,
#endif
                       stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
                       OPTAB_LIB_WIDEN);

  if (temp != stack_pointer_rtx)
    emit_move_insn (stack_pointer_rtx, temp);
}
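
/* For illustration: on a machine where the stack grows downward,
   anti_adjust_stack (GEN_INT (16)) emits

       (set (reg sp) (minus (reg sp) (const_int 16)))

   pushing 16 bytes, while adjust_stack (GEN_INT (16)) emits the
   matching addition that pops them again.  stack_pointer_delta tracks
   the net constant adjustment so later code can check alignment.  */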

/* Round the size of a block to be pushed up to the boundary required
   by this machine.  SIZE is the desired size, which need not be constant.  */

rtx
round_push (size)
     rtx size;
{
#ifdef PREFERRED_STACK_BOUNDARY
  int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
  if (align == 1)
    return size;
  if (GET_CODE (size) == CONST_INT)
    {
      int new = (INTVAL (size) + align - 1) / align * align;
      if (INTVAL (size) != new)
        size = GEN_INT (new);
    }
  else
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
         but we know it can't.  So add ourselves and then do
         TRUNC_DIV_EXPR.  */
      size = expand_binop (Pmode, add_optab, size, GEN_INT (align - 1),
                           NULL_RTX, 1, OPTAB_LIB_WIDEN);
      size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, GEN_INT (align),
                            NULL_RTX, 1);
      size = expand_mult (Pmode, size, GEN_INT (align), NULL_RTX, 1);
    }
#endif /* PREFERRED_STACK_BOUNDARY */
  return size;
}
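
/* A worked example of the rounding above: with
   PREFERRED_STACK_BOUNDARY == 64, align is 8, and a constant size of
   20 becomes (20 + 8 - 1) / 8 * 8 = 27 / 8 * 8 = 3 * 8 = 24.  The
   non-constant path computes the same add/divide/multiply sequence at
   run time.  */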
\f
/* Save the stack pointer for the purpose in SAVE_LEVEL.  PSAVE is a pointer
   to a previously-created save area.  If no save area has been allocated,
   this function will allocate one.  If a save area is specified, it
   must be of the proper mode.

   The insns are emitted after insn AFTER, if nonzero, otherwise the insns
   are emitted at the current position.  */

void
emit_stack_save (save_level, psave, after)
     enum save_level save_level;
     rtx *psave;
     rtx after;
{
  rtx sa = *psave;
  /* The default is that we use a move insn and save in a Pmode object.  */
  rtx (*fcn) PARAMS ((rtx, rtx)) = gen_move_insn;
  enum machine_mode mode = STACK_SAVEAREA_MODE (save_level);

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_save_stack_block
    case SAVE_BLOCK:
      if (HAVE_save_stack_block)
        fcn = gen_save_stack_block;
      break;
#endif
#ifdef HAVE_save_stack_function
    case SAVE_FUNCTION:
      if (HAVE_save_stack_function)
        fcn = gen_save_stack_function;
      break;
#endif
#ifdef HAVE_save_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_save_stack_nonlocal)
        fcn = gen_save_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  /* If there is no save area and we have to allocate one, do so.  Otherwise
     verify the save area is the proper mode.  */

  if (sa == 0)
    {
      if (mode != VOIDmode)
        {
          if (save_level == SAVE_NONLOCAL)
            *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
          else
            *psave = sa = gen_reg_rtx (mode);
        }
    }
  else
    {
      if (mode == VOIDmode || GET_MODE (sa) != mode)
        abort ();
    }

  if (after)
    {
      rtx seq;

      start_sequence ();
      /* We must validize inside the sequence, to ensure that any instructions
         created by the validize call also get moved to the right place.  */
      if (sa != 0)
        sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
      seq = gen_sequence ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    {
      if (sa != 0)
        sa = validize_mem (sa);
      emit_insn (fcn (sa, stack_pointer_rtx));
    }
}

/* Restore the stack pointer for the purpose in SAVE_LEVEL.  SA is the save
   area made by emit_stack_save.  If it is zero, we have nothing to do.

   Put any emitted insns after insn AFTER, if nonzero, otherwise at the
   current position.  */

void
emit_stack_restore (save_level, sa, after)
     enum save_level save_level;
     rtx after;
     rtx sa;
{
  /* The default is that we use a move insn.  */
  rtx (*fcn) PARAMS ((rtx, rtx)) = gen_move_insn;

  /* See if this machine has anything special to do for this kind of save.  */
  switch (save_level)
    {
#ifdef HAVE_restore_stack_block
    case SAVE_BLOCK:
      if (HAVE_restore_stack_block)
        fcn = gen_restore_stack_block;
      break;
#endif
#ifdef HAVE_restore_stack_function
    case SAVE_FUNCTION:
      if (HAVE_restore_stack_function)
        fcn = gen_restore_stack_function;
      break;
#endif
#ifdef HAVE_restore_stack_nonlocal
    case SAVE_NONLOCAL:
      if (HAVE_restore_stack_nonlocal)
        fcn = gen_restore_stack_nonlocal;
      break;
#endif
    default:
      break;
    }

  if (sa != 0)
    sa = validize_mem (sa);

  if (after)
    {
      rtx seq;

      start_sequence ();
      emit_insn (fcn (stack_pointer_rtx, sa));
      seq = gen_sequence ();
      end_sequence ();
      emit_insn_after (seq, after);
    }
  else
    emit_insn (fcn (stack_pointer_rtx, sa));
}
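
/* A minimal usage sketch (hypothetical caller code): pairing a save
   with its restore around a region that moves the stack pointer.

       rtx save_area = NULL_RTX;

       emit_stack_save (SAVE_BLOCK, &save_area, NULL_RTX);
       ... emit insns that allocate variable stack space ...
       emit_stack_restore (SAVE_BLOCK, save_area, NULL_RTX);

   The first call allocates the save area (a pseudo or a stack slot,
   depending on STACK_SAVEAREA_MODE) and fills it in; the second
   copies it back into the stack pointer.  */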
\f
#ifdef SETJMP_VIA_SAVE_AREA
/* Optimize RTL generated by allocate_dynamic_stack_space for targets
   where SETJMP_VIA_SAVE_AREA is true.  The problem is that on these
   platforms, the dynamic stack space used can corrupt the original
   frame, thus causing a crash if a longjmp unwinds to it.  */

void
optimize_save_area_alloca (insns)
     rtx insns;
{
  rtx insn;

  for (insn = insns; insn; insn = NEXT_INSN (insn))
    {
      rtx note;

      if (GET_CODE (insn) != INSN)
        continue;

      for (note = REG_NOTES (insn); note; note = XEXP (note, 1))
        {
          if (REG_NOTE_KIND (note) != REG_SAVE_AREA)
            continue;

          if (!current_function_calls_setjmp)
            {
              rtx pat = PATTERN (insn);

              /* If we do not see the note in a pattern matching
                 these precise characteristics, we did something
                 entirely wrong in allocate_dynamic_stack_space.

                 Note, one way this could happen is if SETJMP_VIA_SAVE_AREA
                 was defined on a machine where stacks grow towards higher
                 addresses.

                 Right now the only supported port with a stack that grows
                 upward is the HPPA, and it does not define
                 SETJMP_VIA_SAVE_AREA.  */
              if (GET_CODE (pat) != SET
                  || SET_DEST (pat) != stack_pointer_rtx
                  || GET_CODE (SET_SRC (pat)) != MINUS
                  || XEXP (SET_SRC (pat), 0) != stack_pointer_rtx)
                abort ();

              /* This will now be transformed into a (set REG REG)
                 so we can just blow away all the other notes.  */
              XEXP (SET_SRC (pat), 1) = XEXP (note, 0);
              REG_NOTES (insn) = NULL_RTX;
            }
          else
            {
              /* setjmp was called, so we must remove the REG_SAVE_AREA
                 note so that later passes do not get confused by its
                 presence.  */
              if (note == REG_NOTES (insn))
                {
                  REG_NOTES (insn) = XEXP (note, 1);
                }
              else
                {
                  rtx srch;

                  for (srch = REG_NOTES (insn); srch; srch = XEXP (srch, 1))
                    if (XEXP (srch, 1) == note)
                      break;

                  if (srch == NULL_RTX)
                    abort ();

                  XEXP (srch, 1) = XEXP (note, 1);
                }
            }
          /* Once we've seen the note of interest, we need not look at
             the rest of them.  */
          break;
        }
    }
}
#endif /* SETJMP_VIA_SAVE_AREA */

/* Return an rtx representing the address of an area of memory dynamically
   pushed on the stack.  This region of memory is always aligned to
   a multiple of BIGGEST_ALIGNMENT.

   Any required stack pointer alignment is preserved.

   SIZE is an rtx representing the size of the area.
   TARGET is a place in which the address can be placed.

   KNOWN_ALIGN is the alignment (in bits) that we know SIZE has.  */

rtx
allocate_dynamic_stack_space (size, target, known_align)
     rtx size;
     rtx target;
     int known_align;
{
#ifdef SETJMP_VIA_SAVE_AREA
  rtx setjmpless_size = NULL_RTX;
#endif

  /* If we're asking for zero bytes, it doesn't matter what we point
     to since we can't dereference it.  But return a reasonable
     address anyway.  */
  if (size == const0_rtx)
    return virtual_stack_dynamic_rtx;

  /* Otherwise, show we're calling alloca or equivalent.  */
  current_function_calls_alloca = 1;

  /* Ensure the size is in the proper mode.  */
  if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
    size = convert_to_mode (Pmode, size, 1);

  /* We can't attempt to minimize alignment necessary, because we don't
     know the final value of preferred_stack_boundary yet while executing
     this code.  */
#ifdef PREFERRED_STACK_BOUNDARY
  cfun->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
#endif

  /* We will need to ensure that the address we return is aligned to
     BIGGEST_ALIGNMENT.  If STACK_DYNAMIC_OFFSET is defined, we don't
     always know its final value at this point in the compilation (it
     might depend on the size of the outgoing parameter lists, for
     example), so we must align the value to be returned in that case.
     (Note that STACK_DYNAMIC_OFFSET will have a default non-zero value if
     STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).
     We must also do an alignment operation on the returned value if
     the stack pointer alignment is less strict than BIGGEST_ALIGNMENT.

     If we have to align, we must leave space in SIZE for the hole
     that might result from the alignment operation.  */

#if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET) || ! defined (PREFERRED_STACK_BOUNDARY)
#define MUST_ALIGN 1
#else
#define MUST_ALIGN (PREFERRED_STACK_BOUNDARY < BIGGEST_ALIGNMENT)
#endif

  if (MUST_ALIGN)
    size
      = force_operand (plus_constant (size,
                                      BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
                       NULL_RTX);

#ifdef SETJMP_VIA_SAVE_AREA
  /* If setjmp restores regs from a save area in the stack frame,
     avoid clobbering the reg save area.  Note that the offset of
     virtual_incoming_args_rtx includes the preallocated stack args space.
     It would be no problem to clobber that, but it's on the wrong side
     of the old save area.  */
  {
    rtx dynamic_offset
      = expand_binop (Pmode, sub_optab, virtual_stack_dynamic_rtx,
                      stack_pointer_rtx, NULL_RTX, 1, OPTAB_LIB_WIDEN);

    if (!current_function_calls_setjmp)
      {
        int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;

        /* See optimize_save_area_alloca to understand what is being
           set up here.  */

#if !defined(PREFERRED_STACK_BOUNDARY) || !defined(MUST_ALIGN) || (PREFERRED_STACK_BOUNDARY != BIGGEST_ALIGNMENT)
        /* If anyone creates a target with these characteristics, let them
           know that our optimization cannot work correctly in such a case.  */
        abort ();
#endif

        if (GET_CODE (size) == CONST_INT)
          {
            HOST_WIDE_INT new = INTVAL (size) / align * align;

            if (INTVAL (size) != new)
              setjmpless_size = GEN_INT (new);
            else
              setjmpless_size = size;
          }
        else
          {
            /* Since we know overflow is not possible, we avoid using
               CEIL_DIV_EXPR and use TRUNC_DIV_EXPR instead.  */
            setjmpless_size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size,
                                             GEN_INT (align), NULL_RTX, 1);
            setjmpless_size = expand_mult (Pmode, setjmpless_size,
                                           GEN_INT (align), NULL_RTX, 1);
          }
        /* Our optimization works based upon being able to perform a simple
           transformation of this RTL into a (set REG REG) so make sure things
           did in fact end up in a REG.  */
        if (!register_operand (setjmpless_size, Pmode))
          setjmpless_size = force_reg (Pmode, setjmpless_size);
      }

    size = expand_binop (Pmode, add_optab, size, dynamic_offset,
                         NULL_RTX, 1, OPTAB_LIB_WIDEN);
  }
#endif /* SETJMP_VIA_SAVE_AREA */

  /* Round the size to a multiple of the required stack alignment.
     Since the stack is presumed to be rounded before this allocation,
     this will maintain the required alignment.

     If the stack grows downward, we could save an insn by subtracting
     SIZE from the stack pointer and then aligning the stack pointer.
     The problem with this is that the stack pointer may be unaligned
     between the execution of the subtraction and alignment insns and
     some machines do not allow this.  Even on those that do, some
     signal handlers malfunction if a signal should occur between those
     insns.  Since this is an extremely rare event, we have no reliable
     way of knowing which systems have this problem.  So we avoid even
     momentarily mis-aligning the stack.  */

#ifdef PREFERRED_STACK_BOUNDARY
  /* If we added a variable amount to SIZE,
     we can no longer assume it is aligned.  */
#if !defined (SETJMP_VIA_SAVE_AREA)
  if (MUST_ALIGN || known_align % PREFERRED_STACK_BOUNDARY != 0)
#endif
    size = round_push (size);
#endif

  do_pending_stack_adjust ();

  /* We ought always to be called at the top level, and the stack ought
     to be aligned properly here.  */
#ifdef PREFERRED_STACK_BOUNDARY
  if (stack_pointer_delta % (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT))
    abort ();
#endif

  /* If needed, check that we have the required amount of stack.  Take into
     account what has already been checked.  */
  if (flag_stack_check && ! STACK_CHECK_BUILTIN)
    probe_stack_range (STACK_CHECK_MAX_FRAME_SIZE + STACK_CHECK_PROTECT, size);

  /* Don't use a TARGET that isn't a pseudo or is the wrong mode.  */
  if (target == 0 || GET_CODE (target) != REG
      || REGNO (target) < FIRST_PSEUDO_REGISTER
      || GET_MODE (target) != Pmode)
    target = gen_reg_rtx (Pmode);

  mark_reg_pointer (target, known_align);

  /* Perform the required allocation from the stack.  Some systems do
     this differently than simply incrementing/decrementing from the
     stack pointer, such as acquiring the space by calling malloc().  */
#ifdef HAVE_allocate_stack
  if (HAVE_allocate_stack)
    {
      enum machine_mode mode = STACK_SIZE_MODE;
      insn_operand_predicate_fn pred;

      pred = insn_data[(int) CODE_FOR_allocate_stack].operand[0].predicate;
      if (pred && ! ((*pred) (target, Pmode)))
#ifdef POINTERS_EXTEND_UNSIGNED
        target = convert_memory_address (Pmode, target);
#else
        target = copy_to_mode_reg (Pmode, target);
#endif

      if (mode == VOIDmode)
        mode = Pmode;

      pred = insn_data[(int) CODE_FOR_allocate_stack].operand[1].predicate;
      if (pred && ! ((*pred) (size, mode)))
        size = copy_to_mode_reg (mode, size);

      emit_insn (gen_allocate_stack (target, size));
    }
  else
#endif
    {
#ifndef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif

      /* Check stack bounds if necessary.  */
      if (current_function_limit_stack)
        {
          rtx available;
          rtx space_available = gen_label_rtx ();
#ifdef STACK_GROWS_DOWNWARD
          available = expand_binop (Pmode, sub_optab,
                                    stack_pointer_rtx, stack_limit_rtx,
                                    NULL_RTX, 1, OPTAB_WIDEN);
#else
          available = expand_binop (Pmode, sub_optab,
                                    stack_limit_rtx, stack_pointer_rtx,
                                    NULL_RTX, 1, OPTAB_WIDEN);
#endif
          emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
                                   0, space_available);
#ifdef HAVE_trap
          if (HAVE_trap)
            emit_insn (gen_trap ());
          else
#endif
            error ("stack limits not supported on this target");
          emit_barrier ();
          emit_label (space_available);
        }

      anti_adjust_stack (size);
#ifdef SETJMP_VIA_SAVE_AREA
      if (setjmpless_size != NULL_RTX)
        {
          rtx note_target = get_last_insn ();

          REG_NOTES (note_target)
            = gen_rtx_EXPR_LIST (REG_SAVE_AREA, setjmpless_size,
                                 REG_NOTES (note_target));
        }
#endif /* SETJMP_VIA_SAVE_AREA */

#ifdef STACK_GROWS_DOWNWARD
      emit_move_insn (target, virtual_stack_dynamic_rtx);
#endif
    }

  if (MUST_ALIGN)
    {
      /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
         but we know it can't.  So add ourselves and then do
         TRUNC_DIV_EXPR.  */
      target = expand_binop (Pmode, add_optab, target,
                             GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT - 1),
                             NULL_RTX, 1, OPTAB_LIB_WIDEN);
      target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
                              GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
                              NULL_RTX, 1);
      target = expand_mult (Pmode, target,
                            GEN_INT (BIGGEST_ALIGNMENT / BITS_PER_UNIT),
                            NULL_RTX, 1);
    }

  /* Some systems require a particular insn to refer to the stack
     to make the pages exist.  */
#ifdef HAVE_probe
  if (HAVE_probe)
    emit_insn (gen_probe ());
#endif

  /* Record the new stack level for nonlocal gotos.  */
  if (nonlocal_goto_handler_slots != 0)
    emit_stack_save (SAVE_NONLOCAL, &nonlocal_goto_stack_level, NULL_RTX);

  return target;
}
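
/* A minimal usage sketch (hypothetical caller code, in the style of
   expand_builtin_alloca): allocate a variable-sized block and get
   its address.

       rtx size = expand_expr (size_tree, NULL_RTX, VOIDmode, 0);
       rtx addr = allocate_dynamic_stack_space (size, NULL_RTX,
                                                BITS_PER_UNIT);

   The result is a Pmode pseudo holding an address aligned to
   BIGGEST_ALIGNMENT; the stack pointer has already been adjusted and
   any nonlocal-goto save level recorded.  */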
\f
/* A front end may want to override GCC's stack checking by providing a
   run-time routine to call to check the stack, so provide a mechanism for
   calling that routine.  */

static rtx stack_check_libfunc;

void
set_stack_check_libfunc (libfunc)
     rtx libfunc;
{
  stack_check_libfunc = libfunc;
}
\f
/* Emit one stack probe at ADDRESS, an address within the stack.  */

static void
emit_stack_probe (address)
     rtx address;
{
  rtx memref = gen_rtx_MEM (word_mode, address);

  MEM_VOLATILE_P (memref) = 1;

  if (STACK_CHECK_PROBE_LOAD)
    emit_move_insn (gen_reg_rtx (word_mode), memref);
  else
    emit_move_insn (memref, const0_rtx);
}

/* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
   FIRST is a constant and size is a Pmode RTX.  These are offsets from the
   current stack pointer.  STACK_GROWS_DOWNWARD says whether to add or
   subtract from the stack.  If SIZE is constant, this is done
   with a fixed number of probes.  Otherwise, we must make a loop.  */

#ifdef STACK_GROWS_DOWNWARD
#define STACK_GROW_OP MINUS
#else
#define STACK_GROW_OP PLUS
#endif

void
probe_stack_range (first, size)
     HOST_WIDE_INT first;
     rtx size;
{
  /* First see if the front end has set up a function for us to call to
     check the stack.  */
  if (stack_check_libfunc != 0)
    {
      rtx addr = memory_address (QImode,
                                 gen_rtx (STACK_GROW_OP, Pmode,
                                          stack_pointer_rtx,
                                          plus_constant (size, first)));

#ifdef POINTERS_EXTEND_UNSIGNED
      if (GET_MODE (addr) != ptr_mode)
        addr = convert_memory_address (ptr_mode, addr);
#endif

      emit_library_call (stack_check_libfunc, 0, VOIDmode, 1, addr,
                         ptr_mode);
    }

  /* Next see if we have an insn to check the stack.  Use it if so.  */
#ifdef HAVE_check_stack
  else if (HAVE_check_stack)
    {
      insn_operand_predicate_fn pred;
      rtx last_addr
        = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                         stack_pointer_rtx,
                                         plus_constant (size, first)),
                         NULL_RTX);

      pred = insn_data[(int) CODE_FOR_check_stack].operand[0].predicate;
      if (pred && ! ((*pred) (last_addr, Pmode)))
        last_addr = copy_to_mode_reg (Pmode, last_addr);

      emit_insn (gen_check_stack (last_addr));
    }
#endif

  /* If we have to generate explicit probes, see if we have a constant
     small number of them to generate.  If so, that's the easy case.  */
  else if (GET_CODE (size) == CONST_INT
           && INTVAL (size) < 10 * STACK_CHECK_PROBE_INTERVAL)
    {
      HOST_WIDE_INT offset;

      /* Start probing at FIRST + N * STACK_CHECK_PROBE_INTERVAL
         for values of N from 1 until it exceeds LAST.  If only one
         probe is needed, this will not generate any code.  Then probe
         at LAST.  */
      for (offset = first + STACK_CHECK_PROBE_INTERVAL;
           offset < INTVAL (size);
           offset = offset + STACK_CHECK_PROBE_INTERVAL)
        emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                          stack_pointer_rtx,
                                          GEN_INT (offset)));

      emit_stack_probe (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                        stack_pointer_rtx,
                                        plus_constant (size, first)));
    }

  /* In the variable case, do the same as above, but in a loop.  We emit loop
     notes so that loop optimization can be done.  */
  else
    {
      rtx test_addr
        = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                         stack_pointer_rtx,
                                         GEN_INT (first + STACK_CHECK_PROBE_INTERVAL)),
                         NULL_RTX);
      rtx last_addr
        = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
                                         stack_pointer_rtx,
                                         plus_constant (size, first)),
                         NULL_RTX);
      rtx incr = GEN_INT (STACK_CHECK_PROBE_INTERVAL);
      rtx loop_lab = gen_label_rtx ();
      rtx test_lab = gen_label_rtx ();
      rtx end_lab = gen_label_rtx ();
      rtx temp;

      if (GET_CODE (test_addr) != REG
          || REGNO (test_addr) < FIRST_PSEUDO_REGISTER)
        test_addr = force_reg (Pmode, test_addr);

      emit_note (NULL, NOTE_INSN_LOOP_BEG);
      emit_jump (test_lab);

      emit_label (loop_lab);
      emit_stack_probe (test_addr);

      emit_note (NULL, NOTE_INSN_LOOP_CONT);

#ifdef STACK_GROWS_DOWNWARD
#define CMP_OPCODE GTU
      temp = expand_binop (Pmode, sub_optab, test_addr, incr, test_addr,
                           1, OPTAB_WIDEN);
#else
#define CMP_OPCODE LTU
      temp = expand_binop (Pmode, add_optab, test_addr, incr, test_addr,
                           1, OPTAB_WIDEN);
#endif

      if (temp != test_addr)
        abort ();

      emit_label (test_lab);
      emit_cmp_and_jump_insns (test_addr, last_addr, CMP_OPCODE,
                               NULL_RTX, Pmode, 1, 0, loop_lab);
      emit_jump (end_lab);
      emit_note (NULL, NOTE_INSN_LOOP_END);
      emit_label (end_lab);

      emit_stack_probe (last_addr);
    }
}
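
/* A worked example of the constant case above: with
   STACK_CHECK_PROBE_INTERVAL == 4096, first == 0 and size == 10000,
   the loop probes at offsets 4096 and 8192 from the stack pointer
   (in the direction of stack growth), and the final probe lands at
   offset 10000, covering the whole range with probes no more than
   one interval apart.  */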
\f
/* Return an rtx representing the register or memory location
   in which a scalar value of data type VALTYPE
   was returned by a function call to function FUNC.
   FUNC is a FUNCTION_DECL node if the precise function is known,
   otherwise 0.
   OUTGOING is 1 if on a machine with register windows this function
   should return the register in which the function will put its result
   and 0 otherwise.  */

rtx
hard_function_value (valtype, func, outgoing)
     tree valtype;
     tree func ATTRIBUTE_UNUSED;
     int outgoing ATTRIBUTE_UNUSED;
{
  rtx val;

#ifdef FUNCTION_OUTGOING_VALUE
  if (outgoing)
    val = FUNCTION_OUTGOING_VALUE (valtype, func);
  else
#endif
    val = FUNCTION_VALUE (valtype, func);

  if (GET_CODE (val) == REG
      && GET_MODE (val) == BLKmode)
    {
      unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
      enum machine_mode tmpmode;

      for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
           tmpmode != VOIDmode;
           tmpmode = GET_MODE_WIDER_MODE (tmpmode))
        {
          /* Have we found a large enough mode?  */
          if (GET_MODE_SIZE (tmpmode) >= bytes)
            break;
        }

      /* No suitable mode found.  */
      if (tmpmode == VOIDmode)
        abort ();

      PUT_MODE (val, tmpmode);
    }
  return val;
}

/* Return an rtx representing the register or memory location
   in which a scalar value of mode MODE was returned by a library call.  */

rtx
hard_libcall_value (mode)
     enum machine_mode mode;
{
  return LIBCALL_VALUE (mode);
}

/* Look up the tree code for a given rtx code
   to provide the arithmetic operation for REAL_ARITHMETIC.
   The function returns an int because the caller may not know
   what `enum tree_code' means.  */

int
rtx_to_tree_code (code)
     enum rtx_code code;
{
  enum tree_code tcode;

  switch (code)
    {
    case PLUS:
      tcode = PLUS_EXPR;
      break;
    case MINUS:
      tcode = MINUS_EXPR;
      break;
    case MULT:
      tcode = MULT_EXPR;
      break;
    case DIV:
      tcode = RDIV_EXPR;
      break;
    case SMIN:
      tcode = MIN_EXPR;
      break;
    case SMAX:
      tcode = MAX_EXPR;
      break;
    default:
      tcode = LAST_AND_UNUSED_TREE_CODE;
      break;
    }
  return ((int) tcode);
}
1747 }