1 /* Subroutines for manipulating rtx's in semantically interesting ways.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "diagnostic-core.h"
26 #include "rtl.h"
27 #include "hash-set.h"
28 #include "machmode.h"
29 #include "vec.h"
30 #include "double-int.h"
31 #include "input.h"
32 #include "alias.h"
33 #include "symtab.h"
34 #include "wide-int.h"
35 #include "inchash.h"
36 #include "real.h"
37 #include "tree.h"
38 #include "stor-layout.h"
39 #include "tm_p.h"
40 #include "flags.h"
41 #include "except.h"
42 #include "hard-reg-set.h"
43 #include "function.h"
44 #include "hashtab.h"
45 #include "statistics.h"
46 #include "fixed-value.h"
47 #include "insn-config.h"
48 #include "expmed.h"
49 #include "dojump.h"
50 #include "explow.h"
51 #include "calls.h"
52 #include "emit-rtl.h"
53 #include "varasm.h"
54 #include "stmt.h"
55 #include "expr.h"
56 #include "insn-codes.h"
57 #include "optabs.h"
58 #include "libfuncs.h"
59 #include "ggc.h"
60 #include "recog.h"
61 #include "langhooks.h"
62 #include "target.h"
63 #include "common/common-target.h"
64 #include "output.h"
65
66 static rtx break_out_memory_refs (rtx);
67
68
69 /* Truncate and perhaps sign-extend C as appropriate for MODE. */
70
71 HOST_WIDE_INT
72 trunc_int_for_mode (HOST_WIDE_INT c, machine_mode mode)
73 {
74 int width = GET_MODE_PRECISION (mode);
75
76 /* You want to truncate to a _what_? */
77 gcc_assert (SCALAR_INT_MODE_P (mode)
78 || POINTER_BOUNDS_MODE_P (mode));
79
80 /* Canonicalize BImode to 0 and STORE_FLAG_VALUE. */
81 if (mode == BImode)
82 return c & 1 ? STORE_FLAG_VALUE : 0;
83
84 /* Sign-extend for the requested mode. */
85
86 if (width < HOST_BITS_PER_WIDE_INT)
87 {
88 HOST_WIDE_INT sign = 1;
89 sign <<= width - 1;
90 c &= (sign << 1) - 1;
91 c ^= sign;
92 c -= sign;
93 }
94
95 return c;
96 }
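
/* A worked example (an added illustration, not upstream commentary):
   QImode has 8 bits of precision, so

     trunc_int_for_mode (0xff, QImode)  == -1
     trunc_int_for_mode (0x100, QImode) == 0

   i.e. C is reduced modulo 2**8 and then sign-extended from bit 7.  */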
97
98 /* Return an rtx for the sum of X and the integer C, given that X has
99 mode MODE. INPLACE is true if X can be modified in place or false
100 if it must be treated as immutable. */
101
102 rtx
103 plus_constant (machine_mode mode, rtx x, HOST_WIDE_INT c,
104 bool inplace)
105 {
106 RTX_CODE code;
107 rtx y;
108 rtx tem;
109 int all_constant = 0;
110
111 gcc_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
112
113 if (c == 0)
114 return x;
115
116 restart:
117
118 code = GET_CODE (x);
119 y = x;
120
121 switch (code)
122 {
123 CASE_CONST_SCALAR_INT:
124 return immed_wide_int_const (wi::add (std::make_pair (x, mode), c),
125 mode);
126 case MEM:
127 /* If this is a reference to the constant pool, try replacing it with
128 a reference to a new constant. If the resulting address isn't
129 valid, don't return it because we have no way to validize it. */
130 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
131 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
132 {
133 tem = plus_constant (mode, get_pool_constant (XEXP (x, 0)), c);
134 tem = force_const_mem (GET_MODE (x), tem);
135 if (memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
136 return tem;
137 }
138 break;
139
140 case CONST:
141 /* If adding to something entirely constant, set a flag
142 so that we can add a CONST around the result. */
143 if (inplace && shared_const_p (x))
144 inplace = false;
145 x = XEXP (x, 0);
146 all_constant = 1;
147 goto restart;
148
149 case SYMBOL_REF:
150 case LABEL_REF:
151 all_constant = 1;
152 break;
153
154 case PLUS:
155 /* The interesting case is adding the integer to a sum. Look
156 for a constant term in the sum and combine it with C. An
157 integer constant term is combined directly; a constant term that
158 is not an explicit integer is still grouped together with C.
159
160 We may not immediately return from the recursive call here, lest
161 all_constant get lost.
162
163 if (CONSTANT_P (XEXP (x, 1)))
164 {
165 rtx term = plus_constant (mode, XEXP (x, 1), c, inplace);
166 if (term == const0_rtx)
167 x = XEXP (x, 0);
168 else if (inplace)
169 XEXP (x, 1) = term;
170 else
171 x = gen_rtx_PLUS (mode, XEXP (x, 0), term);
172 c = 0;
173 }
174 else if (rtx *const_loc = find_constant_term_loc (&y))
175 {
176 if (!inplace)
177 {
178 /* We need to be careful since X may be shared and we can't
179 modify it in place. */
180 x = copy_rtx (x);
181 const_loc = find_constant_term_loc (&x);
182 }
183 *const_loc = plus_constant (mode, *const_loc, c, true);
184 c = 0;
185 }
186 break;
187
188 default:
189 break;
190 }
191
192 if (c != 0)
193 x = gen_rtx_PLUS (mode, x, gen_int_mode (c, mode));
194
195 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
196 return x;
197 else if (all_constant)
198 return gen_rtx_CONST (mode, x);
199 else
200 return x;
201 }
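
/* For illustration (an added sketch, not upstream commentary):

     plus_constant (Pmode, stack_pointer_rtx, 4)
       ==> (plus:P (reg sp) (const_int 4))

   Constant terms fold, so adding -8 to (plus (reg R) (const_int 8))
   collapses to (reg R) instead of nesting another PLUS.  */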
202 \f
203 /* If X is a sum, return a new sum like X but lacking any constant terms.
204 Add all the removed constant terms into *CONSTPTR.
205 X itself is not altered. The result != X if and only if
206 it is not isomorphic to X. */
207
208 rtx
209 eliminate_constant_term (rtx x, rtx *constptr)
210 {
211 rtx x0, x1;
212 rtx tem;
213
214 if (GET_CODE (x) != PLUS)
215 return x;
216
217 /* First handle constants appearing at this level explicitly. */
218 if (CONST_INT_P (XEXP (x, 1))
219 && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
220 XEXP (x, 1)))
221 && CONST_INT_P (tem))
222 {
223 *constptr = tem;
224 return eliminate_constant_term (XEXP (x, 0), constptr);
225 }
226
227 tem = const0_rtx;
228 x0 = eliminate_constant_term (XEXP (x, 0), &tem);
229 x1 = eliminate_constant_term (XEXP (x, 1), &tem);
230 if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
231 && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
232 *constptr, tem))
233 && CONST_INT_P (tem))
234 {
235 *constptr = tem;
236 return gen_rtx_PLUS (GET_MODE (x), x0, x1);
237 }
238
239 return x;
240 }
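
/* Example (added note): with *CONSTPTR == const0_rtx and
   X == (plus:SI (reg R) (const_int 12)), this returns (reg R) and
   leaves *CONSTPTR == (const_int 12); a sum with no constant term
   comes back unchanged with *CONSTPTR untouched.  */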
241
242 \f
243 /* Return a copy of X in which all memory references
244 and all constants that involve symbol refs
245 have been replaced with new temporary registers.
246 Also emit code to load the memory locations and constants
247 into those registers.
248
249 If X contains no such constants or memory references,
250 X itself (not a copy) is returned.
251
252 If a constant is found in the address that is not a legitimate constant
253 in an insn, it is left alone in the hope that it might be valid in the
254 address.
255
256 X may contain no arithmetic except addition, subtraction and multiplication.
257 Values returned by expand_expr with 1 for sum_ok fit this constraint. */
258
259 static rtx
260 break_out_memory_refs (rtx x)
261 {
262 if (MEM_P (x)
263 || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
264 && GET_MODE (x) != VOIDmode))
265 x = force_reg (GET_MODE (x), x);
266 else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
267 || GET_CODE (x) == MULT)
268 {
269 rtx op0 = break_out_memory_refs (XEXP (x, 0));
270 rtx op1 = break_out_memory_refs (XEXP (x, 1));
271
272 if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
273 x = simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
274 }
275
276 return x;
277 }
278
279 /* Given X, a memory address in address space AS' pointer mode, convert it to
280 an address in the address space's address mode, or vice versa (TO_MODE says
281 which way). We take advantage of the fact that pointers are not allowed to
282 overflow by commuting arithmetic operations over conversions so that address
283 arithmetic insns can be used. IN_CONST is true if this conversion is inside
284 a CONST. */
285
286 static rtx
287 convert_memory_address_addr_space_1 (machine_mode to_mode ATTRIBUTE_UNUSED,
288 rtx x, addr_space_t as ATTRIBUTE_UNUSED,
289 bool in_const ATTRIBUTE_UNUSED)
290 {
291 #ifndef POINTERS_EXTEND_UNSIGNED
292 gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
293 return x;
294 #else /* defined(POINTERS_EXTEND_UNSIGNED) */
295 machine_mode pointer_mode, address_mode, from_mode;
296 rtx temp;
297 enum rtx_code code;
298
299 /* If X already has the right mode, just return it. */
300 if (GET_MODE (x) == to_mode)
301 return x;
302
303 pointer_mode = targetm.addr_space.pointer_mode (as);
304 address_mode = targetm.addr_space.address_mode (as);
305 from_mode = to_mode == pointer_mode ? address_mode : pointer_mode;
306
307 /* Here we handle some special cases. If none of them apply, fall through
308 to the default case. */
309 switch (GET_CODE (x))
310 {
311 CASE_CONST_SCALAR_INT:
312 if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
313 code = TRUNCATE;
314 else if (POINTERS_EXTEND_UNSIGNED < 0)
315 break;
316 else if (POINTERS_EXTEND_UNSIGNED > 0)
317 code = ZERO_EXTEND;
318 else
319 code = SIGN_EXTEND;
320 temp = simplify_unary_operation (code, to_mode, x, from_mode);
321 if (temp)
322 return temp;
323 break;
324
325 case SUBREG:
326 if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
327 && GET_MODE (SUBREG_REG (x)) == to_mode)
328 return SUBREG_REG (x);
329 break;
330
331 case LABEL_REF:
332 temp = gen_rtx_LABEL_REF (to_mode, LABEL_REF_LABEL (x));
333 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
334 return temp;
335 break;
336
337 case SYMBOL_REF:
338 temp = shallow_copy_rtx (x);
339 PUT_MODE (temp, to_mode);
340 return temp;
341 break;
342
343 case CONST:
344 return gen_rtx_CONST (to_mode,
345 convert_memory_address_addr_space_1
346 (to_mode, XEXP (x, 0), as, true));
347 break;
348
349 case PLUS:
350 case MULT:
351 /* For addition we can safely permute the conversion and addition
352 operation if one operand is a constant and converting the constant
353 does not change it or if one operand is a constant and we are
354 using a ptr_extend instruction (POINTERS_EXTEND_UNSIGNED < 0).
355 We can always safely permute them if we are making the address
356 narrower. Inside a CONST RTL, this is safe for both pointers
357 zero or sign extended as pointers cannot wrap. */
358 if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
359 || (GET_CODE (x) == PLUS
360 && CONST_INT_P (XEXP (x, 1))
361 && ((in_const && POINTERS_EXTEND_UNSIGNED != 0)
362 || XEXP (x, 1) == convert_memory_address_addr_space_1
363 (to_mode, XEXP (x, 1), as, in_const)
364 || POINTERS_EXTEND_UNSIGNED < 0)))
365 return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
366 convert_memory_address_addr_space_1
367 (to_mode, XEXP (x, 0), as, in_const),
368 XEXP (x, 1));
369 break;
370
371 default:
372 break;
373 }
374
375 return convert_modes (to_mode, from_mode,
376 x, POINTERS_EXTEND_UNSIGNED);
377 #endif /* defined(POINTERS_EXTEND_UNSIGNED) */
378 }
379
380 /* Given X, a memory address in address space AS' pointer mode, convert it to
381 an address in the address space's address mode, or vice versa (TO_MODE says
382 which way). We take advantage of the fact that pointers are not allowed to
383 overflow by commuting arithmetic operations over conversions so that address
384 arithmetic insns can be used. */
385
386 rtx
387 convert_memory_address_addr_space (machine_mode to_mode, rtx x, addr_space_t as)
388 {
389 return convert_memory_address_addr_space_1 (to_mode, x, as, false);
390 }
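
/* Hypothetical example (added illustration): on a target with SImode
   pointers, a DImode address mode and POINTERS_EXTEND_UNSIGNED == 1,
   converting (const_int 0x1000) to DImode zero-extends it at compile
   time, while (symbol_ref:SI "x") simply has its mode rewritten to
   (symbol_ref:DI "x") by the SYMBOL_REF case above.  */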
391 \f
392
393 /* Return something equivalent to X but valid as a memory address for something
394 of mode MODE in the named address space AS. When X is not itself valid,
395 this works by copying X or subexpressions of it into registers. */
396
397 rtx
398 memory_address_addr_space (machine_mode mode, rtx x, addr_space_t as)
399 {
400 rtx oldx = x;
401 machine_mode address_mode = targetm.addr_space.address_mode (as);
402
403 x = convert_memory_address_addr_space (address_mode, x, as);
404
405 /* By passing constant addresses through registers
406 we get a chance to cse them. */
407 if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
408 x = force_reg (address_mode, x);
409
410 /* We get better cse by rejecting indirect addressing at this stage.
411 Let the combiner create indirect addresses where appropriate.
412 For now, generate the code so that the subexpressions useful to share
413 are visible. But not if cse won't be done! */
414 else
415 {
416 if (! cse_not_expected && !REG_P (x))
417 x = break_out_memory_refs (x);
418
419 /* At this point, any valid address is accepted. */
420 if (memory_address_addr_space_p (mode, x, as))
421 goto done;
422
423 /* If it was valid before but breaking out memory refs invalidated it,
424 use it the old way. */
425 if (memory_address_addr_space_p (mode, oldx, as))
426 {
427 x = oldx;
428 goto done;
429 }
430
431 /* Perform machine-dependent transformations on X
432 in certain cases. This is not necessary since the code
433 below can handle all possible cases, but machine-dependent
434 transformations can make better code. */
435 {
436 rtx orig_x = x;
437 x = targetm.addr_space.legitimize_address (x, oldx, mode, as);
438 if (orig_x != x && memory_address_addr_space_p (mode, x, as))
439 goto done;
440 }
441
442 /* PLUS and MULT can appear in special ways
443 as the result of attempts to make an address usable for indexing.
444 Usually they are dealt with by calling force_operand, below.
445 But a sum containing constant terms is special
446 if removing them makes the sum a valid address:
447 then we generate that address in a register
448 and index off of it. We do this because it often makes
449 shorter code, and because the addresses thus generated
450 in registers often become common subexpressions. */
451 if (GET_CODE (x) == PLUS)
452 {
453 rtx constant_term = const0_rtx;
454 rtx y = eliminate_constant_term (x, &constant_term);
455 if (constant_term == const0_rtx
456 || ! memory_address_addr_space_p (mode, y, as))
457 x = force_operand (x, NULL_RTX);
458 else
459 {
460 y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
461 if (! memory_address_addr_space_p (mode, y, as))
462 x = force_operand (x, NULL_RTX);
463 else
464 x = y;
465 }
466 }
467
468 else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
469 x = force_operand (x, NULL_RTX);
470
471 /* If we have a register that's an invalid address,
472 it must be a hard reg of the wrong class. Copy it to a pseudo. */
473 else if (REG_P (x))
474 x = copy_to_reg (x);
475
476 /* Last resort: copy the value to a register, since
477 the register is a valid address. */
478 else
479 x = force_reg (address_mode, x);
480 }
481
482 done:
483
484 gcc_assert (memory_address_addr_space_p (mode, x, as));
485 /* If we didn't change the address, we are done. Otherwise, mark
486 a reg as a pointer if we have REG or REG + CONST_INT. */
487 if (oldx == x)
488 return x;
489 else if (REG_P (x))
490 mark_reg_pointer (x, BITS_PER_UNIT);
491 else if (GET_CODE (x) == PLUS
492 && REG_P (XEXP (x, 0))
493 && CONST_INT_P (XEXP (x, 1)))
494 mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);
495
496 /* OLDX may have been the address on a temporary. Update the address
497 to indicate that X is now used. */
498 update_temp_slot_address (oldx, x);
499
500 return x;
501 }
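
/* Most callers reach this through the memory_address macro, which
   passes the generic address space.  A sketch of typical use when
   expanding a load (added illustration):

     rtx addr = memory_address (SImode, addr_rtx);
     rtx mem = gen_rtx_MEM (SImode, addr);

   ADDR is then guaranteed to satisfy memory_address_addr_space_p.  */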
502
503 /* If REF is a MEM with an invalid address, change it into a valid address.
504 Pass through anything else unchanged. REF must be an unshared rtx and
505 the function may modify it in-place. */
506
507 rtx
508 validize_mem (rtx ref)
509 {
510 if (!MEM_P (ref))
511 return ref;
512 ref = use_anchored_address (ref);
513 if (memory_address_addr_space_p (GET_MODE (ref), XEXP (ref, 0),
514 MEM_ADDR_SPACE (ref)))
515 return ref;
516
517 return replace_equiv_address (ref, XEXP (ref, 0), true);
518 }
519
520 /* If X is a memory reference to a member of an object block, try rewriting
521 it to use an anchor instead. Return the new memory reference on success
522 and the old one on failure. */
523
524 rtx
525 use_anchored_address (rtx x)
526 {
527 rtx base;
528 HOST_WIDE_INT offset;
529 machine_mode mode;
530
531 if (!flag_section_anchors)
532 return x;
533
534 if (!MEM_P (x))
535 return x;
536
537 /* Split the address into a base and offset. */
538 base = XEXP (x, 0);
539 offset = 0;
540 if (GET_CODE (base) == CONST
541 && GET_CODE (XEXP (base, 0)) == PLUS
542 && CONST_INT_P (XEXP (XEXP (base, 0), 1)))
543 {
544 offset += INTVAL (XEXP (XEXP (base, 0), 1));
545 base = XEXP (XEXP (base, 0), 0);
546 }
547
548 /* Check whether BASE is suitable for anchors. */
549 if (GET_CODE (base) != SYMBOL_REF
550 || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
551 || SYMBOL_REF_ANCHOR_P (base)
552 || SYMBOL_REF_BLOCK (base) == NULL
553 || !targetm.use_anchors_for_symbol_p (base))
554 return x;
555
556 /* Decide where BASE is going to be. */
557 place_block_symbol (base);
558
559 /* Get the anchor we need to use. */
560 offset += SYMBOL_REF_BLOCK_OFFSET (base);
561 base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
562 SYMBOL_REF_TLS_MODEL (base));
563
564 /* Work out the offset from the anchor. */
565 offset -= SYMBOL_REF_BLOCK_OFFSET (base);
566
567 /* If we're going to run a CSE pass, force the anchor into a register.
568 We will then be able to reuse registers for several accesses, if the
569 target costs say that that's worthwhile. */
570 mode = GET_MODE (base);
571 if (!cse_not_expected)
572 base = force_reg (mode, base);
573
574 return replace_equiv_address (x, plus_constant (mode, base, offset));
575 }
576 \f
577 /* Copy the value or contents of X to a new temp reg and return that reg. */
578
579 rtx
580 copy_to_reg (rtx x)
581 {
582 rtx temp = gen_reg_rtx (GET_MODE (x));
583
584 /* If not an operand, must be an address with PLUS and MULT so
585 do the computation. */
586 if (! general_operand (x, VOIDmode))
587 x = force_operand (x, temp);
588
589 if (x != temp)
590 emit_move_insn (temp, x);
591
592 return temp;
593 }
594
595 /* Like copy_to_reg but always give the new register mode Pmode
596 in case X is a constant. */
597
598 rtx
599 copy_addr_to_reg (rtx x)
600 {
601 return copy_to_mode_reg (Pmode, x);
602 }
603
604 /* Like copy_to_reg but always give the new register mode MODE
605 in case X is a constant. */
606
607 rtx
608 copy_to_mode_reg (machine_mode mode, rtx x)
609 {
610 rtx temp = gen_reg_rtx (mode);
611
612 /* If not an operand, must be an address with PLUS and MULT so
613 do the computation. */
614 if (! general_operand (x, VOIDmode))
615 x = force_operand (x, temp);
616
617 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
618 if (x != temp)
619 emit_move_insn (temp, x);
620 return temp;
621 }
622
623 /* Load X into a register if it is not already one.
624 Use mode MODE for the register.
625 X should be valid for mode MODE, but it may be a constant which
626 is valid for all integer modes; that's why the caller must specify MODE.
627
628 The caller must not alter the value in the register we return,
629 since we mark it as a "constant" register. */
630
631 rtx
632 force_reg (machine_mode mode, rtx x)
633 {
634 rtx temp, set;
635 rtx_insn *insn;
636
637 if (REG_P (x))
638 return x;
639
640 if (general_operand (x, mode))
641 {
642 temp = gen_reg_rtx (mode);
643 insn = emit_move_insn (temp, x);
644 }
645 else
646 {
647 temp = force_operand (x, NULL_RTX);
648 if (REG_P (temp))
649 insn = get_last_insn ();
650 else
651 {
652 rtx temp2 = gen_reg_rtx (mode);
653 insn = emit_move_insn (temp2, temp);
654 temp = temp2;
655 }
656 }
657
658 /* Let optimizers know that TEMP's value never changes
659 and that X can be substituted for it. Don't get confused
660 if INSN set something else (such as a SUBREG of TEMP). */
661 if (CONSTANT_P (x)
662 && (set = single_set (insn)) != 0
663 && SET_DEST (set) == temp
664 && ! rtx_equal_p (x, SET_SRC (set)))
665 set_unique_reg_note (insn, REG_EQUAL, x);
666
667 /* Let optimizers know that TEMP is a pointer, and if so, the
668 known alignment of that pointer. */
669 {
670 unsigned align = 0;
671 if (GET_CODE (x) == SYMBOL_REF)
672 {
673 align = BITS_PER_UNIT;
674 if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
675 align = DECL_ALIGN (SYMBOL_REF_DECL (x));
676 }
677 else if (GET_CODE (x) == LABEL_REF)
678 align = BITS_PER_UNIT;
679 else if (GET_CODE (x) == CONST
680 && GET_CODE (XEXP (x, 0)) == PLUS
681 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
682 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
683 {
684 rtx s = XEXP (XEXP (x, 0), 0);
685 rtx c = XEXP (XEXP (x, 0), 1);
686 unsigned sa, ca;
687
688 sa = BITS_PER_UNIT;
689 if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
690 sa = DECL_ALIGN (SYMBOL_REF_DECL (s));
691
692 if (INTVAL (c) == 0)
693 align = sa;
694 else
695 {
696 ca = ctz_hwi (INTVAL (c)) * BITS_PER_UNIT;
697 align = MIN (sa, ca);
698 }
699 }
700
701 if (align || (MEM_P (x) && MEM_POINTER (x)))
702 mark_reg_pointer (temp, align);
703 }
704
705 return temp;
706 }
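
/* Usage sketch (added note): force_reg (SImode, GEN_INT (42)) emits
   (set (reg:SI N) (const_int 42)) and returns the new pseudo.  Forcing
   (const:P (plus (symbol_ref "s") (const_int 4))) into a register also
   marks the pseudo as a pointer whose alignment is the minimum of the
   alignment of "s" and the alignment implied by the offset's low bits.  */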
707
708 /* If X is a memory ref, copy its contents to a new temp reg and return
709 that reg. Otherwise, return X. */
710
711 rtx
712 force_not_mem (rtx x)
713 {
714 rtx temp;
715
716 if (!MEM_P (x) || GET_MODE (x) == BLKmode)
717 return x;
718
719 temp = gen_reg_rtx (GET_MODE (x));
720
721 if (MEM_POINTER (x))
722 REG_POINTER (temp) = 1;
723
724 emit_move_insn (temp, x);
725 return temp;
726 }
727
728 /* Copy X to TARGET (if it's nonzero and a reg)
729 or to a new temp reg and return that reg.
730 MODE is the mode to use for X in case it is a constant. */
731
732 rtx
733 copy_to_suggested_reg (rtx x, rtx target, machine_mode mode)
734 {
735 rtx temp;
736
737 if (target && REG_P (target))
738 temp = target;
739 else
740 temp = gen_reg_rtx (mode);
741
742 emit_move_insn (temp, x);
743 return temp;
744 }
745 \f
746 /* Return the mode to use to pass or return a scalar of TYPE and MODE.
747 PUNSIGNEDP points to the signedness of the type and may be adjusted
748 to show what signedness to use on extension operations.
749
750 FOR_RETURN is nonzero if the caller is promoting the return value
751 of FNDECL, else it is for promoting args. */
752
753 machine_mode
754 promote_function_mode (const_tree type, machine_mode mode, int *punsignedp,
755 const_tree funtype, int for_return)
756 {
757 /* Called without a type node for a libcall. */
758 if (type == NULL_TREE)
759 {
760 if (INTEGRAL_MODE_P (mode))
761 return targetm.calls.promote_function_mode (NULL_TREE, mode,
762 punsignedp, funtype,
763 for_return);
764 else
765 return mode;
766 }
767
768 switch (TREE_CODE (type))
769 {
770 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
771 case REAL_TYPE: case OFFSET_TYPE: case FIXED_POINT_TYPE:
772 case POINTER_TYPE: case REFERENCE_TYPE:
773 return targetm.calls.promote_function_mode (type, mode, punsignedp, funtype,
774 for_return);
775
776 default:
777 return mode;
778 }
779 }
780 /* Return the mode to use to store a scalar of TYPE and MODE.
781 PUNSIGNEDP points to the signedness of the type and may be adjusted
782 to show what signedness to use on extension operations. */
783
784 machine_mode
785 promote_mode (const_tree type ATTRIBUTE_UNUSED, machine_mode mode,
786 int *punsignedp ATTRIBUTE_UNUSED)
787 {
788 #ifdef PROMOTE_MODE
789 enum tree_code code;
790 int unsignedp;
791 #endif
792
793 /* For libcalls this is invoked without TYPE from the backend's
794 TARGET_PROMOTE_FUNCTION_MODE hook. Don't do anything in that
795 case. */
796 if (type == NULL_TREE)
797 return mode;
798
799 /* FIXME: this is the same logic that was there until GCC 4.4, but we
800 probably want to test POINTERS_EXTEND_UNSIGNED even if PROMOTE_MODE
801 is not defined. The affected targets are M32C, S390, SPARC. */
802 #ifdef PROMOTE_MODE
803 code = TREE_CODE (type);
804 unsignedp = *punsignedp;
805
806 switch (code)
807 {
808 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
809 case REAL_TYPE: case OFFSET_TYPE: case FIXED_POINT_TYPE:
810 PROMOTE_MODE (mode, unsignedp, type);
811 *punsignedp = unsignedp;
812 return mode;
813 break;
814
815 #ifdef POINTERS_EXTEND_UNSIGNED
816 case REFERENCE_TYPE:
817 case POINTER_TYPE:
818 *punsignedp = POINTERS_EXTEND_UNSIGNED;
819 return targetm.addr_space.address_mode
820 (TYPE_ADDR_SPACE (TREE_TYPE (type)));
821 break;
822 #endif
823
824 default:
825 return mode;
826 }
827 #else
828 return mode;
829 #endif
830 }
831
832
833 /* Use one of promote_mode or promote_function_mode to find the promoted
834 mode of DECL. If PUNSIGNEDP is not NULL, store there the unsignedness
835 of DECL after promotion. */
836
837 machine_mode
838 promote_decl_mode (const_tree decl, int *punsignedp)
839 {
840 tree type = TREE_TYPE (decl);
841 int unsignedp = TYPE_UNSIGNED (type);
842 machine_mode mode = DECL_MODE (decl);
843 machine_mode pmode;
844
845 if (TREE_CODE (decl) == RESULT_DECL
846 || TREE_CODE (decl) == PARM_DECL)
847 pmode = promote_function_mode (type, mode, &unsignedp,
848 TREE_TYPE (current_function_decl), 2);
849 else
850 pmode = promote_mode (type, mode, &unsignedp);
851
852 if (punsignedp)
853 *punsignedp = unsignedp;
854 return pmode;
855 }
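
/* Example (added note, target-dependent): on a machine whose
   PROMOTE_MODE widens sub-word integers to word_mode, a QImode local
   variable of type signed char comes back from promote_decl_mode as
   word_mode with *PUNSIGNEDP == 0; a target without PROMOTE_MODE
   returns the original mode unchanged.  */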
856
857 \f
858 /* Controls the behaviour of {anti_,}adjust_stack. */
859 static bool suppress_reg_args_size;
860
861 /* A helper for adjust_stack and anti_adjust_stack. */
862
863 static void
864 adjust_stack_1 (rtx adjust, bool anti_p)
865 {
866 rtx temp;
867 rtx_insn *insn;
868
869 /* Hereafter anti_p means subtract_p. */
870 if (!STACK_GROWS_DOWNWARD)
871 anti_p = !anti_p;
872
873 temp = expand_binop (Pmode,
874 anti_p ? sub_optab : add_optab,
875 stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
876 OPTAB_LIB_WIDEN);
877
878 if (temp != stack_pointer_rtx)
879 insn = emit_move_insn (stack_pointer_rtx, temp);
880 else
881 {
882 insn = get_last_insn ();
883 temp = single_set (insn);
884 gcc_assert (temp != NULL && SET_DEST (temp) == stack_pointer_rtx);
885 }
886
887 if (!suppress_reg_args_size)
888 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
889 }
890
891 /* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
892 This pops when ADJUST is positive. ADJUST need not be constant. */
893
894 void
895 adjust_stack (rtx adjust)
896 {
897 if (adjust == const0_rtx)
898 return;
899
900 /* We expect all variable-sized adjustments to be multiples of
901 PREFERRED_STACK_BOUNDARY. */
902 if (CONST_INT_P (adjust))
903 stack_pointer_delta -= INTVAL (adjust);
904
905 adjust_stack_1 (adjust, false);
906 }
907
908 /* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
909 This pushes when ADJUST is positive. ADJUST need not be constant. */
910
911 void
912 anti_adjust_stack (rtx adjust)
913 {
914 if (adjust == const0_rtx)
915 return;
916
917 /* We expect all variable-sized adjustments to be multiples of
918 PREFERRED_STACK_BOUNDARY. */
919 if (CONST_INT_P (adjust))
920 stack_pointer_delta += INTVAL (adjust);
921
922 adjust_stack_1 (adjust, true);
923 }
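
/* Illustration (added note): on a STACK_GROWS_DOWNWARD target,
   anti_adjust_stack (GEN_INT (32)) emits sp = sp - 32, pushing 32
   bytes, and adjust_stack (GEN_INT (32)) emits sp = sp + 32 to pop
   them again; the senses are swapped on upward-growing stacks.  Each
   insn gets a REG_ARGS_SIZE note unless suppress_reg_args_size.  */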
924
925 /* Round the size of a block to be pushed up to the boundary required
926 by this machine. SIZE is the desired size, which need not be constant. */
927
928 static rtx
929 round_push (rtx size)
930 {
931 rtx align_rtx, alignm1_rtx;
932
933 if (!SUPPORTS_STACK_ALIGNMENT
934 || crtl->preferred_stack_boundary == MAX_SUPPORTED_STACK_ALIGNMENT)
935 {
936 int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
937
938 if (align == 1)
939 return size;
940
941 if (CONST_INT_P (size))
942 {
943 HOST_WIDE_INT new_size = (INTVAL (size) + align - 1) / align * align;
944
945 if (INTVAL (size) != new_size)
946 size = GEN_INT (new_size);
947 return size;
948 }
949
950 align_rtx = GEN_INT (align);
951 alignm1_rtx = GEN_INT (align - 1);
952 }
953 else
954 {
955 /* If crtl->preferred_stack_boundary might still grow, use
956 virtual_preferred_stack_boundary_rtx instead. This will be
957 substituted by the right value in vregs pass and optimized
958 during combine. */
959 align_rtx = virtual_preferred_stack_boundary_rtx;
960 alignm1_rtx = force_operand (plus_constant (Pmode, align_rtx, -1),
961 NULL_RTX);
962 }
963
964 /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
965 but we know it can't. So add ourselves and then do
966 TRUNC_DIV_EXPR. */
967 size = expand_binop (Pmode, add_optab, size, alignm1_rtx,
968 NULL_RTX, 1, OPTAB_LIB_WIDEN);
969 size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, align_rtx,
970 NULL_RTX, 1);
971 size = expand_mult (Pmode, size, align_rtx, NULL_RTX, 1);
972
973 return size;
974 }
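
/* Worked example (added note): with a preferred stack boundary of
   128 bits, ALIGN is 16 bytes, so a constant SIZE of 20 rounds up to
   32, and a variable SIZE expands to ((SIZE + 15) / 16) * 16 via the
   add-then-TRUNC_DIV_EXPR sequence above.  */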
975 \f
976 /* Save the stack pointer for the purpose in SAVE_LEVEL. PSAVE is a pointer
977 to a previously-created save area. If no save area has been allocated,
978 this function will allocate one. If a save area is specified, it
979 must be of the proper mode. */
980
981 void
982 emit_stack_save (enum save_level save_level, rtx *psave)
983 {
984 rtx sa = *psave;
985 /* The default is that we use a move insn and save in a Pmode object. */
986 rtx (*fcn) (rtx, rtx) = gen_move_insn_uncast;
987 machine_mode mode = STACK_SAVEAREA_MODE (save_level);
988
989 /* See if this machine has anything special to do for this kind of save. */
990 switch (save_level)
991 {
992 #ifdef HAVE_save_stack_block
993 case SAVE_BLOCK:
994 if (HAVE_save_stack_block)
995 fcn = gen_save_stack_block;
996 break;
997 #endif
998 #ifdef HAVE_save_stack_function
999 case SAVE_FUNCTION:
1000 if (HAVE_save_stack_function)
1001 fcn = gen_save_stack_function;
1002 break;
1003 #endif
1004 #ifdef HAVE_save_stack_nonlocal
1005 case SAVE_NONLOCAL:
1006 if (HAVE_save_stack_nonlocal)
1007 fcn = gen_save_stack_nonlocal;
1008 break;
1009 #endif
1010 default:
1011 break;
1012 }
1013
1014 /* If there is no save area and we have to allocate one, do so. Otherwise
1015 verify the save area is the proper mode. */
1016
1017 if (sa == 0)
1018 {
1019 if (mode != VOIDmode)
1020 {
1021 if (save_level == SAVE_NONLOCAL)
1022 *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
1023 else
1024 *psave = sa = gen_reg_rtx (mode);
1025 }
1026 }
1027
1028 do_pending_stack_adjust ();
1029 if (sa != 0)
1030 sa = validize_mem (sa);
1031 emit_insn (fcn (sa, stack_pointer_rtx));
1032 }
1033
1034 /* Restore the stack pointer for the purpose in SAVE_LEVEL. SA is the save
1035 area made by emit_stack_save. If it is zero, we have nothing to do. */
1036
1037 void
1038 emit_stack_restore (enum save_level save_level, rtx sa)
1039 {
1040 /* The default is that we use a move insn. */
1041 rtx (*fcn) (rtx, rtx) = gen_move_insn_uncast;
1042
1043 /* If stack_realign_drap, the x86 backend emits a prologue that aligns both
1044 STACK_POINTER and HARD_FRAME_POINTER.
1045 If stack_realign_fp, the x86 backend emits a prologue that aligns only
1046 STACK_POINTER. This renders the HARD_FRAME_POINTER unusable for accessing
1047 aligned variables, which is reflected in ix86_can_eliminate.
1048 We normally still have the realigned STACK_POINTER that we can use.
1049 But if there is a stack restore still present at reload, it can trigger
1050 mark_not_eliminable for the STACK_POINTER, leaving no way to eliminate
1051 FRAME_POINTER into a hard reg.
1052 To prevent this situation, we force need_drap if we emit a stack
1053 restore. */
1054 if (SUPPORTS_STACK_ALIGNMENT)
1055 crtl->need_drap = true;
1056
1057 /* See if this machine has anything special to do for this kind of save. */
1058 switch (save_level)
1059 {
1060 #ifdef HAVE_restore_stack_block
1061 case SAVE_BLOCK:
1062 if (HAVE_restore_stack_block)
1063 fcn = gen_restore_stack_block;
1064 break;
1065 #endif
1066 #ifdef HAVE_restore_stack_function
1067 case SAVE_FUNCTION:
1068 if (HAVE_restore_stack_function)
1069 fcn = gen_restore_stack_function;
1070 break;
1071 #endif
1072 #ifdef HAVE_restore_stack_nonlocal
1073 case SAVE_NONLOCAL:
1074 if (HAVE_restore_stack_nonlocal)
1075 fcn = gen_restore_stack_nonlocal;
1076 break;
1077 #endif
1078 default:
1079 break;
1080 }
1081
1082 if (sa != 0)
1083 {
1084 sa = validize_mem (sa);
1085 /* These clobbers prevent the scheduler from moving
1086 references to variable arrays below the code
1087 that deletes (pops) the arrays. */
1088 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1089 emit_clobber (gen_rtx_MEM (BLKmode, stack_pointer_rtx));
1090 }
1091
1092 discard_pending_stack_adjust ();
1093
1094 emit_insn (fcn (stack_pointer_rtx, sa));
1095 }
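
/* Typical pairing (added sketch): callers save the stack level around
   a variable-sized allocation, e.g.

     rtx sa = NULL_RTX;
     emit_stack_save (SAVE_BLOCK, &sa);
     ... allocate and use dynamic stack space ...
     emit_stack_restore (SAVE_BLOCK, sa);

   emit_stack_save allocates the save area itself on first use.  */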
1096
1097 /* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
1098 function. This should be called whenever we allocate or deallocate
1099 dynamic stack space. */
1100
1101 void
1102 update_nonlocal_goto_save_area (void)
1103 {
1104 tree t_save;
1105 rtx r_save;
1106
1107 /* The nonlocal_goto_save_area object is an array of N pointers. The
1108 first one is used for the frame pointer save; the rest are sized by
1109 STACK_SAVEAREA_MODE. Create a reference to array index 1, the first
1110 of the stack save area slots. */
1111 t_save = build4 (ARRAY_REF,
1112 TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
1113 cfun->nonlocal_goto_save_area,
1114 integer_one_node, NULL_TREE, NULL_TREE);
1115 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
1116
1117 emit_stack_save (SAVE_NONLOCAL, &r_save);
1118 }
1119
1120 /* Record a new stack level for the current function. This should be called
1121 whenever we allocate or deallocate dynamic stack space. */
1122
1123 void
1124 record_new_stack_level (void)
1125 {
1126 /* Record the new stack level for nonlocal gotos. */
1127 if (cfun->nonlocal_goto_save_area)
1128 update_nonlocal_goto_save_area ();
1129
1130 /* Record the new stack level for SJLJ exceptions. */
1131 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
1132 update_sjlj_context ();
1133 }
1134 \f
1135 /* Return an rtx representing the address of an area of memory dynamically
1136 pushed on the stack.
1137
1138 Any required stack pointer alignment is preserved.
1139
1140 SIZE is an rtx representing the size of the area.
1141
1142 SIZE_ALIGN is the alignment (in bits) that we know SIZE has. This
1143 parameter may be zero. If so, a proper value will be extracted
1144 from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.
1145
1146 REQUIRED_ALIGN is the alignment (in bits) required for the region
1147 of memory.
1148
1149 If CANNOT_ACCUMULATE is set to TRUE, the caller guarantees that the
1150 stack space allocated by the generated code cannot be added with itself
1151 in the course of the execution of the function. It is always safe to
1152 pass FALSE here and the following criterion is sufficient in order to
1153 pass TRUE: every path in the CFG that starts at the allocation point and
1154 loops to it executes the associated deallocation code. */
1155
1156 rtx
1157 allocate_dynamic_stack_space (rtx size, unsigned size_align,
1158 unsigned required_align, bool cannot_accumulate)
1159 {
1160 HOST_WIDE_INT stack_usage_size = -1;
1161 rtx_code_label *final_label;
1162 rtx final_target, target;
1163 unsigned extra_align = 0;
1164 bool must_align;
1165
1166 /* If we're asking for zero bytes, it doesn't matter what we point
1167 to since we can't dereference it. But return a reasonable
1168 address anyway. */
1169 if (size == const0_rtx)
1170 return virtual_stack_dynamic_rtx;
1171
1172 /* Otherwise, show we're calling alloca or equivalent. */
1173 cfun->calls_alloca = 1;
1174
1175 /* If stack usage info is requested, look into the size we are passed.
1176 We need to do so this early to avoid the obfuscation that may be
1177 introduced later by the various alignment operations. */
1178 if (flag_stack_usage_info)
1179 {
1180 if (CONST_INT_P (size))
1181 stack_usage_size = INTVAL (size);
1182 else if (REG_P (size))
1183 {
1184 /* Look into the last emitted insn and see if we can deduce
1185 something for the register. */
1186 rtx_insn *insn;
1187 rtx set, note;
1188 insn = get_last_insn ();
1189 if ((set = single_set (insn)) && rtx_equal_p (SET_DEST (set), size))
1190 {
1191 if (CONST_INT_P (SET_SRC (set)))
1192 stack_usage_size = INTVAL (SET_SRC (set));
1193 else if ((note = find_reg_equal_equiv_note (insn))
1194 && CONST_INT_P (XEXP (note, 0)))
1195 stack_usage_size = INTVAL (XEXP (note, 0));
1196 }
1197 }
1198
1199 /* If the size is not constant, we can't say anything. */
1200 if (stack_usage_size == -1)
1201 {
1202 current_function_has_unbounded_dynamic_stack_size = 1;
1203 stack_usage_size = 0;
1204 }
1205 }
1206
1207 /* Ensure the size is in the proper mode. */
1208 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
1209 size = convert_to_mode (Pmode, size, 1);
1210
1211 /* Adjust SIZE_ALIGN, if needed. */
1212 if (CONST_INT_P (size))
1213 {
1214 unsigned HOST_WIDE_INT lsb;
1215
1216 lsb = INTVAL (size);
1217 lsb &= -lsb;
1218
1219 /* Watch out for overflow truncating to "unsigned". */
1220 if (lsb > UINT_MAX / BITS_PER_UNIT)
1221 size_align = 1u << (HOST_BITS_PER_INT - 1);
1222 else
1223 size_align = (unsigned)lsb * BITS_PER_UNIT;
1224 }
1225 else if (size_align < BITS_PER_UNIT)
1226 size_align = BITS_PER_UNIT;
1227
1228 /* We can't attempt to minimize the necessary alignment, because we don't
1229 know the final value of preferred_stack_boundary yet while executing
1230 this code. */
1231 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
1232 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
1233
1234 /* We will need to ensure that the address we return is aligned to
1235 REQUIRED_ALIGN. If STACK_DYNAMIC_OFFSET is defined, we don't
1236 always know its final value at this point in the compilation (it
1237 might depend on the size of the outgoing parameter lists, for
1238 example), so we must align the value to be returned in that case.
1239 (Note that STACK_DYNAMIC_OFFSET will have a default nonzero value if
1240 STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).
1241 We must also do an alignment operation on the returned value if
1242 the stack pointer alignment is less strict than REQUIRED_ALIGN.
1243
1244 If we have to align, we must leave space in SIZE for the hole
1245 that might result from the alignment operation. */
1246
1247 must_align = (crtl->preferred_stack_boundary < required_align);
1248 if (must_align)
1249 {
1250 if (required_align > PREFERRED_STACK_BOUNDARY)
1251 extra_align = PREFERRED_STACK_BOUNDARY;
1252 else if (required_align > STACK_BOUNDARY)
1253 extra_align = STACK_BOUNDARY;
1254 else
1255 extra_align = BITS_PER_UNIT;
1256 }
1257
1258 /* ??? STACK_POINTER_OFFSET is always defined now. */
1259 #if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET)
1260 must_align = true;
1261 extra_align = BITS_PER_UNIT;
1262 #endif
1263
1264 if (must_align)
1265 {
1266 unsigned extra = (required_align - extra_align) / BITS_PER_UNIT;
1267
1268 size = plus_constant (Pmode, size, extra);
1269 size = force_operand (size, NULL_RTX);
1270
1271 if (flag_stack_usage_info)
1272 stack_usage_size += extra;
1273
1274 if (extra && size_align > extra_align)
1275 size_align = extra_align;
1276 }
1277
1278 /* Round the size to a multiple of the required stack alignment.
1279 Since the stack is presumed to be rounded before this allocation,
1280 this will maintain the required alignment.
1281
1282 If the stack grows downward, we could save an insn by subtracting
1283 SIZE from the stack pointer and then aligning the stack pointer.
1284 The problem with this is that the stack pointer may be unaligned
1285 between the execution of the subtraction and alignment insns and
1286 some machines do not allow this. Even on those that do, some
1287 signal handlers malfunction if a signal should occur between those
1288 insns. Since this is an extremely rare event, we have no reliable
1289 way of knowing which systems have this problem. So we avoid even
1290 momentarily mis-aligning the stack. */
1291 if (size_align % MAX_SUPPORTED_STACK_ALIGNMENT != 0)
1292 {
1293 size = round_push (size);
1294
1295 if (flag_stack_usage_info)
1296 {
1297 int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
1298 stack_usage_size = (stack_usage_size + align - 1) / align * align;
1299 }
1300 }
1301
1302 target = gen_reg_rtx (Pmode);
1303
1304 /* The size is supposed to be fully adjusted at this point so record it
1305 if stack usage info is requested. */
1306 if (flag_stack_usage_info)
1307 {
1308 current_function_dynamic_stack_size += stack_usage_size;
1309
1310 /* ??? This is gross but the only safe stance in the absence
1311 of stack usage oriented flow analysis. */
1312 if (!cannot_accumulate)
1313 current_function_has_unbounded_dynamic_stack_size = 1;
1314 }
1315
1316 final_label = NULL;
1317 final_target = NULL_RTX;
1318
1319 /* If we are splitting the stack, we need to ask the backend whether
1320 there is enough room on the current stack. If there isn't, or if
1321 the backend doesn't know how to tell us, then we need to call a
1322 function to allocate memory in some other way. This memory will
1323 be released when we release the current stack segment. The
1324 effect is that stack allocation becomes less efficient, but at
1325 least it doesn't cause a stack overflow. */
1326 if (flag_split_stack)
1327 {
1328 rtx_code_label *available_label;
1329 rtx ask, space, func;
1330
1331 available_label = NULL;
1332
1333 #ifdef HAVE_split_stack_space_check
1334 if (HAVE_split_stack_space_check)
1335 {
1336 available_label = gen_label_rtx ();
1337
1338 /* This instruction will branch to AVAILABLE_LABEL if there
1339 are SIZE bytes available on the stack. */
1340 emit_insn (gen_split_stack_space_check (size, available_label));
1341 }
1342 #endif
1343
1344 /* The __morestack_allocate_stack_space function will allocate
1345 memory using malloc. If the alignment of the memory returned
1346 by malloc does not meet REQUIRED_ALIGN, we increase SIZE to
1347 make sure we allocate enough space. */
1348 if (MALLOC_ABI_ALIGNMENT >= required_align)
1349 ask = size;
1350 else
1351 {
1352 ask = expand_binop (Pmode, add_optab, size,
1353 gen_int_mode (required_align / BITS_PER_UNIT - 1,
1354 Pmode),
1355 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1356 must_align = true;
1357 }
1358
1359 func = init_one_libfunc ("__morestack_allocate_stack_space");
1360
1361 space = emit_library_call_value (func, target, LCT_NORMAL, Pmode,
1362 1, ask, Pmode);
1363
1364 if (available_label == NULL_RTX)
1365 return space;
1366
1367 final_target = gen_reg_rtx (Pmode);
1368
1369 emit_move_insn (final_target, space);
1370
1371 final_label = gen_label_rtx ();
1372 emit_jump (final_label);
1373
1374 emit_label (available_label);
1375 }
1376
1377 do_pending_stack_adjust ();
1378
1379 /* We ought always to be called at the toplevel, and the stack ought to be
1380 aligned properly. */
1381 gcc_assert (!(stack_pointer_delta
1382 % (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)));
1383
1384 /* If needed, check that we have the required amount of stack. Take into
1385 account what has already been checked. */
1386 if (STACK_CHECK_MOVING_SP)
1387 ;
1388 else if (flag_stack_check == GENERIC_STACK_CHECK)
1389 probe_stack_range (STACK_OLD_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE,
1390 size);
1391 else if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK)
1392 probe_stack_range (STACK_CHECK_PROTECT, size);
1393
1394 /* Don't let anti_adjust_stack emit notes. */
1395 suppress_reg_args_size = true;
1396
1397 /* Perform the required allocation from the stack. Some systems do
1398 this differently than simply incrementing/decrementing from the
1399 stack pointer, such as acquiring the space by calling malloc(). */
1400 #ifdef HAVE_allocate_stack
1401 if (HAVE_allocate_stack)
1402 {
1403 struct expand_operand ops[2];
1404 /* We don't have to check against the predicate for operand 0 since
1405 TARGET is known to be a pseudo of the proper mode, which must
1406 be valid for the operand. */
1407 create_fixed_operand (&ops[0], target);
1408 create_convert_operand_to (&ops[1], size, STACK_SIZE_MODE, true);
1409 expand_insn (CODE_FOR_allocate_stack, 2, ops);
1410 }
1411 else
1412 #endif
1413 {
1414 int saved_stack_pointer_delta;
1415
1416 if (!STACK_GROWS_DOWNWARD)
1417 emit_move_insn (target, virtual_stack_dynamic_rtx);
1418
1419 /* Check stack bounds if necessary. */
1420 if (crtl->limit_stack)
1421 {
1422 rtx available;
1423 rtx_code_label *space_available = gen_label_rtx ();
1424 if (STACK_GROWS_DOWNWARD)
1425 available = expand_binop (Pmode, sub_optab,
1426 stack_pointer_rtx, stack_limit_rtx,
1427 NULL_RTX, 1, OPTAB_WIDEN);
1428 else
1429 available = expand_binop (Pmode, sub_optab,
1430 stack_limit_rtx, stack_pointer_rtx,
1431 NULL_RTX, 1, OPTAB_WIDEN);
1432
1433 emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
1434 space_available);
1435 #ifdef HAVE_trap
1436 if (HAVE_trap)
1437 emit_insn (gen_trap ());
1438 else
1439 #endif
1440 error ("stack limits not supported on this target");
1441 emit_barrier ();
1442 emit_label (space_available);
1443 }
1444
1445 saved_stack_pointer_delta = stack_pointer_delta;
1446
1447 if (flag_stack_check && STACK_CHECK_MOVING_SP)
1448 anti_adjust_stack_and_probe (size, false);
1449 else
1450 anti_adjust_stack (size);
1451
1452 /* Even if size is constant, don't modify stack_pointer_delta.
1453 The constant size alloca should preserve
1454 crtl->preferred_stack_boundary alignment. */
1455 stack_pointer_delta = saved_stack_pointer_delta;
1456
1457 if (STACK_GROWS_DOWNWARD)
1458 emit_move_insn (target, virtual_stack_dynamic_rtx);
1459 }
1460
1461 suppress_reg_args_size = false;
1462
1463 /* Finish up the split stack handling. */
1464 if (final_label != NULL_RTX)
1465 {
1466 gcc_assert (flag_split_stack);
1467 emit_move_insn (final_target, target);
1468 emit_label (final_label);
1469 target = final_target;
1470 }
1471
1472 if (must_align)
1473 {
1474 /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
1475 but we know it can't. So add ourselves and then do
1476 TRUNC_DIV_EXPR. */
1477 target = expand_binop (Pmode, add_optab, target,
1478 gen_int_mode (required_align / BITS_PER_UNIT - 1,
1479 Pmode),
1480 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1481 target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
1482 gen_int_mode (required_align / BITS_PER_UNIT,
1483 Pmode),
1484 NULL_RTX, 1);
1485 target = expand_mult (Pmode, target,
1486 gen_int_mode (required_align / BITS_PER_UNIT,
1487 Pmode),
1488 NULL_RTX, 1);
1489 }
1490
1491 /* Now that we've committed to a return value, mark its alignment. */
1492 mark_reg_pointer (target, required_align);
1493
1494 /* Record the new stack level. */
1495 record_new_stack_level ();
1496
1497 return target;
1498 }
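
/* Usage sketch (added note, simplified from the way the alloca
   built-in is expanded): to allocate SIZE bytes aligned to ALIGN bits,

     rtx addr = allocate_dynamic_stack_space (size_rtx, 0, align, true);

   Passing 0 for SIZE_ALIGN lets the function deduce it from a constant
   size; TRUE for CANNOT_ACCUMULATE promises that no path loops back to
   the allocation without executing the matching deallocation.  */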
1499 \f
1500 /* A front end may want to override GCC's stack checking by providing a
1501 run-time routine to call to check the stack, so provide a mechanism for
1502 calling that routine. */
1503
1504 static GTY(()) rtx stack_check_libfunc;
1505
1506 void
1507 set_stack_check_libfunc (const char *libfunc_name)
1508 {
1509 gcc_assert (stack_check_libfunc == NULL_RTX);
1510 stack_check_libfunc = gen_rtx_SYMBOL_REF (Pmode, libfunc_name);
1511 }
1512 \f
1513 /* Emit one stack probe at ADDRESS, an address within the stack. */
1514
1515 void
1516 emit_stack_probe (rtx address)
1517 {
1518 #ifdef HAVE_probe_stack_address
1519 if (HAVE_probe_stack_address)
1520 emit_insn (gen_probe_stack_address (address));
1521 else
1522 #endif
1523 {
1524 rtx memref = gen_rtx_MEM (word_mode, address);
1525
1526 MEM_VOLATILE_P (memref) = 1;
1527
1528 /* See if we have an insn to probe the stack. */
1529 #ifdef HAVE_probe_stack
1530 if (HAVE_probe_stack)
1531 emit_insn (gen_probe_stack (memref));
1532 else
1533 #endif
1534 emit_move_insn (memref, const0_rtx);
1535 }
1536 }
1537
1538 /* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
1539 FIRST is a constant and SIZE is a Pmode RTX. These are offsets from
1540 the current stack pointer. STACK_GROWS_DOWNWARD says whether to add
1541 or subtract them from the stack pointer. */
1542
1543 #define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP)
1544
1545 #if STACK_GROWS_DOWNWARD
1546 #define STACK_GROW_OP MINUS
1547 #define STACK_GROW_OPTAB sub_optab
1548 #define STACK_GROW_OFF(off) -(off)
1549 #else
1550 #define STACK_GROW_OP PLUS
1551 #define STACK_GROW_OPTAB add_optab
1552 #define STACK_GROW_OFF(off) (off)
1553 #endif
1554
1555 void
1556 probe_stack_range (HOST_WIDE_INT first, rtx size)
1557 {
1558 /* First ensure SIZE is Pmode. */
1559 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
1560 size = convert_to_mode (Pmode, size, 1);
1561
1562 /* Next see if we have a function to check the stack. */
1563 if (stack_check_libfunc)
1564 {
1565 rtx addr = memory_address (Pmode,
1566 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1567 stack_pointer_rtx,
1568 plus_constant (Pmode,
1569 size, first)));
1570 emit_library_call (stack_check_libfunc, LCT_NORMAL, VOIDmode, 1, addr,
1571 Pmode);
1572 }
1573
1574 /* Next see if we have an insn to check the stack. */
1575 #ifdef HAVE_check_stack
1576 else if (HAVE_check_stack)
1577 {
1578 struct expand_operand ops[1];
1579 rtx addr = memory_address (Pmode,
1580 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1581 stack_pointer_rtx,
1582 plus_constant (Pmode,
1583 size, first)));
1584 bool success;
1585 create_input_operand (&ops[0], addr, Pmode);
1586 success = maybe_expand_insn (CODE_FOR_check_stack, 1, ops);
1587 gcc_assert (success);
1588 }
1589 #endif
1590
1591 /* Otherwise we have to generate explicit probes. If we have a constant
1592 small number of them to generate, that's the easy case. */
1593 else if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
1594 {
1595 HOST_WIDE_INT isize = INTVAL (size), i;
1596 rtx addr;
1597
1598 /* Probe at FIRST + N * PROBE_INTERVAL for values of N from 1 until
1599 it exceeds SIZE. If only one probe is needed, this will not
1600 generate any code. Then probe at FIRST + SIZE. */
1601 for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
1602 {
1603 addr = memory_address (Pmode,
1604 plus_constant (Pmode, stack_pointer_rtx,
1605 STACK_GROW_OFF (first + i)));
1606 emit_stack_probe (addr);
1607 }
1608
1609 addr = memory_address (Pmode,
1610 plus_constant (Pmode, stack_pointer_rtx,
1611 STACK_GROW_OFF (first + isize)));
1612 emit_stack_probe (addr);
1613 }
1614
1615 /* In the variable case, do the same as above, but in a loop. Note that we
1616 must be extra careful with variables wrapping around because we might be
1617 at the very top (or the very bottom) of the address space and we have to
1618 be able to handle this case properly; in particular, we use an equality
1619 test for the loop condition. */
1620 else
1621 {
1622 rtx rounded_size, rounded_size_op, test_addr, last_addr, temp;
1623 rtx_code_label *loop_lab = gen_label_rtx ();
1624 rtx_code_label *end_lab = gen_label_rtx ();
1625
1626 /* Step 1: round SIZE to the previous multiple of the interval. */
1627
1628 /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL */
1629 rounded_size
1630 = simplify_gen_binary (AND, Pmode, size,
1631 gen_int_mode (-PROBE_INTERVAL, Pmode));
1632 rounded_size_op = force_operand (rounded_size, NULL_RTX);
1633
1634
1635 /* Step 2: compute initial and final value of the loop counter. */
1636
1637 /* TEST_ADDR = SP + FIRST. */
1638 test_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1639 stack_pointer_rtx,
1640 gen_int_mode (first, Pmode)),
1641 NULL_RTX);
1642
1643 /* LAST_ADDR = SP + FIRST + ROUNDED_SIZE. */
1644 last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1645 test_addr,
1646 rounded_size_op), NULL_RTX);
1647
1648
1649 /* Step 3: the loop
1650
1651 while (TEST_ADDR != LAST_ADDR)
1652 {
1653 TEST_ADDR = TEST_ADDR + PROBE_INTERVAL
1654 probe at TEST_ADDR
1655 }
1656
1657 probes at FIRST + N * PROBE_INTERVAL for values of N from 1
1658 until it is equal to ROUNDED_SIZE. */
1659
1660 emit_label (loop_lab);
1661
1662 /* Jump to END_LAB if TEST_ADDR == LAST_ADDR. */
1663 emit_cmp_and_jump_insns (test_addr, last_addr, EQ, NULL_RTX, Pmode, 1,
1664 end_lab);
1665
1666 /* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL. */
1667 temp = expand_binop (Pmode, STACK_GROW_OPTAB, test_addr,
1668 gen_int_mode (PROBE_INTERVAL, Pmode), test_addr,
1669 1, OPTAB_WIDEN);
1670
1671 gcc_assert (temp == test_addr);
1672
1673 /* Probe at TEST_ADDR. */
1674 emit_stack_probe (test_addr);
1675
1676 emit_jump (loop_lab);
1677
1678 emit_label (end_lab);
1679
1680
1681 /* Step 4: probe at FIRST + SIZE if we cannot assert at compile-time
1682 that SIZE is equal to ROUNDED_SIZE. */
1683
1684 /* TEMP = SIZE - ROUNDED_SIZE. */
1685 temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
1686 if (temp != const0_rtx)
1687 {
1688 rtx addr;
1689
1690 if (CONST_INT_P (temp))
1691 {
1692 /* Use [base + disp] addressing mode if supported. */
1693 HOST_WIDE_INT offset = INTVAL (temp);
1694 addr = memory_address (Pmode,
1695 plus_constant (Pmode, last_addr,
1696 STACK_GROW_OFF (offset)));
1697 }
1698 else
1699 {
1700 /* Manual CSE if the difference is not known at compile-time. */
1701 temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
1702 addr = memory_address (Pmode,
1703 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1704 last_addr, temp));
1705 }
1706
1707 emit_stack_probe (addr);
1708 }
1709 }
1710
1711 /* Make sure nothing is scheduled before we are done. */
1712 emit_insn (gen_blockage ());
1713 }
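
/* Worked example (added note): with a 4096-byte PROBE_INTERVAL,
   probe_stack_range (0, GEN_INT (16384)) takes the constant branch and
   probes at offsets 4096, 8192, 12288 and 16384 from the stack pointer
   (negated via STACK_GROW_OFF on downward-growing stacks); a variable
   SIZE instead produces the equality-terminated loop above.  */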
1714
1715 /* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
1716 while probing it. This pushes when SIZE is positive. SIZE need not
1717 be constant. If ADJUST_BACK is true, adjust back the stack pointer
1718 by plus SIZE at the end. */
1719
1720 void
1721 anti_adjust_stack_and_probe (rtx size, bool adjust_back)
1722 {
1723 /* We skip the probe for the first interval + a small dope of 4 words and
1724 probe that many bytes past the specified size to maintain a protection
1725 area at the bottom of the stack. */
1726 const int dope = 4 * UNITS_PER_WORD;
1727
1728 /* First ensure SIZE is Pmode. */
1729 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
1730 size = convert_to_mode (Pmode, size, 1);
1731
1732 /* If we have a constant small number of probes to generate, that's the
1733 easy case. */
1734 if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
1735 {
1736 HOST_WIDE_INT isize = INTVAL (size), i;
1737 bool first_probe = true;
1738
1739 /* Adjust SP and probe at PROBE_INTERVAL + N * PROBE_INTERVAL for
1740 values of N from 1 until it exceeds SIZE. If only one probe is
1741 needed, this will not generate any code. Then adjust and probe
1742 to PROBE_INTERVAL + SIZE. */
1743 for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
1744 {
1745 if (first_probe)
1746 {
1747 anti_adjust_stack (GEN_INT (2 * PROBE_INTERVAL + dope));
1748 first_probe = false;
1749 }
1750 else
1751 anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
1752 emit_stack_probe (stack_pointer_rtx);
1753 }
1754
1755 if (first_probe)
1756 anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
1757 else
1758 anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL - i));
1759 emit_stack_probe (stack_pointer_rtx);
1760 }
1761
1762 /* In the variable case, do the same as above, but in a loop. Note that we
1763 must be extra careful with variables wrapping around because we might be
1764 at the very top (or the very bottom) of the address space and we have to
1765 be able to handle this case properly; in particular, we use an equality
1766 test for the loop condition. */
1767 else
1768 {
1769 rtx rounded_size, rounded_size_op, last_addr, temp;
1770 rtx_code_label *loop_lab = gen_label_rtx ();
1771 rtx_code_label *end_lab = gen_label_rtx ();
1772
1773
1774 /* Step 1: round SIZE to the previous multiple of the interval. */
1775
1776 /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL */
1777 rounded_size
1778 = simplify_gen_binary (AND, Pmode, size,
1779 gen_int_mode (-PROBE_INTERVAL, Pmode));
1780 rounded_size_op = force_operand (rounded_size, NULL_RTX);
1781
1782
1783 /* Step 2: compute initial and final value of the loop counter. */
1784
1785 /* SP = SP_0 + PROBE_INTERVAL. */
1786 anti_adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
1787
1788 /* LAST_ADDR = SP_0 + PROBE_INTERVAL + ROUNDED_SIZE. */
1789 last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1790 stack_pointer_rtx,
1791 rounded_size_op), NULL_RTX);
1792
1793
1794 /* Step 3: the loop
1795
1796 while (SP != LAST_ADDR)
1797 {
1798 SP = SP + PROBE_INTERVAL
1799 probe at SP
1800 }
1801
1802 adjusts SP and probes at PROBE_INTERVAL + N * PROBE_INTERVAL for
1803 values of N from 1 until it is equal to ROUNDED_SIZE. */
1804
1805 emit_label (loop_lab);
1806
1807 /* Jump to END_LAB if SP == LAST_ADDR. */
1808 emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
1809 Pmode, 1, end_lab);
1810
1811 /* SP = SP + PROBE_INTERVAL and probe at SP. */
1812 anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
1813 emit_stack_probe (stack_pointer_rtx);
1814
1815 emit_jump (loop_lab);
1816
1817 emit_label (end_lab);
1818
1819
1820 /* Step 4: adjust SP and probe at PROBE_INTERVAL + SIZE if we cannot
1821 assert at compile-time that SIZE is equal to ROUNDED_SIZE. */
1822
1823 /* TEMP = SIZE - ROUNDED_SIZE. */
1824 temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
1825 if (temp != const0_rtx)
1826 {
1827 /* Manual CSE if the difference is not known at compile-time. */
1828 if (GET_CODE (temp) != CONST_INT)
1829 temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
1830 anti_adjust_stack (temp);
1831 emit_stack_probe (stack_pointer_rtx);
1832 }
1833 }
1834
1835 /* Adjust back and account for the additional first interval. */
1836 if (adjust_back)
1837 adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
1838 else
1839 adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
1840 }
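
/* Worked example (added note): with a 4096-byte PROBE_INTERVAL and a
   32-byte dope (4 words on a 64-bit target), a constant SIZE of 8192
   first pushes 2*4096 + 32 bytes and probes, then pushes the remaining
   4096 bytes and probes again, i.e. SIZE + PROBE_INTERVAL + dope in
   total; with ADJUST_BACK true the whole amount is popped at the end.  */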
1841
1842 /* Return an rtx representing the register or memory location
1843 in which a scalar value of data type VALTYPE
1844 was returned by a function call to function FUNC.
1845 FUNC is a FUNCTION_DECL, FNTYPE a FUNCTION_TYPE node if the precise
1846 function is known, otherwise 0.
1847 OUTGOING is 1 if on a machine with register windows this function
1848 should return the register in which the function will put its result
1849 and 0 otherwise. */
1850
1851 rtx
1852 hard_function_value (const_tree valtype, const_tree func, const_tree fntype,
1853 int outgoing ATTRIBUTE_UNUSED)
1854 {
1855 rtx val;
1856
1857 val = targetm.calls.function_value (valtype, func ? func : fntype, outgoing);
1858
1859 if (REG_P (val)
1860 && GET_MODE (val) == BLKmode)
1861 {
1862 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
1863 machine_mode tmpmode;
1864
1865 /* int_size_in_bytes can return -1. We don't need a check here
1866 since the value of bytes will then be large enough that no
1867 mode will match anyway. */
1868
1869 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1870 tmpmode != VOIDmode;
1871 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
1872 {
1873 /* Have we found a large enough mode? */
1874 if (GET_MODE_SIZE (tmpmode) >= bytes)
1875 break;
1876 }
1877
1878 /* No suitable mode found. */
1879 gcc_assert (tmpmode != VOIDmode);
1880
1881 PUT_MODE (val, tmpmode);
1882 }
1883 return val;
1884 }
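
/* Example (added note): for a 6-byte structure returned in a BLKmode
   register, the loop above picks the narrowest MODE_INT mode of at
   least 6 bytes -- DImode on typical targets -- and rewrites the
   register's mode accordingly.  */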
1885
1886 /* Return an rtx representing the register or memory location
1887 in which a scalar value of mode MODE was returned by a library call. */
1888
1889 rtx
1890 hard_libcall_value (machine_mode mode, rtx fun)
1891 {
1892 return targetm.calls.libcall_value (mode, fun);
1893 }
1894
1895 /* Look up the tree code for a given rtx code
1896 to provide the arithmetic operation for REAL_ARITHMETIC.
1897 The function returns an int because the caller may not know
1898 what `enum tree_code' means. */
1899
1900 int
1901 rtx_to_tree_code (enum rtx_code code)
1902 {
1903 enum tree_code tcode;
1904
1905 switch (code)
1906 {
1907 case PLUS:
1908 tcode = PLUS_EXPR;
1909 break;
1910 case MINUS:
1911 tcode = MINUS_EXPR;
1912 break;
1913 case MULT:
1914 tcode = MULT_EXPR;
1915 break;
1916 case DIV:
1917 tcode = RDIV_EXPR;
1918 break;
1919 case SMIN:
1920 tcode = MIN_EXPR;
1921 break;
1922 case SMAX:
1923 tcode = MAX_EXPR;
1924 break;
1925 default:
1926 tcode = LAST_AND_UNUSED_TREE_CODE;
1927 break;
1928 }
1929 return ((int) tcode);
1930 }
1931
1932 #include "gt-explow.h"