1 /* Subroutines for manipulating rtx's in semantically interesting ways.
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "tm.h"
25 #include "diagnostic-core.h"
26 #include "rtl.h"
27 #include "hash-set.h"
28 #include "machmode.h"
29 #include "vec.h"
30 #include "double-int.h"
31 #include "input.h"
32 #include "alias.h"
33 #include "symtab.h"
34 #include "wide-int.h"
35 #include "inchash.h"
36 #include "real.h"
37 #include "tree.h"
38 #include "stor-layout.h"
39 #include "tm_p.h"
40 #include "flags.h"
41 #include "except.h"
42 #include "hard-reg-set.h"
43 #include "function.h"
44 #include "hashtab.h"
45 #include "statistics.h"
46 #include "fixed-value.h"
47 #include "insn-config.h"
48 #include "expmed.h"
49 #include "dojump.h"
50 #include "explow.h"
51 #include "calls.h"
52 #include "emit-rtl.h"
53 #include "varasm.h"
54 #include "stmt.h"
55 #include "expr.h"
56 #include "insn-codes.h"
57 #include "optabs.h"
58 #include "libfuncs.h"
59 #include "ggc.h"
60 #include "recog.h"
61 #include "langhooks.h"
62 #include "target.h"
63 #include "common/common-target.h"
64 #include "output.h"
65
66 static rtx break_out_memory_refs (rtx);
67
68
69 /* Truncate and perhaps sign-extend C as appropriate for MODE. */
70
71 HOST_WIDE_INT
72 trunc_int_for_mode (HOST_WIDE_INT c, machine_mode mode)
73 {
74 int width = GET_MODE_PRECISION (mode);
75
76 /* You want to truncate to a _what_? */
77 gcc_assert (SCALAR_INT_MODE_P (mode)
78 || POINTER_BOUNDS_MODE_P (mode));
79
80 /* Canonicalize BImode to 0 and STORE_FLAG_VALUE. */
81 if (mode == BImode)
82 return c & 1 ? STORE_FLAG_VALUE : 0;
83
84 /* Sign-extend for the requested mode. */
85
86 if (width < HOST_BITS_PER_WIDE_INT)
87 {
88 HOST_WIDE_INT sign = 1;
89 sign <<= width - 1;
90 c &= (sign << 1) - 1;
91 c ^= sign;
92 c -= sign;
93 }
94
95 return c;
96 }
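/* Illustrative sketch (illustration only): for MODE = QImode (precision 8)
   and C = 0xFF the steps above compute

     sign = (HOST_WIDE_INT) 1 << 7      = 0x80
     c   &= (sign << 1) - 1             = 0xFF   (reduce modulo 2^8)
     c   ^= sign                        = 0x7F
     c   -= sign                        = -1

   i.e. the value is first reduced to its low 8 bits and then
   reinterpreted as a signed 8-bit quantity, which is the promised
   sign extension.  */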
97
98 /* Return an rtx for the sum of X and the integer C, given that X has
99 mode MODE. INPLACE is true if X can be modified inplace or false
100 if it must be treated as immutable. */
101
102 rtx
103 plus_constant (machine_mode mode, rtx x, HOST_WIDE_INT c,
104 bool inplace)
105 {
106 RTX_CODE code;
107 rtx y;
108 rtx tem;
109 int all_constant = 0;
110
111 gcc_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
112
113 if (c == 0)
114 return x;
115
116 restart:
117
118 code = GET_CODE (x);
119 y = x;
120
121 switch (code)
122 {
123 CASE_CONST_SCALAR_INT:
124 return immed_wide_int_const (wi::add (std::make_pair (x, mode), c),
125 mode);
126 case MEM:
127 /* If this is a reference to the constant pool, try replacing it with
128 a reference to a new constant. If the resulting address isn't
129 valid, don't return it because we have no way to validize it. */
130 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
131 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
132 {
133 tem = plus_constant (mode, get_pool_constant (XEXP (x, 0)), c);
134 tem = force_const_mem (GET_MODE (x), tem);
135 /* Targets may disallow some constants in the constant pool, thus
136 force_const_mem may return NULL_RTX. */
137 if (tem && memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
138 return tem;
139 }
140 break;
141
142 case CONST:
143 /* If adding to something entirely constant, set a flag
144 so that we can add a CONST around the result. */
145 if (inplace && shared_const_p (x))
146 inplace = false;
147 x = XEXP (x, 0);
148 all_constant = 1;
149 goto restart;
150
151 case SYMBOL_REF:
152 case LABEL_REF:
153 all_constant = 1;
154 break;
155
156 case PLUS:
157       /* The interesting case is adding the integer to a sum.  Look
158 	 for a constant term in the sum and combine it with C.  Whether
159 	 that term is an explicit integer or some other constant, we
160 	 combine or group the two together anyway.
161
162 We may not immediately return from the recursive call here, lest
163 all_constant gets lost. */
164
165 if (CONSTANT_P (XEXP (x, 1)))
166 {
167 rtx term = plus_constant (mode, XEXP (x, 1), c, inplace);
168 if (term == const0_rtx)
169 x = XEXP (x, 0);
170 else if (inplace)
171 XEXP (x, 1) = term;
172 else
173 x = gen_rtx_PLUS (mode, XEXP (x, 0), term);
174 c = 0;
175 }
176 else if (rtx *const_loc = find_constant_term_loc (&y))
177 {
178 if (!inplace)
179 {
180 /* We need to be careful since X may be shared and we can't
181 modify it in place. */
182 x = copy_rtx (x);
183 const_loc = find_constant_term_loc (&x);
184 }
185 *const_loc = plus_constant (mode, *const_loc, c, true);
186 c = 0;
187 }
188 break;
189
190 default:
191 break;
192 }
193
194 if (c != 0)
195 x = gen_rtx_PLUS (mode, x, gen_int_mode (c, mode));
196
197 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
198 return x;
199 else if (all_constant)
200 return gen_rtx_CONST (mode, x);
201 else
202 return x;
203 }
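/* For illustration (hypothetical SImode operands): with
   X = (plus:SI (reg R) (const_int 4)) and C = 3, the PLUS case above
   folds the constant term, so the result is
   (plus:SI (reg R) (const_int 7)); with C = -4 the constant term
   becomes const0_rtx and plain (reg R) is returned.  */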
204 \f
205 /* If X is a sum, return a new sum like X but lacking any constant terms.
206 Add all the removed constant terms into *CONSTPTR.
207 X itself is not altered. The result != X if and only if
208 it is not isomorphic to X. */
209
210 rtx
211 eliminate_constant_term (rtx x, rtx *constptr)
212 {
213 rtx x0, x1;
214 rtx tem;
215
216 if (GET_CODE (x) != PLUS)
217 return x;
218
219 /* First handle constants appearing at this level explicitly. */
220 if (CONST_INT_P (XEXP (x, 1))
221 && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
222 XEXP (x, 1)))
223 && CONST_INT_P (tem))
224 {
225 *constptr = tem;
226 return eliminate_constant_term (XEXP (x, 0), constptr);
227 }
228
229 tem = const0_rtx;
230 x0 = eliminate_constant_term (XEXP (x, 0), &tem);
231 x1 = eliminate_constant_term (XEXP (x, 1), &tem);
232 if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
233 && 0 != (tem = simplify_binary_operation (PLUS, GET_MODE (x),
234 *constptr, tem))
235 && CONST_INT_P (tem))
236 {
237 *constptr = tem;
238 return gen_rtx_PLUS (GET_MODE (x), x0, x1);
239 }
240
241 return x;
242 }
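/* For illustration (hypothetical registers): with
   X = (plus (plus (reg R1) (const_int 8)) (const_int 4)) and
   *CONSTPTR = const0_rtx, the recursion strips both constants and the
   function returns (reg R1) with *CONSTPTR = (const_int 12), leaving
   X itself untouched.  */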
243
244 \f
245 /* Return a copy of X in which all memory references
246 and all constants that involve symbol refs
247 have been replaced with new temporary registers.
248 Also emit code to load the memory locations and constants
249 into those registers.
250
251 If X contains no such constants or memory references,
252 X itself (not a copy) is returned.
253
254 If a constant is found in the address that is not a legitimate constant
255 in an insn, it is left alone in the hope that it might be valid in the
256 address.
257
258 X may contain no arithmetic except addition, subtraction and multiplication.
259 Values returned by expand_expr with 1 for sum_ok fit this constraint. */
260
261 static rtx
262 break_out_memory_refs (rtx x)
263 {
264 if (MEM_P (x)
265 || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
266 && GET_MODE (x) != VOIDmode))
267 x = force_reg (GET_MODE (x), x);
268 else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
269 || GET_CODE (x) == MULT)
270 {
271 rtx op0 = break_out_memory_refs (XEXP (x, 0));
272 rtx op1 = break_out_memory_refs (XEXP (x, 1));
273
274 if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
275 x = simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
276 }
277
278 return x;
279 }
280
281 /* Given X, a memory address in address space AS' pointer mode, convert it to
282 an address in the address space's address mode, or vice versa (TO_MODE says
283 which way). We take advantage of the fact that pointers are not allowed to
284 overflow by commuting arithmetic operations over conversions so that address
285 arithmetic insns can be used. IN_CONST is true if this conversion is inside
286 a CONST. */
287
288 static rtx
289 convert_memory_address_addr_space_1 (machine_mode to_mode ATTRIBUTE_UNUSED,
290 rtx x, addr_space_t as ATTRIBUTE_UNUSED,
291 bool in_const ATTRIBUTE_UNUSED)
292 {
293 #ifndef POINTERS_EXTEND_UNSIGNED
294 gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
295 return x;
296 #else /* defined(POINTERS_EXTEND_UNSIGNED) */
297 machine_mode pointer_mode, address_mode, from_mode;
298 rtx temp;
299 enum rtx_code code;
300
301 /* If X already has the right mode, just return it. */
302 if (GET_MODE (x) == to_mode)
303 return x;
304
305 pointer_mode = targetm.addr_space.pointer_mode (as);
306 address_mode = targetm.addr_space.address_mode (as);
307 from_mode = to_mode == pointer_mode ? address_mode : pointer_mode;
308
309 /* Here we handle some special cases. If none of them apply, fall through
310 to the default case. */
311 switch (GET_CODE (x))
312 {
313 CASE_CONST_SCALAR_INT:
314 if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
315 code = TRUNCATE;
316 else if (POINTERS_EXTEND_UNSIGNED < 0)
317 break;
318 else if (POINTERS_EXTEND_UNSIGNED > 0)
319 code = ZERO_EXTEND;
320 else
321 code = SIGN_EXTEND;
322 temp = simplify_unary_operation (code, to_mode, x, from_mode);
323 if (temp)
324 return temp;
325 break;
326
327 case SUBREG:
328 if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
329 && GET_MODE (SUBREG_REG (x)) == to_mode)
330 return SUBREG_REG (x);
331 break;
332
333 case LABEL_REF:
334 temp = gen_rtx_LABEL_REF (to_mode, LABEL_REF_LABEL (x));
335 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
336 return temp;
337 break;
338
339 case SYMBOL_REF:
340 temp = shallow_copy_rtx (x);
341 PUT_MODE (temp, to_mode);
342 return temp;
343 break;
344
345 case CONST:
346 return gen_rtx_CONST (to_mode,
347 convert_memory_address_addr_space_1
348 (to_mode, XEXP (x, 0), as, true));
349 break;
350
351 case PLUS:
352 case MULT:
353 /* For addition we can safely permute the conversion and addition
354 operation if one operand is a constant and converting the constant
355 does not change it or if one operand is a constant and we are
356 using a ptr_extend instruction (POINTERS_EXTEND_UNSIGNED < 0).
357 We can always safely permute them if we are making the address
358 narrower. Inside a CONST RTL, this is safe for both pointers
359 zero or sign extended as pointers cannot wrap. */
360 if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
361 || (GET_CODE (x) == PLUS
362 && CONST_INT_P (XEXP (x, 1))
363 && ((in_const && POINTERS_EXTEND_UNSIGNED != 0)
364 || XEXP (x, 1) == convert_memory_address_addr_space_1
365 (to_mode, XEXP (x, 1), as, in_const)
366 || POINTERS_EXTEND_UNSIGNED < 0)))
367 return gen_rtx_fmt_ee (GET_CODE (x), to_mode,
368 convert_memory_address_addr_space_1
369 (to_mode, XEXP (x, 0), as, in_const),
370 XEXP (x, 1));
371 break;
372
373 default:
374 break;
375 }
376
377 return convert_modes (to_mode, from_mode,
378 x, POINTERS_EXTEND_UNSIGNED);
379 #endif /* defined(POINTERS_EXTEND_UNSIGNED) */
380 }
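/* For illustration (modes here are hypothetical; the real ones depend on
   the target's pointer and address modes): when TO_MODE is narrower than
   the pointer mode, the PLUS/MULT case above pushes the conversion into
   the first operand, so (plus:DI (reg:DI R) (const_int 16)) becomes
   (plus:SI <R converted to SImode> (const_int 16)) rather than a
   truncation of the whole sum; a bare SYMBOL_REF is handled by simply
   copying it and changing its mode.  */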
381
382 /* Given X, a memory address in address space AS' pointer mode, convert it to
383 an address in the address space's address mode, or vice versa (TO_MODE says
384 which way). We take advantage of the fact that pointers are not allowed to
385 overflow by commuting arithmetic operations over conversions so that address
386 arithmetic insns can be used. */
387
388 rtx
389 convert_memory_address_addr_space (machine_mode to_mode, rtx x, addr_space_t as)
390 {
391 return convert_memory_address_addr_space_1 (to_mode, x, as, false);
392 }
393 \f
394
395 /* Return something equivalent to X but valid as a memory address for something
396 of mode MODE in the named address space AS. When X is not itself valid,
397 this works by copying X or subexpressions of it into registers. */
398
399 rtx
400 memory_address_addr_space (machine_mode mode, rtx x, addr_space_t as)
401 {
402 rtx oldx = x;
403 machine_mode address_mode = targetm.addr_space.address_mode (as);
404
405 x = convert_memory_address_addr_space (address_mode, x, as);
406
407 /* By passing constant addresses through registers
408 we get a chance to cse them. */
409 if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
410 x = force_reg (address_mode, x);
411
412 /* We get better cse by rejecting indirect addressing at this stage.
413 Let the combiner create indirect addresses where appropriate.
414 For now, generate the code so that the subexpressions useful to share
415 are visible. But not if cse won't be done! */
416 else
417 {
418 if (! cse_not_expected && !REG_P (x))
419 x = break_out_memory_refs (x);
420
421 /* At this point, any valid address is accepted. */
422 if (memory_address_addr_space_p (mode, x, as))
423 goto done;
424
425 /* If it was valid before but breaking out memory refs invalidated it,
426 use it the old way. */
427 if (memory_address_addr_space_p (mode, oldx, as))
428 {
429 x = oldx;
430 goto done;
431 }
432
433 /* Perform machine-dependent transformations on X
434 in certain cases. This is not necessary since the code
435 below can handle all possible cases, but machine-dependent
436 transformations can make better code. */
437 {
438 rtx orig_x = x;
439 x = targetm.addr_space.legitimize_address (x, oldx, mode, as);
440 if (orig_x != x && memory_address_addr_space_p (mode, x, as))
441 goto done;
442 }
443
444 /* PLUS and MULT can appear in special ways
445 as the result of attempts to make an address usable for indexing.
446 Usually they are dealt with by calling force_operand, below.
447 But a sum containing constant terms is special
448 if removing them makes the sum a valid address:
449 then we generate that address in a register
450 and index off of it. We do this because it often makes
451 shorter code, and because the addresses thus generated
452 in registers often become common subexpressions. */
453 if (GET_CODE (x) == PLUS)
454 {
455 rtx constant_term = const0_rtx;
456 rtx y = eliminate_constant_term (x, &constant_term);
457 if (constant_term == const0_rtx
458 || ! memory_address_addr_space_p (mode, y, as))
459 x = force_operand (x, NULL_RTX);
460 else
461 {
462 y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
463 if (! memory_address_addr_space_p (mode, y, as))
464 x = force_operand (x, NULL_RTX);
465 else
466 x = y;
467 }
468 }
469
470 else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
471 x = force_operand (x, NULL_RTX);
472
473 /* If we have a register that's an invalid address,
474 it must be a hard reg of the wrong class. Copy it to a pseudo. */
475 else if (REG_P (x))
476 x = copy_to_reg (x);
477
478 /* Last resort: copy the value to a register, since
479 the register is a valid address. */
480 else
481 x = force_reg (address_mode, x);
482 }
483
484 done:
485
486 gcc_assert (memory_address_addr_space_p (mode, x, as));
487 /* If we didn't change the address, we are done. Otherwise, mark
488 a reg as a pointer if we have REG or REG + CONST_INT. */
489 if (oldx == x)
490 return x;
491 else if (REG_P (x))
492 mark_reg_pointer (x, BITS_PER_UNIT);
493 else if (GET_CODE (x) == PLUS
494 && REG_P (XEXP (x, 0))
495 && CONST_INT_P (XEXP (x, 1)))
496 mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);
497
498 /* OLDX may have been the address on a temporary. Update the address
499 to indicate that X is now used. */
500 update_temp_slot_address (oldx, x);
501
502 return x;
503 }
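/* Worked example (hypothetical target that accepts "reg + reg" and
   "reg + const" addresses but not "reg + reg + const"): for
   X = (plus (plus (reg R1) (reg R2)) (const_int 4)),
   eliminate_constant_term yields Y = (plus (reg R1) (reg R2)) with
   CONSTANT_TERM = (const_int 4).  Y is a valid address, so the code
   above copies it into a fresh register R3 and returns
   (plus (reg R3) (const_int 4)); R3 can then be shared by neighbouring
   accesses at other small offsets.  */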
504
505 /* If REF is a MEM with an invalid address, change it into a valid address.
506 Pass through anything else unchanged. REF must be an unshared rtx and
507 the function may modify it in-place. */
508
509 rtx
510 validize_mem (rtx ref)
511 {
512 if (!MEM_P (ref))
513 return ref;
514 ref = use_anchored_address (ref);
515 if (memory_address_addr_space_p (GET_MODE (ref), XEXP (ref, 0),
516 MEM_ADDR_SPACE (ref)))
517 return ref;
518
519 return replace_equiv_address (ref, XEXP (ref, 0), true);
520 }
521
522 /* If X is a memory reference to a member of an object block, try rewriting
523 it to use an anchor instead. Return the new memory reference on success
524 and the old one on failure. */
525
526 rtx
527 use_anchored_address (rtx x)
528 {
529 rtx base;
530 HOST_WIDE_INT offset;
531 machine_mode mode;
532
533 if (!flag_section_anchors)
534 return x;
535
536 if (!MEM_P (x))
537 return x;
538
539 /* Split the address into a base and offset. */
540 base = XEXP (x, 0);
541 offset = 0;
542 if (GET_CODE (base) == CONST
543 && GET_CODE (XEXP (base, 0)) == PLUS
544 && CONST_INT_P (XEXP (XEXP (base, 0), 1)))
545 {
546 offset += INTVAL (XEXP (XEXP (base, 0), 1));
547 base = XEXP (XEXP (base, 0), 0);
548 }
549
550 /* Check whether BASE is suitable for anchors. */
551 if (GET_CODE (base) != SYMBOL_REF
552 || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
553 || SYMBOL_REF_ANCHOR_P (base)
554 || SYMBOL_REF_BLOCK (base) == NULL
555 || !targetm.use_anchors_for_symbol_p (base))
556 return x;
557
558 /* Decide where BASE is going to be. */
559 place_block_symbol (base);
560
561 /* Get the anchor we need to use. */
562 offset += SYMBOL_REF_BLOCK_OFFSET (base);
563 base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
564 SYMBOL_REF_TLS_MODEL (base));
565
566 /* Work out the offset from the anchor. */
567 offset -= SYMBOL_REF_BLOCK_OFFSET (base);
568
569 /* If we're going to run a CSE pass, force the anchor into a register.
570 We will then be able to reuse registers for several accesses, if the
571 target costs say that that's worthwhile. */
572 mode = GET_MODE (base);
573 if (!cse_not_expected)
574 base = force_reg (mode, base);
575
576 return replace_equiv_address (x, plus_constant (mode, base, offset));
577 }
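/* For illustration (hypothetical offsets): if X is a MEM whose address is
   (const (plus (symbol_ref "b") (const_int 8))) and "b" is placed at
   offset 24 within its object block, the code above requests the section
   anchor nearest to block offset 8 + 24 = 32 and rewrites the address as
   anchor + (32 - <anchor's own block offset>), forcing the anchor into a
   register first when a later CSE pass is expected.  */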
578 \f
579 /* Copy the value or contents of X to a new temp reg and return that reg. */
580
581 rtx
582 copy_to_reg (rtx x)
583 {
584 rtx temp = gen_reg_rtx (GET_MODE (x));
585
586 /* If not an operand, must be an address with PLUS and MULT so
587 do the computation. */
588 if (! general_operand (x, VOIDmode))
589 x = force_operand (x, temp);
590
591 if (x != temp)
592 emit_move_insn (temp, x);
593
594 return temp;
595 }
596
597 /* Like copy_to_reg but always give the new register mode Pmode
598 in case X is a constant. */
599
600 rtx
601 copy_addr_to_reg (rtx x)
602 {
603 return copy_to_mode_reg (Pmode, x);
604 }
605
606 /* Like copy_to_reg but always give the new register mode MODE
607 in case X is a constant. */
608
609 rtx
610 copy_to_mode_reg (machine_mode mode, rtx x)
611 {
612 rtx temp = gen_reg_rtx (mode);
613
614 /* If not an operand, must be an address with PLUS and MULT so
615 do the computation. */
616 if (! general_operand (x, VOIDmode))
617 x = force_operand (x, temp);
618
619 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
620 if (x != temp)
621 emit_move_insn (temp, x);
622 return temp;
623 }
624
625 /* Load X into a register if it is not already one.
626 Use mode MODE for the register.
627 X should be valid for mode MODE, but it may be a constant which
628 is valid for all integer modes; that's why caller must specify MODE.
629
630 The caller must not alter the value in the register we return,
631 since we mark it as a "constant" register. */
632
633 rtx
634 force_reg (machine_mode mode, rtx x)
635 {
636 rtx temp, set;
637 rtx_insn *insn;
638
639 if (REG_P (x))
640 return x;
641
642 if (general_operand (x, mode))
643 {
644 temp = gen_reg_rtx (mode);
645 insn = emit_move_insn (temp, x);
646 }
647 else
648 {
649 temp = force_operand (x, NULL_RTX);
650 if (REG_P (temp))
651 insn = get_last_insn ();
652 else
653 {
654 rtx temp2 = gen_reg_rtx (mode);
655 insn = emit_move_insn (temp2, temp);
656 temp = temp2;
657 }
658 }
659
660 /* Let optimizers know that TEMP's value never changes
661 and that X can be substituted for it. Don't get confused
662 if INSN set something else (such as a SUBREG of TEMP). */
663 if (CONSTANT_P (x)
664 && (set = single_set (insn)) != 0
665 && SET_DEST (set) == temp
666 && ! rtx_equal_p (x, SET_SRC (set)))
667 set_unique_reg_note (insn, REG_EQUAL, x);
668
669 /* Let optimizers know that TEMP is a pointer, and if so, the
670 known alignment of that pointer. */
671 {
672 unsigned align = 0;
673 if (GET_CODE (x) == SYMBOL_REF)
674 {
675 align = BITS_PER_UNIT;
676 if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
677 align = DECL_ALIGN (SYMBOL_REF_DECL (x));
678 }
679 else if (GET_CODE (x) == LABEL_REF)
680 align = BITS_PER_UNIT;
681 else if (GET_CODE (x) == CONST
682 && GET_CODE (XEXP (x, 0)) == PLUS
683 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
684 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
685 {
686 rtx s = XEXP (XEXP (x, 0), 0);
687 rtx c = XEXP (XEXP (x, 0), 1);
688 unsigned sa, ca;
689
690 sa = BITS_PER_UNIT;
691 if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
692 sa = DECL_ALIGN (SYMBOL_REF_DECL (s));
693
694 if (INTVAL (c) == 0)
695 align = sa;
696 else
697 {
698 ca = ctz_hwi (INTVAL (c)) * BITS_PER_UNIT;
699 align = MIN (sa, ca);
700 }
701 }
702
703 if (align || (MEM_P (x) && MEM_POINTER (x)))
704 mark_reg_pointer (temp, align);
705 }
706
707 return temp;
708 }
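/* For illustration: force_reg (Pmode, some SYMBOL_REF) emits a move of
   the symbol into a new pseudo and, via the code above, marks that
   pseudo as a pointer aligned to DECL_ALIGN of the symbol's decl when
   the decl is known.  The REG_EQUAL note is only attached when the
   emitted move's source ended up different from X, e.g. after
   force_operand had to legitimize X first.  */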
709
710 /* If X is a memory ref, copy its contents to a new temp reg and return
711 that reg. Otherwise, return X. */
712
713 rtx
714 force_not_mem (rtx x)
715 {
716 rtx temp;
717
718 if (!MEM_P (x) || GET_MODE (x) == BLKmode)
719 return x;
720
721 temp = gen_reg_rtx (GET_MODE (x));
722
723 if (MEM_POINTER (x))
724 REG_POINTER (temp) = 1;
725
726 emit_move_insn (temp, x);
727 return temp;
728 }
729
730 /* Copy X to TARGET (if it's nonzero and a reg)
731 or to a new temp reg and return that reg.
732 MODE is the mode to use for X in case it is a constant. */
733
734 rtx
735 copy_to_suggested_reg (rtx x, rtx target, machine_mode mode)
736 {
737 rtx temp;
738
739 if (target && REG_P (target))
740 temp = target;
741 else
742 temp = gen_reg_rtx (mode);
743
744 emit_move_insn (temp, x);
745 return temp;
746 }
747 \f
748 /* Return the mode to use to pass or return a scalar of TYPE and MODE.
749 PUNSIGNEDP points to the signedness of the type and may be adjusted
750 to show what signedness to use on extension operations.
751
752 FOR_RETURN is nonzero if the caller is promoting the return value
753 of FNDECL, else it is for promoting args. */
754
755 machine_mode
756 promote_function_mode (const_tree type, machine_mode mode, int *punsignedp,
757 const_tree funtype, int for_return)
758 {
759 /* Called without a type node for a libcall. */
760 if (type == NULL_TREE)
761 {
762 if (INTEGRAL_MODE_P (mode))
763 return targetm.calls.promote_function_mode (NULL_TREE, mode,
764 punsignedp, funtype,
765 for_return);
766 else
767 return mode;
768 }
769
770 switch (TREE_CODE (type))
771 {
772 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
773 case REAL_TYPE: case OFFSET_TYPE: case FIXED_POINT_TYPE:
774 case POINTER_TYPE: case REFERENCE_TYPE:
775 return targetm.calls.promote_function_mode (type, mode, punsignedp, funtype,
776 for_return);
777
778 default:
779 return mode;
780 }
781 }
782 /* Return the mode to use to store a scalar of TYPE and MODE.
783 PUNSIGNEDP points to the signedness of the type and may be adjusted
784 to show what signedness to use on extension operations. */
785
786 machine_mode
787 promote_mode (const_tree type ATTRIBUTE_UNUSED, machine_mode mode,
788 int *punsignedp ATTRIBUTE_UNUSED)
789 {
790 #ifdef PROMOTE_MODE
791 enum tree_code code;
792 int unsignedp;
793 #endif
794
795   /* For libcalls this is invoked without TYPE from the backend's
796 TARGET_PROMOTE_FUNCTION_MODE hooks. Don't do anything in that
797 case. */
798 if (type == NULL_TREE)
799 return mode;
800
801 /* FIXME: this is the same logic that was there until GCC 4.4, but we
802 probably want to test POINTERS_EXTEND_UNSIGNED even if PROMOTE_MODE
803 is not defined. The affected targets are M32C, S390, SPARC. */
804 #ifdef PROMOTE_MODE
805 code = TREE_CODE (type);
806 unsignedp = *punsignedp;
807
808 switch (code)
809 {
810 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
811 case REAL_TYPE: case OFFSET_TYPE: case FIXED_POINT_TYPE:
812 PROMOTE_MODE (mode, unsignedp, type);
813 *punsignedp = unsignedp;
814 return mode;
815 break;
816
817 #ifdef POINTERS_EXTEND_UNSIGNED
818 case REFERENCE_TYPE:
819 case POINTER_TYPE:
820 *punsignedp = POINTERS_EXTEND_UNSIGNED;
821 return targetm.addr_space.address_mode
822 (TYPE_ADDR_SPACE (TREE_TYPE (type)));
823 break;
824 #endif
825
826 default:
827 return mode;
828 }
829 #else
830 return mode;
831 #endif
832 }
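/* For illustration (target-dependent): on a target whose PROMOTE_MODE
   macro widens sub-word integers, promote_mode for a "short" variable
   with HImode typically returns SImode and sets *PUNSIGNEDP to the
   signedness the target wants for the extension; without PROMOTE_MODE
   the mode is returned unchanged.  */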
833
834
835 /* Use one of promote_mode or promote_function_mode to find the promoted
836 mode of DECL. If PUNSIGNEDP is not NULL, store there the unsignedness
837 of DECL after promotion. */
838
839 machine_mode
840 promote_decl_mode (const_tree decl, int *punsignedp)
841 {
842 tree type = TREE_TYPE (decl);
843 int unsignedp = TYPE_UNSIGNED (type);
844 machine_mode mode = DECL_MODE (decl);
845 machine_mode pmode;
846
847 if (TREE_CODE (decl) == RESULT_DECL
848 || TREE_CODE (decl) == PARM_DECL)
849 pmode = promote_function_mode (type, mode, &unsignedp,
850 TREE_TYPE (current_function_decl), 2);
851 else
852 pmode = promote_mode (type, mode, &unsignedp);
853
854 if (punsignedp)
855 *punsignedp = unsignedp;
856 return pmode;
857 }
858
859 \f
860 /* Controls the behaviour of {anti_,}adjust_stack. */
861 static bool suppress_reg_args_size;
862
863 /* A helper for adjust_stack and anti_adjust_stack. */
864
865 static void
866 adjust_stack_1 (rtx adjust, bool anti_p)
867 {
868 rtx temp;
869 rtx_insn *insn;
870
871 /* Hereafter anti_p means subtract_p. */
872 if (!STACK_GROWS_DOWNWARD)
873 anti_p = !anti_p;
874
875 temp = expand_binop (Pmode,
876 anti_p ? sub_optab : add_optab,
877 stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
878 OPTAB_LIB_WIDEN);
879
880 if (temp != stack_pointer_rtx)
881 insn = emit_move_insn (stack_pointer_rtx, temp);
882 else
883 {
884 insn = get_last_insn ();
885 temp = single_set (insn);
886 gcc_assert (temp != NULL && SET_DEST (temp) == stack_pointer_rtx);
887 }
888
889 if (!suppress_reg_args_size)
890 add_reg_note (insn, REG_ARGS_SIZE, GEN_INT (stack_pointer_delta));
891 }
892
893 /* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
894 This pops when ADJUST is positive. ADJUST need not be constant. */
895
896 void
897 adjust_stack (rtx adjust)
898 {
899 if (adjust == const0_rtx)
900 return;
901
902   /* We expect all variable-sized adjustments to be a multiple of
903 PREFERRED_STACK_BOUNDARY. */
904 if (CONST_INT_P (adjust))
905 stack_pointer_delta -= INTVAL (adjust);
906
907 adjust_stack_1 (adjust, false);
908 }
909
910 /* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
911 This pushes when ADJUST is positive. ADJUST need not be constant. */
912
913 void
914 anti_adjust_stack (rtx adjust)
915 {
916 if (adjust == const0_rtx)
917 return;
918
919   /* We expect all variable-sized adjustments to be a multiple of
920 PREFERRED_STACK_BOUNDARY. */
921 if (CONST_INT_P (adjust))
922 stack_pointer_delta += INTVAL (adjust);
923
924 adjust_stack_1 (adjust, true);
925 }
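/* For illustration: on a STACK_GROWS_DOWNWARD target,
   anti_adjust_stack (GEN_INT (32)) records stack_pointer_delta += 32
   and emits "sp = sp - 32", i.e. it pushes 32 bytes, while
   adjust_stack (GEN_INT (32)) does the reverse and pops them again.  */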
926
927 /* Round the size of a block to be pushed up to the boundary required
928 by this machine. SIZE is the desired size, which need not be constant. */
929
930 static rtx
931 round_push (rtx size)
932 {
933 rtx align_rtx, alignm1_rtx;
934
935 if (!SUPPORTS_STACK_ALIGNMENT
936 || crtl->preferred_stack_boundary == MAX_SUPPORTED_STACK_ALIGNMENT)
937 {
938 int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
939
940 if (align == 1)
941 return size;
942
943 if (CONST_INT_P (size))
944 {
945 HOST_WIDE_INT new_size = (INTVAL (size) + align - 1) / align * align;
946
947 if (INTVAL (size) != new_size)
948 size = GEN_INT (new_size);
949 return size;
950 }
951
952 align_rtx = GEN_INT (align);
953 alignm1_rtx = GEN_INT (align - 1);
954 }
955 else
956 {
957 /* If crtl->preferred_stack_boundary might still grow, use
958 virtual_preferred_stack_boundary_rtx instead. This will be
959 substituted by the right value in vregs pass and optimized
960 during combine. */
961 align_rtx = virtual_preferred_stack_boundary_rtx;
962 alignm1_rtx = force_operand (plus_constant (Pmode, align_rtx, -1),
963 NULL_RTX);
964 }
965
966   /* CEIL_DIV_EXPR would have to worry about the addition overflowing,
967      but we know it can't here.  So do the addition ourselves and then
968      use TRUNC_DIV_EXPR.  */
969 size = expand_binop (Pmode, add_optab, size, alignm1_rtx,
970 NULL_RTX, 1, OPTAB_LIB_WIDEN);
971 size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, align_rtx,
972 NULL_RTX, 1);
973 size = expand_mult (Pmode, size, align_rtx, NULL_RTX, 1);
974
975 return size;
976 }
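/* Worked example (assuming the first branch above, i.e. the preferred
   stack boundary is already final): with a 64-bit boundary ALIGN is 8,
   so a constant SIZE of 13 is rounded to (13 + 8 - 1) / 8 * 8 = 16.
   For a variable SIZE the same rounding is emitted as RTL,
   size = ((size + 7) / 8) * 8, using TRUNC_DIV_EXPR because the added
   ALIGN - 1 is known not to overflow here.  */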
977 \f
978 /* Save the stack pointer for the purpose in SAVE_LEVEL. PSAVE is a pointer
979 to a previously-created save area. If no save area has been allocated,
980 this function will allocate one. If a save area is specified, it
981 must be of the proper mode. */
982
983 void
984 emit_stack_save (enum save_level save_level, rtx *psave)
985 {
986 rtx sa = *psave;
987 /* The default is that we use a move insn and save in a Pmode object. */
988 rtx (*fcn) (rtx, rtx) = gen_move_insn_uncast;
989 machine_mode mode = STACK_SAVEAREA_MODE (save_level);
990
991 /* See if this machine has anything special to do for this kind of save. */
992 switch (save_level)
993 {
994 #ifdef HAVE_save_stack_block
995 case SAVE_BLOCK:
996 if (HAVE_save_stack_block)
997 fcn = gen_save_stack_block;
998 break;
999 #endif
1000 #ifdef HAVE_save_stack_function
1001 case SAVE_FUNCTION:
1002 if (HAVE_save_stack_function)
1003 fcn = gen_save_stack_function;
1004 break;
1005 #endif
1006 #ifdef HAVE_save_stack_nonlocal
1007 case SAVE_NONLOCAL:
1008 if (HAVE_save_stack_nonlocal)
1009 fcn = gen_save_stack_nonlocal;
1010 break;
1011 #endif
1012 default:
1013 break;
1014 }
1015
1016 /* If there is no save area and we have to allocate one, do so. Otherwise
1017 verify the save area is the proper mode. */
1018
1019 if (sa == 0)
1020 {
1021 if (mode != VOIDmode)
1022 {
1023 if (save_level == SAVE_NONLOCAL)
1024 *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
1025 else
1026 *psave = sa = gen_reg_rtx (mode);
1027 }
1028 }
1029
1030 do_pending_stack_adjust ();
1031 if (sa != 0)
1032 sa = validize_mem (sa);
1033 emit_insn (fcn (sa, stack_pointer_rtx));
1034 }
1035
1036 /* Restore the stack pointer for the purpose in SAVE_LEVEL. SA is the save
1037 area made by emit_stack_save. If it is zero, we have nothing to do. */
1038
1039 void
1040 emit_stack_restore (enum save_level save_level, rtx sa)
1041 {
1042 /* The default is that we use a move insn. */
1043 rtx (*fcn) (rtx, rtx) = gen_move_insn_uncast;
1044
1045 /* If stack_realign_drap, the x86 backend emits a prologue that aligns both
1046 STACK_POINTER and HARD_FRAME_POINTER.
1047 If stack_realign_fp, the x86 backend emits a prologue that aligns only
1048 STACK_POINTER. This renders the HARD_FRAME_POINTER unusable for accessing
1049 aligned variables, which is reflected in ix86_can_eliminate.
1050 We normally still have the realigned STACK_POINTER that we can use.
1051 But if there is a stack restore still present at reload, it can trigger
1052 mark_not_eliminable for the STACK_POINTER, leaving no way to eliminate
1053 FRAME_POINTER into a hard reg.
1054 To prevent this situation, we force need_drap if we emit a stack
1055 restore. */
1056 if (SUPPORTS_STACK_ALIGNMENT)
1057 crtl->need_drap = true;
1058
1059 /* See if this machine has anything special to do for this kind of save. */
1060 switch (save_level)
1061 {
1062 #ifdef HAVE_restore_stack_block
1063 case SAVE_BLOCK:
1064 if (HAVE_restore_stack_block)
1065 fcn = gen_restore_stack_block;
1066 break;
1067 #endif
1068 #ifdef HAVE_restore_stack_function
1069 case SAVE_FUNCTION:
1070 if (HAVE_restore_stack_function)
1071 fcn = gen_restore_stack_function;
1072 break;
1073 #endif
1074 #ifdef HAVE_restore_stack_nonlocal
1075 case SAVE_NONLOCAL:
1076 if (HAVE_restore_stack_nonlocal)
1077 fcn = gen_restore_stack_nonlocal;
1078 break;
1079 #endif
1080 default:
1081 break;
1082 }
1083
1084 if (sa != 0)
1085 {
1086 sa = validize_mem (sa);
1087 /* These clobbers prevent the scheduler from moving
1088 references to variable arrays below the code
1089 that deletes (pops) the arrays. */
1090 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1091 emit_clobber (gen_rtx_MEM (BLKmode, stack_pointer_rtx));
1092 }
1093
1094 discard_pending_stack_adjust ();
1095
1096 emit_insn (fcn (stack_pointer_rtx, sa));
1097 }
1098
1099 /* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
1100 function. This should be called whenever we allocate or deallocate
1101 dynamic stack space. */
1102
1103 void
1104 update_nonlocal_goto_save_area (void)
1105 {
1106 tree t_save;
1107 rtx r_save;
1108
1109 /* The nonlocal_goto_save_area object is an array of N pointers. The
1110 first one is used for the frame pointer save; the rest are sized by
1111 STACK_SAVEAREA_MODE. Create a reference to array index 1, the first
1112 of the stack save area slots. */
1113 t_save = build4 (ARRAY_REF,
1114 TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
1115 cfun->nonlocal_goto_save_area,
1116 integer_one_node, NULL_TREE, NULL_TREE);
1117 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
1118
1119 emit_stack_save (SAVE_NONLOCAL, &r_save);
1120 }
1121
1122 /* Record a new stack level for the current function. This should be called
1123 whenever we allocate or deallocate dynamic stack space. */
1124
1125 void
1126 record_new_stack_level (void)
1127 {
1128 /* Record the new stack level for nonlocal gotos. */
1129 if (cfun->nonlocal_goto_save_area)
1130 update_nonlocal_goto_save_area ();
1131
1132 /* Record the new stack level for SJLJ exceptions. */
1133 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
1134 update_sjlj_context ();
1135 }
1136 \f
1137 /* Return an rtx representing the address of an area of memory dynamically
1138 pushed on the stack.
1139
1140 Any required stack pointer alignment is preserved.
1141
1142 SIZE is an rtx representing the size of the area.
1143
1144 SIZE_ALIGN is the alignment (in bits) that we know SIZE has. This
1145 parameter may be zero. If so, a proper value will be extracted
1146 from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.
1147
1148 REQUIRED_ALIGN is the alignment (in bits) required for the region
1149 of memory.
1150
1151 If CANNOT_ACCUMULATE is set to TRUE, the caller guarantees that the
1152 stack space allocated by the generated code cannot be added with itself
1153 in the course of the execution of the function. It is always safe to
1154 pass FALSE here and the following criterion is sufficient in order to
1155 pass TRUE: every path in the CFG that starts at the allocation point and
1156 loops to it executes the associated deallocation code. */
1157
1158 rtx
1159 allocate_dynamic_stack_space (rtx size, unsigned size_align,
1160 unsigned required_align, bool cannot_accumulate)
1161 {
1162 HOST_WIDE_INT stack_usage_size = -1;
1163 rtx_code_label *final_label;
1164 rtx final_target, target;
1165 unsigned extra_align = 0;
1166 bool must_align;
1167
1168 /* If we're asking for zero bytes, it doesn't matter what we point
1169 to since we can't dereference it. But return a reasonable
1170 address anyway. */
1171 if (size == const0_rtx)
1172 return virtual_stack_dynamic_rtx;
1173
1174 /* Otherwise, show we're calling alloca or equivalent. */
1175 cfun->calls_alloca = 1;
1176
1177 /* If stack usage info is requested, look into the size we are passed.
1178 We need to do so this early to avoid the obfuscation that may be
1179 introduced later by the various alignment operations. */
1180 if (flag_stack_usage_info)
1181 {
1182 if (CONST_INT_P (size))
1183 stack_usage_size = INTVAL (size);
1184 else if (REG_P (size))
1185 {
1186 /* Look into the last emitted insn and see if we can deduce
1187 something for the register. */
1188 rtx_insn *insn;
1189 rtx set, note;
1190 insn = get_last_insn ();
1191 if ((set = single_set (insn)) && rtx_equal_p (SET_DEST (set), size))
1192 {
1193 if (CONST_INT_P (SET_SRC (set)))
1194 stack_usage_size = INTVAL (SET_SRC (set));
1195 else if ((note = find_reg_equal_equiv_note (insn))
1196 && CONST_INT_P (XEXP (note, 0)))
1197 stack_usage_size = INTVAL (XEXP (note, 0));
1198 }
1199 }
1200
1201 /* If the size is not constant, we can't say anything. */
1202 if (stack_usage_size == -1)
1203 {
1204 current_function_has_unbounded_dynamic_stack_size = 1;
1205 stack_usage_size = 0;
1206 }
1207 }
1208
1209 /* Ensure the size is in the proper mode. */
1210 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
1211 size = convert_to_mode (Pmode, size, 1);
1212
1213 /* Adjust SIZE_ALIGN, if needed. */
1214 if (CONST_INT_P (size))
1215 {
1216 unsigned HOST_WIDE_INT lsb;
1217
1218 lsb = INTVAL (size);
1219 lsb &= -lsb;
1220
1221 /* Watch out for overflow truncating to "unsigned". */
1222 if (lsb > UINT_MAX / BITS_PER_UNIT)
1223 size_align = 1u << (HOST_BITS_PER_INT - 1);
1224 else
1225 size_align = (unsigned)lsb * BITS_PER_UNIT;
1226 }
1227 else if (size_align < BITS_PER_UNIT)
1228 size_align = BITS_PER_UNIT;
1229
1230   /* We can't attempt to minimize the necessary alignment, because we don't
1231 know the final value of preferred_stack_boundary yet while executing
1232 this code. */
1233 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
1234 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
1235
1236 /* We will need to ensure that the address we return is aligned to
1237 REQUIRED_ALIGN. If STACK_DYNAMIC_OFFSET is defined, we don't
1238 always know its final value at this point in the compilation (it
1239 might depend on the size of the outgoing parameter lists, for
1240 example), so we must align the value to be returned in that case.
1241 (Note that STACK_DYNAMIC_OFFSET will have a default nonzero value if
1242 STACK_POINTER_OFFSET or ACCUMULATE_OUTGOING_ARGS are defined).
1243 We must also do an alignment operation on the returned value if
1244 the stack pointer alignment is less strict than REQUIRED_ALIGN.
1245
1246 If we have to align, we must leave space in SIZE for the hole
1247 that might result from the alignment operation. */
1248
1249 must_align = (crtl->preferred_stack_boundary < required_align);
1250 if (must_align)
1251 {
1252 if (required_align > PREFERRED_STACK_BOUNDARY)
1253 extra_align = PREFERRED_STACK_BOUNDARY;
1254 else if (required_align > STACK_BOUNDARY)
1255 extra_align = STACK_BOUNDARY;
1256 else
1257 extra_align = BITS_PER_UNIT;
1258 }
1259
1260 /* ??? STACK_POINTER_OFFSET is always defined now. */
1261 #if defined (STACK_DYNAMIC_OFFSET) || defined (STACK_POINTER_OFFSET)
1262 must_align = true;
1263 extra_align = BITS_PER_UNIT;
1264 #endif
1265
1266 if (must_align)
1267 {
1268 unsigned extra = (required_align - extra_align) / BITS_PER_UNIT;
1269
1270 size = plus_constant (Pmode, size, extra);
1271 size = force_operand (size, NULL_RTX);
1272
1273 if (flag_stack_usage_info)
1274 stack_usage_size += extra;
1275
1276 if (extra && size_align > extra_align)
1277 size_align = extra_align;
1278 }
1279
1280 /* Round the size to a multiple of the required stack alignment.
1281      Since the stack is presumed to be rounded before this allocation,
1282 this will maintain the required alignment.
1283
1284 If the stack grows downward, we could save an insn by subtracting
1285 SIZE from the stack pointer and then aligning the stack pointer.
1286 The problem with this is that the stack pointer may be unaligned
1287 between the execution of the subtraction and alignment insns and
1288 some machines do not allow this. Even on those that do, some
1289 signal handlers malfunction if a signal should occur between those
1290 insns. Since this is an extremely rare event, we have no reliable
1291 way of knowing which systems have this problem. So we avoid even
1292 momentarily mis-aligning the stack. */
1293 if (size_align % MAX_SUPPORTED_STACK_ALIGNMENT != 0)
1294 {
1295 size = round_push (size);
1296
1297 if (flag_stack_usage_info)
1298 {
1299 int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
1300 stack_usage_size = (stack_usage_size + align - 1) / align * align;
1301 }
1302 }
1303
1304 target = gen_reg_rtx (Pmode);
1305
1306 /* The size is supposed to be fully adjusted at this point so record it
1307 if stack usage info is requested. */
1308 if (flag_stack_usage_info)
1309 {
1310 current_function_dynamic_stack_size += stack_usage_size;
1311
1312 /* ??? This is gross but the only safe stance in the absence
1313 of stack usage oriented flow analysis. */
1314 if (!cannot_accumulate)
1315 current_function_has_unbounded_dynamic_stack_size = 1;
1316 }
1317
1318 final_label = NULL;
1319 final_target = NULL_RTX;
1320
1321 /* If we are splitting the stack, we need to ask the backend whether
1322 there is enough room on the current stack. If there isn't, or if
1323      the backend doesn't know how to tell us, then we need to call a
1324 function to allocate memory in some other way. This memory will
1325 be released when we release the current stack segment. The
1326 effect is that stack allocation becomes less efficient, but at
1327 least it doesn't cause a stack overflow. */
1328 if (flag_split_stack)
1329 {
1330 rtx_code_label *available_label;
1331 rtx ask, space, func;
1332
1333 available_label = NULL;
1334
1335 #ifdef HAVE_split_stack_space_check
1336 if (HAVE_split_stack_space_check)
1337 {
1338 available_label = gen_label_rtx ();
1339
1340 /* This instruction will branch to AVAILABLE_LABEL if there
1341 are SIZE bytes available on the stack. */
1342 emit_insn (gen_split_stack_space_check (size, available_label));
1343 }
1344 #endif
1345
1346 /* The __morestack_allocate_stack_space function will allocate
1347 memory using malloc. If the alignment of the memory returned
1348 by malloc does not meet REQUIRED_ALIGN, we increase SIZE to
1349 make sure we allocate enough space. */
1350 if (MALLOC_ABI_ALIGNMENT >= required_align)
1351 ask = size;
1352 else
1353 {
1354 ask = expand_binop (Pmode, add_optab, size,
1355 gen_int_mode (required_align / BITS_PER_UNIT - 1,
1356 Pmode),
1357 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1358 must_align = true;
1359 }
1360
1361 func = init_one_libfunc ("__morestack_allocate_stack_space");
1362
1363 space = emit_library_call_value (func, target, LCT_NORMAL, Pmode,
1364 1, ask, Pmode);
1365
1366 if (available_label == NULL_RTX)
1367 return space;
1368
1369 final_target = gen_reg_rtx (Pmode);
1370
1371 emit_move_insn (final_target, space);
1372
1373 final_label = gen_label_rtx ();
1374 emit_jump (final_label);
1375
1376 emit_label (available_label);
1377 }
1378
1379 do_pending_stack_adjust ();
1380
1381   /* We ought always to be called at the top level, and the stack ought to
1382      be properly aligned.  */
1383 gcc_assert (!(stack_pointer_delta
1384 % (PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT)));
1385
1386 /* If needed, check that we have the required amount of stack. Take into
1387 account what has already been checked. */
1388 if (STACK_CHECK_MOVING_SP)
1389 ;
1390 else if (flag_stack_check == GENERIC_STACK_CHECK)
1391 probe_stack_range (STACK_OLD_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE,
1392 size);
1393 else if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK)
1394 probe_stack_range (STACK_CHECK_PROTECT, size);
1395
1396 /* Don't let anti_adjust_stack emit notes. */
1397 suppress_reg_args_size = true;
1398
1399 /* Perform the required allocation from the stack. Some systems do
1400 this differently than simply incrementing/decrementing from the
1401 stack pointer, such as acquiring the space by calling malloc(). */
1402 #ifdef HAVE_allocate_stack
1403 if (HAVE_allocate_stack)
1404 {
1405 struct expand_operand ops[2];
1406 /* We don't have to check against the predicate for operand 0 since
1407 TARGET is known to be a pseudo of the proper mode, which must
1408 be valid for the operand. */
1409 create_fixed_operand (&ops[0], target);
1410 create_convert_operand_to (&ops[1], size, STACK_SIZE_MODE, true);
1411 expand_insn (CODE_FOR_allocate_stack, 2, ops);
1412 }
1413 else
1414 #endif
1415 {
1416 int saved_stack_pointer_delta;
1417
1418 if (!STACK_GROWS_DOWNWARD)
1419 emit_move_insn (target, virtual_stack_dynamic_rtx);
1420
1421 /* Check stack bounds if necessary. */
1422 if (crtl->limit_stack)
1423 {
1424 rtx available;
1425 rtx_code_label *space_available = gen_label_rtx ();
1426 if (STACK_GROWS_DOWNWARD)
1427 available = expand_binop (Pmode, sub_optab,
1428 stack_pointer_rtx, stack_limit_rtx,
1429 NULL_RTX, 1, OPTAB_WIDEN);
1430 else
1431 available = expand_binop (Pmode, sub_optab,
1432 stack_limit_rtx, stack_pointer_rtx,
1433 NULL_RTX, 1, OPTAB_WIDEN);
1434
1435 emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
1436 space_available);
1437 #ifdef HAVE_trap
1438 if (HAVE_trap)
1439 emit_insn (gen_trap ());
1440 else
1441 #endif
1442 error ("stack limits not supported on this target");
1443 emit_barrier ();
1444 emit_label (space_available);
1445 }
1446
1447 saved_stack_pointer_delta = stack_pointer_delta;
1448
1449 if (flag_stack_check && STACK_CHECK_MOVING_SP)
1450 anti_adjust_stack_and_probe (size, false);
1451 else
1452 anti_adjust_stack (size);
1453
1454 /* Even if size is constant, don't modify stack_pointer_delta.
1455 The constant size alloca should preserve
1456 crtl->preferred_stack_boundary alignment. */
1457 stack_pointer_delta = saved_stack_pointer_delta;
1458
1459 if (STACK_GROWS_DOWNWARD)
1460 emit_move_insn (target, virtual_stack_dynamic_rtx);
1461 }
1462
1463 suppress_reg_args_size = false;
1464
1465 /* Finish up the split stack handling. */
1466 if (final_label != NULL_RTX)
1467 {
1468 gcc_assert (flag_split_stack);
1469 emit_move_insn (final_target, target);
1470 emit_label (final_label);
1471 target = final_target;
1472 }
1473
1474 if (must_align)
1475 {
1476       /* CEIL_DIV_EXPR would have to worry about the addition overflowing,
1477 	 but we know it can't here.  So do the addition ourselves and then
1478 	 use TRUNC_DIV_EXPR.  */
1479 target = expand_binop (Pmode, add_optab, target,
1480 gen_int_mode (required_align / BITS_PER_UNIT - 1,
1481 Pmode),
1482 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1483 target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
1484 gen_int_mode (required_align / BITS_PER_UNIT,
1485 Pmode),
1486 NULL_RTX, 1);
1487 target = expand_mult (Pmode, target,
1488 gen_int_mode (required_align / BITS_PER_UNIT,
1489 Pmode),
1490 NULL_RTX, 1);
1491 }
1492
1493 /* Now that we've committed to a return value, mark its alignment. */
1494 mark_reg_pointer (target, required_align);
1495
1496 /* Record the new stack level. */
1497 record_new_stack_level ();
1498
1499 return target;
1500 }
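/* Illustration of the final alignment step (assuming EXTRA_ALIGN ended up
   as BITS_PER_UNIT): with REQUIRED_ALIGN = 128 bits, SIZE is padded by
   (128 - 8) / 8 = 15 bytes up front, and the returned address is rounded
   with

     target = ((target + 15) / 16) * 16

   so whatever address the raw allocation produced, the rounded result is
   16-byte aligned and still has SIZE usable bytes inside the padded
   block.  */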
1501 \f
1502 /* A front end may want to override GCC's stack checking by providing a
1503 run-time routine to call to check the stack, so provide a mechanism for
1504 calling that routine. */
1505
1506 static GTY(()) rtx stack_check_libfunc;
1507
1508 void
1509 set_stack_check_libfunc (const char *libfunc_name)
1510 {
1511 gcc_assert (stack_check_libfunc == NULL_RTX);
1512 stack_check_libfunc = gen_rtx_SYMBOL_REF (Pmode, libfunc_name);
1513 }
1514 \f
1515 /* Emit one stack probe at ADDRESS, an address within the stack. */
1516
1517 void
1518 emit_stack_probe (rtx address)
1519 {
1520 #ifdef HAVE_probe_stack_address
1521 if (HAVE_probe_stack_address)
1522 emit_insn (gen_probe_stack_address (address));
1523 else
1524 #endif
1525 {
1526 rtx memref = gen_rtx_MEM (word_mode, address);
1527
1528 MEM_VOLATILE_P (memref) = 1;
1529
1530 /* See if we have an insn to probe the stack. */
1531 #ifdef HAVE_probe_stack
1532 if (HAVE_probe_stack)
1533 emit_insn (gen_probe_stack (memref));
1534 else
1535 #endif
1536 emit_move_insn (memref, const0_rtx);
1537 }
1538 }
1539
1540 /* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
1541    FIRST is a constant and SIZE is a Pmode RTX.  These are offsets from
1542 the current stack pointer. STACK_GROWS_DOWNWARD says whether to add
1543 or subtract them from the stack pointer. */
1544
1545 #define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP)
1546
1547 #if STACK_GROWS_DOWNWARD
1548 #define STACK_GROW_OP MINUS
1549 #define STACK_GROW_OPTAB sub_optab
1550 #define STACK_GROW_OFF(off) -(off)
1551 #else
1552 #define STACK_GROW_OP PLUS
1553 #define STACK_GROW_OPTAB add_optab
1554 #define STACK_GROW_OFF(off) (off)
1555 #endif
1556
1557 void
1558 probe_stack_range (HOST_WIDE_INT first, rtx size)
1559 {
1560 /* First ensure SIZE is Pmode. */
1561 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
1562 size = convert_to_mode (Pmode, size, 1);
1563
1564 /* Next see if we have a function to check the stack. */
1565 if (stack_check_libfunc)
1566 {
1567 rtx addr = memory_address (Pmode,
1568 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1569 stack_pointer_rtx,
1570 plus_constant (Pmode,
1571 size, first)));
1572 emit_library_call (stack_check_libfunc, LCT_NORMAL, VOIDmode, 1, addr,
1573 Pmode);
1574 }
1575
1576 /* Next see if we have an insn to check the stack. */
1577 #ifdef HAVE_check_stack
1578 else if (HAVE_check_stack)
1579 {
1580 struct expand_operand ops[1];
1581 rtx addr = memory_address (Pmode,
1582 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1583 stack_pointer_rtx,
1584 plus_constant (Pmode,
1585 size, first)));
1586 bool success;
1587 create_input_operand (&ops[0], addr, Pmode);
1588 success = maybe_expand_insn (CODE_FOR_check_stack, 1, ops);
1589 gcc_assert (success);
1590 }
1591 #endif
1592
1593 /* Otherwise we have to generate explicit probes. If we have a constant
1594 small number of them to generate, that's the easy case. */
1595 else if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
1596 {
1597 HOST_WIDE_INT isize = INTVAL (size), i;
1598 rtx addr;
1599
1600 /* Probe at FIRST + N * PROBE_INTERVAL for values of N from 1 until
1601 it exceeds SIZE. If only one probe is needed, this will not
1602 generate any code. Then probe at FIRST + SIZE. */
1603 for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
1604 {
1605 addr = memory_address (Pmode,
1606 plus_constant (Pmode, stack_pointer_rtx,
1607 STACK_GROW_OFF (first + i)));
1608 emit_stack_probe (addr);
1609 }
1610
1611 addr = memory_address (Pmode,
1612 plus_constant (Pmode, stack_pointer_rtx,
1613 STACK_GROW_OFF (first + isize)));
1614 emit_stack_probe (addr);
1615 }
1616
1617 /* In the variable case, do the same as above, but in a loop. Note that we
1618 must be extra careful with variables wrapping around because we might be
1619 at the very top (or the very bottom) of the address space and we have to
1620 be able to handle this case properly; in particular, we use an equality
1621 test for the loop condition. */
1622 else
1623 {
1624 rtx rounded_size, rounded_size_op, test_addr, last_addr, temp;
1625 rtx_code_label *loop_lab = gen_label_rtx ();
1626 rtx_code_label *end_lab = gen_label_rtx ();
1627
1628 /* Step 1: round SIZE to the previous multiple of the interval. */
1629
1630 /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL */
1631 rounded_size
1632 = simplify_gen_binary (AND, Pmode, size,
1633 gen_int_mode (-PROBE_INTERVAL, Pmode));
1634 rounded_size_op = force_operand (rounded_size, NULL_RTX);
1635
1636
1637 /* Step 2: compute initial and final value of the loop counter. */
1638
1639 /* TEST_ADDR = SP + FIRST. */
1640 test_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1641 stack_pointer_rtx,
1642 gen_int_mode (first, Pmode)),
1643 NULL_RTX);
1644
1645 /* LAST_ADDR = SP + FIRST + ROUNDED_SIZE. */
1646 last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1647 test_addr,
1648 rounded_size_op), NULL_RTX);
1649
1650
1651 /* Step 3: the loop
1652
1653 while (TEST_ADDR != LAST_ADDR)
1654 {
1655 TEST_ADDR = TEST_ADDR + PROBE_INTERVAL
1656 probe at TEST_ADDR
1657 }
1658
1659 probes at FIRST + N * PROBE_INTERVAL for values of N from 1
1660 until it is equal to ROUNDED_SIZE. */
1661
1662 emit_label (loop_lab);
1663
1664 /* Jump to END_LAB if TEST_ADDR == LAST_ADDR. */
1665 emit_cmp_and_jump_insns (test_addr, last_addr, EQ, NULL_RTX, Pmode, 1,
1666 end_lab);
1667
1668 /* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL. */
1669 temp = expand_binop (Pmode, STACK_GROW_OPTAB, test_addr,
1670 gen_int_mode (PROBE_INTERVAL, Pmode), test_addr,
1671 1, OPTAB_WIDEN);
1672
1673 gcc_assert (temp == test_addr);
1674
1675 /* Probe at TEST_ADDR. */
1676 emit_stack_probe (test_addr);
1677
1678 emit_jump (loop_lab);
1679
1680 emit_label (end_lab);
1681
1682
1683 /* Step 4: probe at FIRST + SIZE if we cannot assert at compile-time
1684 that SIZE is equal to ROUNDED_SIZE. */
1685
1686 /* TEMP = SIZE - ROUNDED_SIZE. */
1687 temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
1688 if (temp != const0_rtx)
1689 {
1690 rtx addr;
1691
1692 if (CONST_INT_P (temp))
1693 {
1694 	  /* Use [base + disp] addressing mode if supported.  */
1695 HOST_WIDE_INT offset = INTVAL (temp);
1696 addr = memory_address (Pmode,
1697 plus_constant (Pmode, last_addr,
1698 STACK_GROW_OFF (offset)));
1699 }
1700 else
1701 {
1702 /* Manual CSE if the difference is not known at compile-time. */
1703 temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
1704 addr = memory_address (Pmode,
1705 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1706 last_addr, temp));
1707 }
1708
1709 emit_stack_probe (addr);
1710 }
1711 }
1712
1713 /* Make sure nothing is scheduled before we are done. */
1714 emit_insn (gen_blockage ());
1715 }
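/* Worked example: with PROBE_INTERVAL = 4096, FIRST = 0 and a constant
   SIZE of 10000, the small-constant case above probes at offsets 4096,
   8192 and finally 10000 from the stack pointer (in the direction of
   stack growth).  For larger or variable sizes the loop walks TEST_ADDR
   from SP + FIRST to SP + FIRST + ROUNDED_SIZE one PROBE_INTERVAL at a
   time, and a trailing probe covers any remainder.  */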
1716
1717 /* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
1718 while probing it. This pushes when SIZE is positive. SIZE need not
1719 be constant. If ADJUST_BACK is true, adjust back the stack pointer
1720 by plus SIZE at the end. */
1721
1722 void
1723 anti_adjust_stack_and_probe (rtx size, bool adjust_back)
1724 {
1725 /* We skip the probe for the first interval + a small dope of 4 words and
1726 probe that many bytes past the specified size to maintain a protection
1727      area at the bottom of the stack.  */
1728 const int dope = 4 * UNITS_PER_WORD;
1729
1730 /* First ensure SIZE is Pmode. */
1731 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
1732 size = convert_to_mode (Pmode, size, 1);
1733
1734 /* If we have a constant small number of probes to generate, that's the
1735 easy case. */
1736 if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
1737 {
1738 HOST_WIDE_INT isize = INTVAL (size), i;
1739 bool first_probe = true;
1740
1741 /* Adjust SP and probe at PROBE_INTERVAL + N * PROBE_INTERVAL for
1742 values of N from 1 until it exceeds SIZE. If only one probe is
1743 needed, this will not generate any code. Then adjust and probe
1744 to PROBE_INTERVAL + SIZE. */
1745 for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
1746 {
1747 if (first_probe)
1748 {
1749 anti_adjust_stack (GEN_INT (2 * PROBE_INTERVAL + dope));
1750 first_probe = false;
1751 }
1752 else
1753 anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
1754 emit_stack_probe (stack_pointer_rtx);
1755 }
1756
1757 if (first_probe)
1758 anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
1759 else
1760 anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL - i));
1761 emit_stack_probe (stack_pointer_rtx);
1762 }
1763
1764 /* In the variable case, do the same as above, but in a loop. Note that we
1765 must be extra careful with variables wrapping around because we might be
1766 at the very top (or the very bottom) of the address space and we have to
1767 be able to handle this case properly; in particular, we use an equality
1768 test for the loop condition. */
1769 else
1770 {
1771 rtx rounded_size, rounded_size_op, last_addr, temp;
1772 rtx_code_label *loop_lab = gen_label_rtx ();
1773 rtx_code_label *end_lab = gen_label_rtx ();
1774
1775
1776 /* Step 1: round SIZE to the previous multiple of the interval. */
1777
1778 /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL */
1779 rounded_size
1780 = simplify_gen_binary (AND, Pmode, size,
1781 gen_int_mode (-PROBE_INTERVAL, Pmode));
1782 rounded_size_op = force_operand (rounded_size, NULL_RTX);
1783
1784
1785 /* Step 2: compute initial and final value of the loop counter. */
1786
1787 /* SP = SP_0 + PROBE_INTERVAL. */
1788 anti_adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
1789
1790 /* LAST_ADDR = SP_0 + PROBE_INTERVAL + ROUNDED_SIZE. */
1791 last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1792 stack_pointer_rtx,
1793 rounded_size_op), NULL_RTX);
1794
1795
1796 /* Step 3: the loop
1797
1798 while (SP != LAST_ADDR)
1799 {
1800 SP = SP + PROBE_INTERVAL
1801 probe at SP
1802 }
1803
1804 adjusts SP and probes at PROBE_INTERVAL + N * PROBE_INTERVAL for
1805 values of N from 1 until it is equal to ROUNDED_SIZE. */
1806
1807 emit_label (loop_lab);
1808
1809 /* Jump to END_LAB if SP == LAST_ADDR. */
1810 emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
1811 Pmode, 1, end_lab);
1812
1813 /* SP = SP + PROBE_INTERVAL and probe at SP. */
1814 anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
1815 emit_stack_probe (stack_pointer_rtx);
1816
1817 emit_jump (loop_lab);
1818
1819 emit_label (end_lab);
1820
1821
1822 /* Step 4: adjust SP and probe at PROBE_INTERVAL + SIZE if we cannot
1823 assert at compile-time that SIZE is equal to ROUNDED_SIZE. */
1824
1825 /* TEMP = SIZE - ROUNDED_SIZE. */
1826 temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
1827 if (temp != const0_rtx)
1828 {
1829 /* Manual CSE if the difference is not known at compile-time. */
1830 if (GET_CODE (temp) != CONST_INT)
1831 temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
1832 anti_adjust_stack (temp);
1833 emit_stack_probe (stack_pointer_rtx);
1834 }
1835 }
1836
1837 /* Adjust back and account for the additional first interval. */
1838 if (adjust_back)
1839 adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
1840 else
1841 adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
1842 }
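/* Worked example (PROBE_INTERVAL = 4096, UNITS_PER_WORD = 8, so the dope
   is 32 bytes): for a constant SIZE of 6000 the small-constant case above
   adjusts the stack by 2 * 4096 + 32 and probes, then adjusts by the
   remaining 6000 - 4096 bytes and probes again.  When ADJUST_BACK is
   false, the final adjust_stack pops only the extra interval plus dope,
   leaving a net adjustment of exactly SIZE; when it is true, everything
   is popped again.  */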
1843
1844 /* Return an rtx representing the register or memory location
1845 in which a scalar value of data type VALTYPE
1846 was returned by a function call to function FUNC.
1847 FUNC is a FUNCTION_DECL, FNTYPE a FUNCTION_TYPE node if the precise
1848 function is known, otherwise 0.
1849 OUTGOING is 1 if on a machine with register windows this function
1850 should return the register in which the function will put its result
1851 and 0 otherwise. */
1852
1853 rtx
1854 hard_function_value (const_tree valtype, const_tree func, const_tree fntype,
1855 int outgoing ATTRIBUTE_UNUSED)
1856 {
1857 rtx val;
1858
1859 val = targetm.calls.function_value (valtype, func ? func : fntype, outgoing);
1860
1861 if (REG_P (val)
1862 && GET_MODE (val) == BLKmode)
1863 {
1864 unsigned HOST_WIDE_INT bytes = int_size_in_bytes (valtype);
1865 machine_mode tmpmode;
1866
1867 /* int_size_in_bytes can return -1. We don't need a check here
1868 since the value of bytes will then be large enough that no
1869 mode will match anyway. */
1870
1871 for (tmpmode = GET_CLASS_NARROWEST_MODE (MODE_INT);
1872 tmpmode != VOIDmode;
1873 tmpmode = GET_MODE_WIDER_MODE (tmpmode))
1874 {
1875 /* Have we found a large enough mode? */
1876 if (GET_MODE_SIZE (tmpmode) >= bytes)
1877 break;
1878 }
1879
1880 /* No suitable mode found. */
1881 gcc_assert (tmpmode != VOIDmode);
1882
1883 PUT_MODE (val, tmpmode);
1884 }
1885 return val;
1886 }
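/* For illustration: if the target hook hands back a BLKmode hard register
   for, say, a 6-byte struct, the loop above walks the MODE_INT class and
   picks the narrowest integer mode of at least 6 bytes (typically DImode,
   eight bytes), and the register is re-stamped with that mode.  */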
1887
1888 /* Return an rtx representing the register or memory location
1889 in which a scalar value of mode MODE was returned by a library call. */
1890
1891 rtx
1892 hard_libcall_value (machine_mode mode, rtx fun)
1893 {
1894 return targetm.calls.libcall_value (mode, fun);
1895 }
1896
1897 /* Look up the tree code for a given rtx code
1898 to provide the arithmetic operation for REAL_ARITHMETIC.
1899 The function returns an int because the caller may not know
1900 what `enum tree_code' means. */
1901
1902 int
1903 rtx_to_tree_code (enum rtx_code code)
1904 {
1905 enum tree_code tcode;
1906
1907 switch (code)
1908 {
1909 case PLUS:
1910 tcode = PLUS_EXPR;
1911 break;
1912 case MINUS:
1913 tcode = MINUS_EXPR;
1914 break;
1915 case MULT:
1916 tcode = MULT_EXPR;
1917 break;
1918 case DIV:
1919 tcode = RDIV_EXPR;
1920 break;
1921 case SMIN:
1922 tcode = MIN_EXPR;
1923 break;
1924 case SMAX:
1925 tcode = MAX_EXPR;
1926 break;
1927 default:
1928 tcode = LAST_AND_UNUSED_TREE_CODE;
1929 break;
1930 }
1931 return ((int) tcode);
1932 }
1933
1934 #include "gt-explow.h"