gcc/explow.c
1 /* Subroutines for manipulating rtx's in semantically interesting ways.
2 Copyright (C) 1987-2019 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "target.h"
25 #include "function.h"
26 #include "rtl.h"
27 #include "tree.h"
28 #include "memmodel.h"
29 #include "tm_p.h"
30 #include "expmed.h"
31 #include "profile-count.h"
32 #include "optabs.h"
33 #include "emit-rtl.h"
34 #include "recog.h"
35 #include "diagnostic-core.h"
36 #include "stor-layout.h"
37 #include "except.h"
38 #include "dojump.h"
39 #include "explow.h"
40 #include "expr.h"
41 #include "common/common-target.h"
42 #include "output.h"
43 #include "params.h"
44
45 static rtx break_out_memory_refs (rtx);
46 static void anti_adjust_stack_and_probe_stack_clash (rtx);
47
48
49 /* Truncate and perhaps sign-extend C as appropriate for MODE. */
50
51 HOST_WIDE_INT
52 trunc_int_for_mode (HOST_WIDE_INT c, machine_mode mode)
53 {
54 /* Not scalar_int_mode because we also allow pointer bound modes. */
55 scalar_mode smode = as_a <scalar_mode> (mode);
56 int width = GET_MODE_PRECISION (smode);
57
58 /* You want to truncate to a _what_? */
59 gcc_assert (SCALAR_INT_MODE_P (mode));
60
61 /* Canonicalize BImode to 0 and STORE_FLAG_VALUE. */
62 if (smode == BImode)
63 return c & 1 ? STORE_FLAG_VALUE : 0;
64
65 /* Sign-extend for the requested mode. */
66
67 if (width < HOST_BITS_PER_WIDE_INT)
68 {
69 HOST_WIDE_INT sign = 1;
70 sign <<= width - 1;
71 c &= (sign << 1) - 1;
72 c ^= sign;
73 c -= sign;
74 }
75
76 return c;
77 }
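/* Illustrative worked example (not from the original source), assuming
   an 8-bit QImode: trunc_int_for_mode (0xff, QImode) computes

     width = 8
     sign  = (HOST_WIDE_INT) 1 << 7  ==  0x80
     c &= (sign << 1) - 1;           c == 0xff   (mask to 8 bits)
     c ^= sign;                      c == 0x7f
     c -= sign;                      c == -1

   so the host value 0xff is canonicalized to the sign-extended -1,
   the same bit pattern interpreted as a signed 8-bit value.  */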
78
79 /* Likewise for polynomial values, using the sign-extended representation
80 for each individual coefficient. */
81
82 poly_int64
83 trunc_int_for_mode (poly_int64 x, machine_mode mode)
84 {
85 for (unsigned int i = 0; i < NUM_POLY_INT_COEFFS; ++i)
86 x.coeffs[i] = trunc_int_for_mode (x.coeffs[i], mode);
87 return x;
88 }
89
90 /* Return an rtx for the sum of X and the integer C, given that X has
91 mode MODE. INPLACE is true if X can be modified in place or false
92 if it must be treated as immutable. */
93
94 rtx
95 plus_constant (machine_mode mode, rtx x, poly_int64 c, bool inplace)
96 {
97 RTX_CODE code;
98 rtx y;
99 rtx tem;
100 int all_constant = 0;
101
102 gcc_assert (GET_MODE (x) == VOIDmode || GET_MODE (x) == mode);
103
104 if (known_eq (c, 0))
105 return x;
106
107 restart:
108
109 code = GET_CODE (x);
110 y = x;
111
112 switch (code)
113 {
114 CASE_CONST_SCALAR_INT:
115 return immed_wide_int_const (wi::add (rtx_mode_t (x, mode), c), mode);
116 case MEM:
117 /* If this is a reference to the constant pool, try replacing it with
118 a reference to a new constant. If the resulting address isn't
119 valid, don't return it because we have no way to validize it. */
120 if (GET_CODE (XEXP (x, 0)) == SYMBOL_REF
121 && CONSTANT_POOL_ADDRESS_P (XEXP (x, 0)))
122 {
123 rtx cst = get_pool_constant (XEXP (x, 0));
124
125 if (GET_CODE (cst) == CONST_VECTOR
126 && GET_MODE_INNER (GET_MODE (cst)) == mode)
127 {
128 cst = gen_lowpart (mode, cst);
129 gcc_assert (cst);
130 }
131 if (GET_MODE (cst) == VOIDmode || GET_MODE (cst) == mode)
132 {
133 tem = plus_constant (mode, cst, c);
134 tem = force_const_mem (GET_MODE (x), tem);
135 /* Targets may disallow some constants in the constant pool, thus
136 force_const_mem may return NULL_RTX. */
137 if (tem && memory_address_p (GET_MODE (tem), XEXP (tem, 0)))
138 return tem;
139 }
140 }
141 break;
142
143 case CONST:
144 /* If adding to something entirely constant, set a flag
145 so that we can add a CONST around the result. */
146 if (inplace && shared_const_p (x))
147 inplace = false;
148 x = XEXP (x, 0);
149 all_constant = 1;
150 goto restart;
151
152 case SYMBOL_REF:
153 case LABEL_REF:
154 all_constant = 1;
155 break;
156
157 case PLUS:
158 /* The interesting case is adding the integer to a sum. Look
159 for a constant term in the sum and combine with C. For an
160 integer constant term or a constant term that is not an
161 explicit integer, we combine or group them together anyway.
162
163 We may not immediately return from the recursive call here, lest
164 all_constant gets lost. */
165
166 if (CONSTANT_P (XEXP (x, 1)))
167 {
168 rtx term = plus_constant (mode, XEXP (x, 1), c, inplace);
169 if (term == const0_rtx)
170 x = XEXP (x, 0);
171 else if (inplace)
172 XEXP (x, 1) = term;
173 else
174 x = gen_rtx_PLUS (mode, XEXP (x, 0), term);
175 c = 0;
176 }
177 else if (rtx *const_loc = find_constant_term_loc (&y))
178 {
179 if (!inplace)
180 {
181 /* We need to be careful since X may be shared and we can't
182 modify it in place. */
183 x = copy_rtx (x);
184 const_loc = find_constant_term_loc (&x);
185 }
186 *const_loc = plus_constant (mode, *const_loc, c, true);
187 c = 0;
188 }
189 break;
190
191 default:
192 if (CONST_POLY_INT_P (x))
193 return immed_wide_int_const (const_poly_int_value (x) + c, mode);
194 break;
195 }
196
197 if (maybe_ne (c, 0))
198 x = gen_rtx_PLUS (mode, x, gen_int_mode (c, mode));
199
200 if (GET_CODE (x) == SYMBOL_REF || GET_CODE (x) == LABEL_REF)
201 return x;
202 else if (all_constant)
203 return gen_rtx_CONST (mode, x);
204 else
205 return x;
206 }
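/* Usage sketch (illustrative): plus_constant folds constant terms
   instead of nesting a second PLUS.  Assuming ADDR is
   (plus (reg R) (const_int 4)) in Pmode,

     rtx t = plus_constant (Pmode, addr, 12);

   yields (plus (reg R) (const_int 16)), and a plain CONST_INT
   argument is folded directly via immed_wide_int_const.  */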
207 \f
208 /* If X is a sum, return a new sum like X but lacking any constant terms.
209 Add all the removed constant terms into *CONSTPTR.
210 X itself is not altered. The result != X if and only if
211 it is not isomorphic to X. */
212
213 rtx
214 eliminate_constant_term (rtx x, rtx *constptr)
215 {
216 rtx x0, x1;
217 rtx tem;
218
219 if (GET_CODE (x) != PLUS)
220 return x;
221
222 /* First handle constants appearing at this level explicitly. */
223 if (CONST_INT_P (XEXP (x, 1))
224 && (tem = simplify_binary_operation (PLUS, GET_MODE (x), *constptr,
225 XEXP (x, 1))) != 0
226 && CONST_INT_P (tem))
227 {
228 *constptr = tem;
229 return eliminate_constant_term (XEXP (x, 0), constptr);
230 }
231
232 tem = const0_rtx;
233 x0 = eliminate_constant_term (XEXP (x, 0), &tem);
234 x1 = eliminate_constant_term (XEXP (x, 1), &tem);
235 if ((x1 != XEXP (x, 1) || x0 != XEXP (x, 0))
236 && (tem = simplify_binary_operation (PLUS, GET_MODE (x),
237 *constptr, tem)) != 0
238 && CONST_INT_P (tem))
239 {
240 *constptr = tem;
241 return gen_rtx_PLUS (GET_MODE (x), x0, x1);
242 }
243
244 return x;
245 }
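/* Worked example (illustrative): for X = (plus (reg R) (const_int 8))
   and *CONSTPTR = const0_rtx on entry, the first branch folds
   0 + 8 into (const_int 8), stores it through CONSTPTR and recurses
   on (reg R), so the call returns (reg R) with *CONSTPTR =
   (const_int 8).  */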
246
247 \f
248 /* Return a copy of X in which all memory references
249 and all constants that involve symbol refs
250 have been replaced with new temporary registers.
251 Also emit code to load the memory locations and constants
252 into those registers.
253
254 If X contains no such constants or memory references,
255 X itself (not a copy) is returned.
256
257 If a constant is found in the address that is not a legitimate constant
258 in an insn, it is left alone in the hope that it might be valid in the
259 address.
260
261 X may contain no arithmetic except addition, subtraction and multiplication.
262 Values returned by expand_expr with 1 for sum_ok fit this constraint. */
263
264 static rtx
265 break_out_memory_refs (rtx x)
266 {
267 if (MEM_P (x)
268 || (CONSTANT_P (x) && CONSTANT_ADDRESS_P (x)
269 && GET_MODE (x) != VOIDmode))
270 x = force_reg (GET_MODE (x), x);
271 else if (GET_CODE (x) == PLUS || GET_CODE (x) == MINUS
272 || GET_CODE (x) == MULT)
273 {
274 rtx op0 = break_out_memory_refs (XEXP (x, 0));
275 rtx op1 = break_out_memory_refs (XEXP (x, 1));
276
277 if (op0 != XEXP (x, 0) || op1 != XEXP (x, 1))
278 x = simplify_gen_binary (GET_CODE (x), GET_MODE (x), op0, op1);
279 }
280
281 return x;
282 }
283
284 /* Given X, a memory address in address space AS' pointer mode, convert it to
285 an address in the address space's address mode, or vice versa (TO_MODE says
286 which way). We take advantage of the fact that pointers are not allowed to
287 overflow by commuting arithmetic operations over conversions so that address
288 arithmetic insns can be used. IN_CONST is true if this conversion is inside
289 a CONST. NO_EMIT is true if no insns should be emitted, and instead
290 it should return NULL if it can't be simplified without emitting insns. */
291
292 rtx
293 convert_memory_address_addr_space_1 (scalar_int_mode to_mode ATTRIBUTE_UNUSED,
294 rtx x, addr_space_t as ATTRIBUTE_UNUSED,
295 bool in_const ATTRIBUTE_UNUSED,
296 bool no_emit ATTRIBUTE_UNUSED)
297 {
298 #ifndef POINTERS_EXTEND_UNSIGNED
299 gcc_assert (GET_MODE (x) == to_mode || GET_MODE (x) == VOIDmode);
300 return x;
301 #else /* defined(POINTERS_EXTEND_UNSIGNED) */
302 scalar_int_mode pointer_mode, address_mode, from_mode;
303 rtx temp;
304 enum rtx_code code;
305
306 /* If X already has the right mode, just return it. */
307 if (GET_MODE (x) == to_mode)
308 return x;
309
310 pointer_mode = targetm.addr_space.pointer_mode (as);
311 address_mode = targetm.addr_space.address_mode (as);
312 from_mode = to_mode == pointer_mode ? address_mode : pointer_mode;
313
314 /* Here we handle some special cases. If none of them apply, fall through
315 to the default case. */
316 switch (GET_CODE (x))
317 {
318 CASE_CONST_SCALAR_INT:
319 if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode))
320 code = TRUNCATE;
321 else if (POINTERS_EXTEND_UNSIGNED < 0)
322 break;
323 else if (POINTERS_EXTEND_UNSIGNED > 0)
324 code = ZERO_EXTEND;
325 else
326 code = SIGN_EXTEND;
327 temp = simplify_unary_operation (code, to_mode, x, from_mode);
328 if (temp)
329 return temp;
330 break;
331
332 case SUBREG:
333 if ((SUBREG_PROMOTED_VAR_P (x) || REG_POINTER (SUBREG_REG (x)))
334 && GET_MODE (SUBREG_REG (x)) == to_mode)
335 return SUBREG_REG (x);
336 break;
337
338 case LABEL_REF:
339 temp = gen_rtx_LABEL_REF (to_mode, label_ref_label (x));
340 LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
341 return temp;
342
343 case SYMBOL_REF:
344 temp = shallow_copy_rtx (x);
345 PUT_MODE (temp, to_mode);
346 return temp;
347
348 case CONST:
349 temp = convert_memory_address_addr_space_1 (to_mode, XEXP (x, 0), as,
350 true, no_emit);
351 return temp ? gen_rtx_CONST (to_mode, temp) : temp;
352
353 case PLUS:
354 case MULT:
355 /* For addition we can safely permute the conversion and addition
356 operation if one operand is a constant and converting the constant
357 does not change it or if one operand is a constant and we are
358 using a ptr_extend instruction (POINTERS_EXTEND_UNSIGNED < 0).
359 We can always safely permute them if we are making the address
360 narrower. Inside a CONST RTL, this is safe whether pointers are
361 zero- or sign-extended, since pointers cannot wrap. */
362 if (GET_MODE_SIZE (to_mode) < GET_MODE_SIZE (from_mode)
363 || (GET_CODE (x) == PLUS
364 && CONST_INT_P (XEXP (x, 1))
365 && ((in_const && POINTERS_EXTEND_UNSIGNED != 0)
366 || XEXP (x, 1) == convert_memory_address_addr_space_1
367 (to_mode, XEXP (x, 1), as, in_const,
368 no_emit)
369 || POINTERS_EXTEND_UNSIGNED < 0)))
370 {
371 temp = convert_memory_address_addr_space_1 (to_mode, XEXP (x, 0),
372 as, in_const, no_emit);
373 return (temp ? gen_rtx_fmt_ee (GET_CODE (x), to_mode,
374 temp, XEXP (x, 1))
375 : temp);
376 }
377 break;
378
379 default:
380 break;
381 }
382
383 if (no_emit)
384 return NULL_RTX;
385
386 return convert_modes (to_mode, from_mode,
387 x, POINTERS_EXTEND_UNSIGNED);
388 #endif /* defined(POINTERS_EXTEND_UNSIGNED) */
389 }
390
391 /* Given X, a memory address in address space AS' pointer mode, convert it to
392 an address in the address space's address mode, or vice versa (TO_MODE says
393 which way). We take advantage of the fact that pointers are not allowed to
394 overflow by commuting arithmetic operations over conversions so that address
395 arithmetic insns can be used. */
396
397 rtx
398 convert_memory_address_addr_space (scalar_int_mode to_mode, rtx x,
399 addr_space_t as)
400 {
401 return convert_memory_address_addr_space_1 (to_mode, x, as, false, false);
402 }
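/* Illustrative sketch, assuming a target with POINTERS_EXTEND_UNSIGNED > 0
   whose pointer mode is SImode and whose address mode is DImode:
   converting (symbol_ref "x") to DImode merely shallow-copies the
   SYMBOL_REF with the new mode, while converting (const_int -1)
   zero-extends it to 0xffffffff through simplify_unary_operation.  */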
403 \f
404
405 /* Return something equivalent to X but valid as a memory address for something
406 of mode MODE in the named address space AS. When X is not itself valid,
407 this works by copying X or subexpressions of it into registers. */
408
409 rtx
410 memory_address_addr_space (machine_mode mode, rtx x, addr_space_t as)
411 {
412 rtx oldx = x;
413 scalar_int_mode address_mode = targetm.addr_space.address_mode (as);
414
415 x = convert_memory_address_addr_space (address_mode, x, as);
416
417 /* By passing constant addresses through registers
418 we get a chance to cse them. */
419 if (! cse_not_expected && CONSTANT_P (x) && CONSTANT_ADDRESS_P (x))
420 x = force_reg (address_mode, x);
421
422 /* We get better cse by rejecting indirect addressing at this stage.
423 Let the combiner create indirect addresses where appropriate.
424 For now, generate the code so that the subexpressions useful to share
425 are visible. But not if cse won't be done! */
426 else
427 {
428 if (! cse_not_expected && !REG_P (x))
429 x = break_out_memory_refs (x);
430
431 /* At this point, any valid address is accepted. */
432 if (memory_address_addr_space_p (mode, x, as))
433 goto done;
434
435 /* If it was valid before but breaking out memory refs invalidated it,
436 use it the old way. */
437 if (memory_address_addr_space_p (mode, oldx, as))
438 {
439 x = oldx;
440 goto done;
441 }
442
443 /* Perform machine-dependent transformations on X
444 in certain cases. This is not necessary since the code
445 below can handle all possible cases, but machine-dependent
446 transformations can make better code. */
447 {
448 rtx orig_x = x;
449 x = targetm.addr_space.legitimize_address (x, oldx, mode, as);
450 if (orig_x != x && memory_address_addr_space_p (mode, x, as))
451 goto done;
452 }
453
454 /* PLUS and MULT can appear in special ways
455 as the result of attempts to make an address usable for indexing.
456 Usually they are dealt with by calling force_operand, below.
457 But a sum containing constant terms is special
458 if removing them makes the sum a valid address:
459 then we generate that address in a register
460 and index off of it. We do this because it often makes
461 shorter code, and because the addresses thus generated
462 in registers often become common subexpressions. */
463 if (GET_CODE (x) == PLUS)
464 {
465 rtx constant_term = const0_rtx;
466 rtx y = eliminate_constant_term (x, &constant_term);
467 if (constant_term == const0_rtx
468 || ! memory_address_addr_space_p (mode, y, as))
469 x = force_operand (x, NULL_RTX);
470 else
471 {
472 y = gen_rtx_PLUS (GET_MODE (x), copy_to_reg (y), constant_term);
473 if (! memory_address_addr_space_p (mode, y, as))
474 x = force_operand (x, NULL_RTX);
475 else
476 x = y;
477 }
478 }
479
480 else if (GET_CODE (x) == MULT || GET_CODE (x) == MINUS)
481 x = force_operand (x, NULL_RTX);
482
483 /* If we have a register that's an invalid address,
484 it must be a hard reg of the wrong class. Copy it to a pseudo. */
485 else if (REG_P (x))
486 x = copy_to_reg (x);
487
488 /* Last resort: copy the value to a register, since
489 the register is a valid address. */
490 else
491 x = force_reg (address_mode, x);
492 }
493
494 done:
495
496 gcc_assert (memory_address_addr_space_p (mode, x, as));
497 /* If we didn't change the address, we are done. Otherwise, mark
498 a reg as a pointer if we have REG or REG + CONST_INT. */
499 if (oldx == x)
500 return x;
501 else if (REG_P (x))
502 mark_reg_pointer (x, BITS_PER_UNIT);
503 else if (GET_CODE (x) == PLUS
504 && REG_P (XEXP (x, 0))
505 && CONST_INT_P (XEXP (x, 1)))
506 mark_reg_pointer (XEXP (x, 0), BITS_PER_UNIT);
507
508 /* OLDX may have been the address on a temporary. Update the address
509 to indicate that X is now used. */
510 update_temp_slot_address (oldx, x);
511
512 return x;
513 }
514
515 /* Convert a mem ref into one with a valid memory address.
516 Pass through anything else unchanged. */
517
518 rtx
519 validize_mem (rtx ref)
520 {
521 if (!MEM_P (ref))
522 return ref;
523 ref = use_anchored_address (ref);
524 if (memory_address_addr_space_p (GET_MODE (ref), XEXP (ref, 0),
525 MEM_ADDR_SPACE (ref)))
526 return ref;
527
528 /* Don't alter REF itself, since that is probably a stack slot. */
529 return replace_equiv_address (ref, XEXP (ref, 0));
530 }
531
532 /* If X is a memory reference to a member of an object block, try rewriting
533 it to use an anchor instead. Return the new memory reference on success
534 and the old one on failure. */
535
536 rtx
537 use_anchored_address (rtx x)
538 {
539 rtx base;
540 HOST_WIDE_INT offset;
541 machine_mode mode;
542
543 if (!flag_section_anchors)
544 return x;
545
546 if (!MEM_P (x))
547 return x;
548
549 /* Split the address into a base and offset. */
550 base = XEXP (x, 0);
551 offset = 0;
552 if (GET_CODE (base) == CONST
553 && GET_CODE (XEXP (base, 0)) == PLUS
554 && CONST_INT_P (XEXP (XEXP (base, 0), 1)))
555 {
556 offset += INTVAL (XEXP (XEXP (base, 0), 1));
557 base = XEXP (XEXP (base, 0), 0);
558 }
559
560 /* Check whether BASE is suitable for anchors. */
561 if (GET_CODE (base) != SYMBOL_REF
562 || !SYMBOL_REF_HAS_BLOCK_INFO_P (base)
563 || SYMBOL_REF_ANCHOR_P (base)
564 || SYMBOL_REF_BLOCK (base) == NULL
565 || !targetm.use_anchors_for_symbol_p (base))
566 return x;
567
568 /* Decide where BASE is going to be. */
569 place_block_symbol (base);
570
571 /* Get the anchor we need to use. */
572 offset += SYMBOL_REF_BLOCK_OFFSET (base);
573 base = get_section_anchor (SYMBOL_REF_BLOCK (base), offset,
574 SYMBOL_REF_TLS_MODEL (base));
575
576 /* Work out the offset from the anchor. */
577 offset -= SYMBOL_REF_BLOCK_OFFSET (base);
578
579 /* If we're going to run a CSE pass, force the anchor into a register.
580 We will then be able to reuse registers for several accesses, if the
581 target costs say that that's worthwhile. */
582 mode = GET_MODE (base);
583 if (!cse_not_expected)
584 base = force_reg (mode, base);
585
586 return replace_equiv_address (x, plus_constant (mode, base, offset));
587 }
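/* Illustrative sketch: for a MEM whose address is
   (const (plus (symbol_ref "var") (const_int 4))), where "var" sits
   12 bytes into its object block, the code above asks for the section
   anchor covering block offset 16 and rewrites the access as that
   anchor plus the remaining distance, forcing the anchor into a
   register while CSE is still expected.  */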
588 \f
589 /* Copy the value or contents of X to a new temp reg and return that reg. */
590
591 rtx
592 copy_to_reg (rtx x)
593 {
594 rtx temp = gen_reg_rtx (GET_MODE (x));
595
596 /* If not an operand, it must be an address with PLUS and MULT,
597 so do the computation. */
598 if (! general_operand (x, VOIDmode))
599 x = force_operand (x, temp);
600
601 if (x != temp)
602 emit_move_insn (temp, x);
603
604 return temp;
605 }
606
607 /* Like copy_to_reg but always give the new register mode Pmode
608 in case X is a constant. */
609
610 rtx
611 copy_addr_to_reg (rtx x)
612 {
613 return copy_to_mode_reg (Pmode, x);
614 }
615
616 /* Like copy_to_reg but always give the new register mode MODE
617 in case X is a constant. */
618
619 rtx
620 copy_to_mode_reg (machine_mode mode, rtx x)
621 {
622 rtx temp = gen_reg_rtx (mode);
623
624 /* If not an operand, it must be an address with PLUS and MULT,
625 so do the computation. */
626 if (! general_operand (x, VOIDmode))
627 x = force_operand (x, temp);
628
629 gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
630 if (x != temp)
631 emit_move_insn (temp, x);
632 return temp;
633 }
634
635 /* Load X into a register if it is not already one.
636 Use mode MODE for the register.
637 X should be valid for mode MODE, but it may be a constant which
638 is valid for all integer modes; that's why caller must specify MODE.
639
640 The caller must not alter the value in the register we return,
641 since we mark it as a "constant" register. */
642
643 rtx
644 force_reg (machine_mode mode, rtx x)
645 {
646 rtx temp, set;
647 rtx_insn *insn;
648
649 if (REG_P (x))
650 return x;
651
652 if (general_operand (x, mode))
653 {
654 temp = gen_reg_rtx (mode);
655 insn = emit_move_insn (temp, x);
656 }
657 else
658 {
659 temp = force_operand (x, NULL_RTX);
660 if (REG_P (temp))
661 insn = get_last_insn ();
662 else
663 {
664 rtx temp2 = gen_reg_rtx (mode);
665 insn = emit_move_insn (temp2, temp);
666 temp = temp2;
667 }
668 }
669
670 /* Let optimizers know that TEMP's value never changes
671 and that X can be substituted for it. Don't get confused
672 if INSN set something else (such as a SUBREG of TEMP). */
673 if (CONSTANT_P (x)
674 && (set = single_set (insn)) != 0
675 && SET_DEST (set) == temp
676 && ! rtx_equal_p (x, SET_SRC (set)))
677 set_unique_reg_note (insn, REG_EQUAL, x);
678
679 /* Let optimizers know that TEMP is a pointer, and if so, the
680 known alignment of that pointer. */
681 {
682 unsigned align = 0;
683 if (GET_CODE (x) == SYMBOL_REF)
684 {
685 align = BITS_PER_UNIT;
686 if (SYMBOL_REF_DECL (x) && DECL_P (SYMBOL_REF_DECL (x)))
687 align = DECL_ALIGN (SYMBOL_REF_DECL (x));
688 }
689 else if (GET_CODE (x) == LABEL_REF)
690 align = BITS_PER_UNIT;
691 else if (GET_CODE (x) == CONST
692 && GET_CODE (XEXP (x, 0)) == PLUS
693 && GET_CODE (XEXP (XEXP (x, 0), 0)) == SYMBOL_REF
694 && CONST_INT_P (XEXP (XEXP (x, 0), 1)))
695 {
696 rtx s = XEXP (XEXP (x, 0), 0);
697 rtx c = XEXP (XEXP (x, 0), 1);
698 unsigned sa, ca;
699
700 sa = BITS_PER_UNIT;
701 if (SYMBOL_REF_DECL (s) && DECL_P (SYMBOL_REF_DECL (s)))
702 sa = DECL_ALIGN (SYMBOL_REF_DECL (s));
703
704 if (INTVAL (c) == 0)
705 align = sa;
706 else
707 {
708 ca = ctz_hwi (INTVAL (c)) * BITS_PER_UNIT;
709 align = MIN (sa, ca);
710 }
711 }
712
713 if (align || (MEM_P (x) && MEM_POINTER (x)))
714 mark_reg_pointer (temp, align);
715 }
716
717 return temp;
718 }
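/* Worked example for the alignment bookkeeping above (illustrative):
   for X = (const (plus (symbol_ref "s") (const_int 4))) where the
   decl behind "s" has DECL_ALIGN of 64 bits,

     sa    = 64
     ca    = ctz_hwi (4) * BITS_PER_UNIT  ==  2 * 8  ==  16
     align = MIN (sa, ca)                 ==  16

   so TEMP is marked as pointing to 16-bit-aligned storage.  */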
719
720 /* If X is a memory ref, copy its contents to a new temp reg and return
721 that reg. Otherwise, return X. */
722
723 rtx
724 force_not_mem (rtx x)
725 {
726 rtx temp;
727
728 if (!MEM_P (x) || GET_MODE (x) == BLKmode)
729 return x;
730
731 temp = gen_reg_rtx (GET_MODE (x));
732
733 if (MEM_POINTER (x))
734 REG_POINTER (temp) = 1;
735
736 emit_move_insn (temp, x);
737 return temp;
738 }
739
740 /* Copy X to TARGET (if it's nonzero and a reg)
741 or to a new temp reg and return that reg.
742 MODE is the mode to use for X in case it is a constant. */
743
744 rtx
745 copy_to_suggested_reg (rtx x, rtx target, machine_mode mode)
746 {
747 rtx temp;
748
749 if (target && REG_P (target))
750 temp = target;
751 else
752 temp = gen_reg_rtx (mode);
753
754 emit_move_insn (temp, x);
755 return temp;
756 }
757 \f
758 /* Return the mode to use to pass or return a scalar of TYPE and MODE.
759 PUNSIGNEDP points to the signedness of the type and may be adjusted
760 to show what signedness to use on extension operations.
761
762 FOR_RETURN is nonzero if the caller is promoting the return value
763 of the function whose type is FUNTYPE, else it is for promoting args. */
764
765 machine_mode
766 promote_function_mode (const_tree type, machine_mode mode, int *punsignedp,
767 const_tree funtype, int for_return)
768 {
769 /* Called without a type node for a libcall. */
770 if (type == NULL_TREE)
771 {
772 if (INTEGRAL_MODE_P (mode))
773 return targetm.calls.promote_function_mode (NULL_TREE, mode,
774 punsignedp, funtype,
775 for_return);
776 else
777 return mode;
778 }
779
780 switch (TREE_CODE (type))
781 {
782 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
783 case REAL_TYPE: case OFFSET_TYPE: case FIXED_POINT_TYPE:
784 case POINTER_TYPE: case REFERENCE_TYPE:
785 return targetm.calls.promote_function_mode (type, mode, punsignedp, funtype,
786 for_return);
787
788 default:
789 return mode;
790 }
791 }
792 /* Return the mode to use to store a scalar of TYPE and MODE.
793 PUNSIGNEDP points to the signedness of the type and may be adjusted
794 to show what signedness to use on extension operations. */
795
796 machine_mode
797 promote_mode (const_tree type ATTRIBUTE_UNUSED, machine_mode mode,
798 int *punsignedp ATTRIBUTE_UNUSED)
799 {
800 #ifdef PROMOTE_MODE
801 enum tree_code code;
802 int unsignedp;
803 scalar_mode smode;
804 #endif
805
806 /* For libcalls this is invoked without TYPE from the backend's
807 TARGET_PROMOTE_FUNCTION_MODE hooks. Don't do anything in that
808 case. */
809 if (type == NULL_TREE)
810 return mode;
811
812 /* FIXME: this is the same logic that was there until GCC 4.4, but we
813 probably want to test POINTERS_EXTEND_UNSIGNED even if PROMOTE_MODE
814 is not defined. The affected targets are M32C, S390, SPARC. */
815 #ifdef PROMOTE_MODE
816 code = TREE_CODE (type);
817 unsignedp = *punsignedp;
818
819 switch (code)
820 {
821 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
822 case REAL_TYPE: case OFFSET_TYPE: case FIXED_POINT_TYPE:
823 /* Values of these types always have scalar mode. */
824 smode = as_a <scalar_mode> (mode);
825 PROMOTE_MODE (smode, unsignedp, type);
826 *punsignedp = unsignedp;
827 return smode;
828
829 #ifdef POINTERS_EXTEND_UNSIGNED
830 case REFERENCE_TYPE:
831 case POINTER_TYPE:
832 *punsignedp = POINTERS_EXTEND_UNSIGNED;
833 return targetm.addr_space.address_mode
834 (TYPE_ADDR_SPACE (TREE_TYPE (type)));
835 #endif
836
837 default:
838 return mode;
839 }
840 #else
841 return mode;
842 #endif
843 }
844
845
846 /* Use one of promote_mode or promote_function_mode to find the promoted
847 mode of DECL. If PUNSIGNEDP is not NULL, store there the unsignedness
848 of DECL after promotion. */
849
850 machine_mode
851 promote_decl_mode (const_tree decl, int *punsignedp)
852 {
853 tree type = TREE_TYPE (decl);
854 int unsignedp = TYPE_UNSIGNED (type);
855 machine_mode mode = DECL_MODE (decl);
856 machine_mode pmode;
857
858 if (TREE_CODE (decl) == RESULT_DECL && !DECL_BY_REFERENCE (decl))
859 pmode = promote_function_mode (type, mode, &unsignedp,
860 TREE_TYPE (current_function_decl), 1);
861 else if (TREE_CODE (decl) == RESULT_DECL || TREE_CODE (decl) == PARM_DECL)
862 pmode = promote_function_mode (type, mode, &unsignedp,
863 TREE_TYPE (current_function_decl), 2);
864 else
865 pmode = promote_mode (type, mode, &unsignedp);
866
867 if (punsignedp)
868 *punsignedp = unsignedp;
869 return pmode;
870 }
871
872 /* Return the promoted mode for name. If it is a named SSA_NAME, it
873 is the same as promote_decl_mode. Otherwise, it is the promoted
874 mode of a temp decl of same type as the SSA_NAME, if we had created
875 one. */
876
877 machine_mode
878 promote_ssa_mode (const_tree name, int *punsignedp)
879 {
880 gcc_assert (TREE_CODE (name) == SSA_NAME);
881
882 /* Partitions holding parms and results must be promoted as expected
883 by function.c. */
884 if (SSA_NAME_VAR (name)
885 && (TREE_CODE (SSA_NAME_VAR (name)) == PARM_DECL
886 || TREE_CODE (SSA_NAME_VAR (name)) == RESULT_DECL))
887 {
888 machine_mode mode = promote_decl_mode (SSA_NAME_VAR (name), punsignedp);
889 if (mode != BLKmode)
890 return mode;
891 }
892
893 tree type = TREE_TYPE (name);
894 int unsignedp = TYPE_UNSIGNED (type);
895 machine_mode pmode = promote_mode (type, TYPE_MODE (type), &unsignedp);
896 if (punsignedp)
897 *punsignedp = unsignedp;
898
899 return pmode;
900 }
901
902
903 \f
904 /* Controls the behavior of {anti_,}adjust_stack. */
905 static bool suppress_reg_args_size;
906
907 /* A helper for adjust_stack and anti_adjust_stack. */
908
909 static void
910 adjust_stack_1 (rtx adjust, bool anti_p)
911 {
912 rtx temp;
913 rtx_insn *insn;
914
915 /* Hereafter anti_p means subtract_p. */
916 if (!STACK_GROWS_DOWNWARD)
917 anti_p = !anti_p;
918
919 temp = expand_binop (Pmode,
920 anti_p ? sub_optab : add_optab,
921 stack_pointer_rtx, adjust, stack_pointer_rtx, 0,
922 OPTAB_LIB_WIDEN);
923
924 if (temp != stack_pointer_rtx)
925 insn = emit_move_insn (stack_pointer_rtx, temp);
926 else
927 {
928 insn = get_last_insn ();
929 temp = single_set (insn);
930 gcc_assert (temp != NULL && SET_DEST (temp) == stack_pointer_rtx);
931 }
932
933 if (!suppress_reg_args_size)
934 add_args_size_note (insn, stack_pointer_delta);
935 }
936
937 /* Adjust the stack pointer by ADJUST (an rtx for a number of bytes).
938 This pops when ADJUST is positive. ADJUST need not be constant. */
939
940 void
941 adjust_stack (rtx adjust)
942 {
943 if (adjust == const0_rtx)
944 return;
945
946 /* We expect all variable sized adjustments to be a multiple of
947 PREFERRED_STACK_BOUNDARY. */
948 poly_int64 const_adjust;
949 if (poly_int_rtx_p (adjust, &const_adjust))
950 stack_pointer_delta -= const_adjust;
951
952 adjust_stack_1 (adjust, false);
953 }
954
955 /* Adjust the stack pointer by minus ADJUST (an rtx for a number of bytes).
956 This pushes when ADJUST is positive. ADJUST need not be constant. */
957
958 void
959 anti_adjust_stack (rtx adjust)
960 {
961 if (adjust == const0_rtx)
962 return;
963
964 /* We expect all variable sized adjustments to be a multiple of
965 PREFERRED_STACK_BOUNDARY. */
966 poly_int64 const_adjust;
967 if (poly_int_rtx_p (adjust, &const_adjust))
968 stack_pointer_delta += const_adjust;
969
970 adjust_stack_1 (adjust, true);
971 }
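/* Note (illustrative), for a STACK_GROWS_DOWNWARD target:
   anti_adjust_stack (GEN_INT (32)) emits SP := SP - 32 (a push) and
   increases stack_pointer_delta by 32, while adjust_stack
   (GEN_INT (32)) emits SP := SP + 32 (a pop) and decreases it.  */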
972
973 /* Round the size of a block to be pushed up to the boundary required
974 by this machine. SIZE is the desired size, which need not be constant. */
975
976 static rtx
977 round_push (rtx size)
978 {
979 rtx align_rtx, alignm1_rtx;
980
981 if (!SUPPORTS_STACK_ALIGNMENT
982 || crtl->preferred_stack_boundary == MAX_SUPPORTED_STACK_ALIGNMENT)
983 {
984 int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
985
986 if (align == 1)
987 return size;
988
989 if (CONST_INT_P (size))
990 {
991 HOST_WIDE_INT new_size = (INTVAL (size) + align - 1) / align * align;
992
993 if (INTVAL (size) != new_size)
994 size = GEN_INT (new_size);
995 return size;
996 }
997
998 align_rtx = GEN_INT (align);
999 alignm1_rtx = GEN_INT (align - 1);
1000 }
1001 else
1002 {
1003 /* If crtl->preferred_stack_boundary might still grow, use
1004 virtual_preferred_stack_boundary_rtx instead. This will be
1005 substituted by the right value in vregs pass and optimized
1006 during combine. */
1007 align_rtx = virtual_preferred_stack_boundary_rtx;
1008 alignm1_rtx = force_operand (plus_constant (Pmode, align_rtx, -1),
1009 NULL_RTX);
1010 }
1011
1012 /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
1013 but we know it can't. So do the addition ourselves and then use
1014 TRUNC_DIV_EXPR. */
1015 size = expand_binop (Pmode, add_optab, size, alignm1_rtx,
1016 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1017 size = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, size, align_rtx,
1018 NULL_RTX, 1);
1019 size = expand_mult (Pmode, size, align_rtx, NULL_RTX, 1);
1020
1021 return size;
1022 }
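/* Worked example (illustrative): with a preferred stack boundary of
   64 bits, align == 8, so a constant SIZE of 5 is rounded to
   (5 + 8 - 1) / 8 * 8 == 8, while a variable SIZE goes through the
   add/divide/multiply sequence above at run time.  */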
1023 \f
1024 /* Save the stack pointer for the purpose in SAVE_LEVEL. PSAVE is a pointer
1025 to a previously-created save area. If no save area has been allocated,
1026 this function will allocate one. If a save area is specified, it
1027 must be of the proper mode. */
1028
1029 void
1030 emit_stack_save (enum save_level save_level, rtx *psave)
1031 {
1032 rtx sa = *psave;
1033 /* The default is that we use a move insn and save in a Pmode object. */
1034 rtx_insn *(*fcn) (rtx, rtx) = gen_move_insn;
1035 machine_mode mode = STACK_SAVEAREA_MODE (save_level);
1036
1037 /* See if this machine has anything special to do for this kind of save. */
1038 switch (save_level)
1039 {
1040 case SAVE_BLOCK:
1041 if (targetm.have_save_stack_block ())
1042 fcn = targetm.gen_save_stack_block;
1043 break;
1044 case SAVE_FUNCTION:
1045 if (targetm.have_save_stack_function ())
1046 fcn = targetm.gen_save_stack_function;
1047 break;
1048 case SAVE_NONLOCAL:
1049 if (targetm.have_save_stack_nonlocal ())
1050 fcn = targetm.gen_save_stack_nonlocal;
1051 break;
1052 default:
1053 break;
1054 }
1055
1056 /* If there is no save area and we have to allocate one, do so. Otherwise
1057 verify the save area is the proper mode. */
1058
1059 if (sa == 0)
1060 {
1061 if (mode != VOIDmode)
1062 {
1063 if (save_level == SAVE_NONLOCAL)
1064 *psave = sa = assign_stack_local (mode, GET_MODE_SIZE (mode), 0);
1065 else
1066 *psave = sa = gen_reg_rtx (mode);
1067 }
1068 }
1069
1070 do_pending_stack_adjust ();
1071 if (sa != 0)
1072 sa = validize_mem (sa);
1073 emit_insn (fcn (sa, stack_pointer_rtx));
1074 }
1075
1076 /* Restore the stack pointer for the purpose in SAVE_LEVEL. SA is the save
1077 area made by emit_stack_save. If it is zero, we have nothing to do. */
1078
1079 void
1080 emit_stack_restore (enum save_level save_level, rtx sa)
1081 {
1082 /* The default is that we use a move insn. */
1083 rtx_insn *(*fcn) (rtx, rtx) = gen_move_insn;
1084
1085 /* If stack_realign_drap, the x86 backend emits a prologue that aligns both
1086 STACK_POINTER and HARD_FRAME_POINTER.
1087 If stack_realign_fp, the x86 backend emits a prologue that aligns only
1088 STACK_POINTER. This renders the HARD_FRAME_POINTER unusable for accessing
1089 aligned variables, which is reflected in ix86_can_eliminate.
1090 We normally still have the realigned STACK_POINTER that we can use.
1091 But if there is a stack restore still present at reload, it can trigger
1092 mark_not_eliminable for the STACK_POINTER, leaving no way to eliminate
1093 FRAME_POINTER into a hard reg.
1094 To prevent this situation, we force need_drap if we emit a stack
1095 restore. */
1096 if (SUPPORTS_STACK_ALIGNMENT)
1097 crtl->need_drap = true;
1098
1099 /* See if this machine has anything special to do for this kind of save. */
1100 switch (save_level)
1101 {
1102 case SAVE_BLOCK:
1103 if (targetm.have_restore_stack_block ())
1104 fcn = targetm.gen_restore_stack_block;
1105 break;
1106 case SAVE_FUNCTION:
1107 if (targetm.have_restore_stack_function ())
1108 fcn = targetm.gen_restore_stack_function;
1109 break;
1110 case SAVE_NONLOCAL:
1111 if (targetm.have_restore_stack_nonlocal ())
1112 fcn = targetm.gen_restore_stack_nonlocal;
1113 break;
1114 default:
1115 break;
1116 }
1117
1118 if (sa != 0)
1119 {
1120 sa = validize_mem (sa);
1121 /* These clobbers prevent the scheduler from moving
1122 references to variable arrays below the code
1123 that deletes (pops) the arrays. */
1124 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1125 emit_clobber (gen_rtx_MEM (BLKmode, stack_pointer_rtx));
1126 }
1127
1128 discard_pending_stack_adjust ();
1129
1130 emit_insn (fcn (stack_pointer_rtx, sa));
1131 }
1132
1133 /* Invoke emit_stack_save on the nonlocal_goto_save_area for the current
1134 function. This should be called whenever we allocate or deallocate
1135 dynamic stack space. */
1136
1137 void
1138 update_nonlocal_goto_save_area (void)
1139 {
1140 tree t_save;
1141 rtx r_save;
1142
1143 /* The nonlocal_goto_save_area object is an array of N pointers. The
1144 first one is used for the frame pointer save; the rest are sized by
1145 STACK_SAVEAREA_MODE. Create a reference to array index 1, the first
1146 of the stack save area slots. */
1147 t_save = build4 (ARRAY_REF,
1148 TREE_TYPE (TREE_TYPE (cfun->nonlocal_goto_save_area)),
1149 cfun->nonlocal_goto_save_area,
1150 integer_one_node, NULL_TREE, NULL_TREE);
1151 r_save = expand_expr (t_save, NULL_RTX, VOIDmode, EXPAND_WRITE);
1152
1153 emit_stack_save (SAVE_NONLOCAL, &r_save);
1154 }
1155
1156 /* Record a new stack level for the current function. This should be called
1157 whenever we allocate or deallocate dynamic stack space. */
1158
1159 void
1160 record_new_stack_level (void)
1161 {
1162 /* Record the new stack level for nonlocal gotos. */
1163 if (cfun->nonlocal_goto_save_area)
1164 update_nonlocal_goto_save_area ();
1165
1166 /* Record the new stack level for SJLJ exceptions. */
1167 if (targetm_common.except_unwind_info (&global_options) == UI_SJLJ)
1168 update_sjlj_context ();
1169 }
1170
1171 /* Return an rtx doing runtime alignment to REQUIRED_ALIGN on TARGET. */
1172
1173 rtx
1174 align_dynamic_address (rtx target, unsigned required_align)
1175 {
1176 /* CEIL_DIV_EXPR needs to worry about the addition overflowing,
1177 but we know it can't. So do the addition ourselves and then use
1178 TRUNC_DIV_EXPR. */
1179 target = expand_binop (Pmode, add_optab, target,
1180 gen_int_mode (required_align / BITS_PER_UNIT - 1,
1181 Pmode),
1182 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1183 target = expand_divmod (0, TRUNC_DIV_EXPR, Pmode, target,
1184 gen_int_mode (required_align / BITS_PER_UNIT,
1185 Pmode),
1186 NULL_RTX, 1);
1187 target = expand_mult (Pmode, target,
1188 gen_int_mode (required_align / BITS_PER_UNIT,
1189 Pmode),
1190 NULL_RTX, 1);
1191
1192 return target;
1193 }
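/* Worked example (illustrative): with REQUIRED_ALIGN == 128 bits
   (16 bytes), a TARGET value of 0x1003 is rounded to
   (0x1003 + 15) / 16 * 16 == 0x1010, the next 16-byte boundary.  */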
1194
1195 /* Return an rtx through *PSIZE, representing the size of an area of memory to
1196 be dynamically pushed on the stack.
1197
1198 *PSIZE is an rtx representing the size of the area.
1199
1200 SIZE_ALIGN is the alignment (in bits) that we know SIZE has. This
1201 parameter may be zero. If so, a proper value will be extracted
1202 from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.
1203
1204 REQUIRED_ALIGN is the alignment (in bits) required for the region
1205 of memory.
1206
1207 If PSTACK_USAGE_SIZE is not NULL it points to a value that is increased for
1208 the additional size returned. */
1209 void
1210 get_dynamic_stack_size (rtx *psize, unsigned size_align,
1211 unsigned required_align,
1212 HOST_WIDE_INT *pstack_usage_size)
1213 {
1214 rtx size = *psize;
1215
1216 /* Ensure the size is in the proper mode. */
1217 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
1218 size = convert_to_mode (Pmode, size, 1);
1219
1220 if (CONST_INT_P (size))
1221 {
1222 unsigned HOST_WIDE_INT lsb;
1223
1224 lsb = INTVAL (size);
1225 lsb &= -lsb;
1226
1227 /* Watch out for overflow truncating to "unsigned". */
1228 if (lsb > UINT_MAX / BITS_PER_UNIT)
1229 size_align = 1u << (HOST_BITS_PER_INT - 1);
1230 else
1231 size_align = (unsigned)lsb * BITS_PER_UNIT;
1232 }
1233 else if (size_align < BITS_PER_UNIT)
1234 size_align = BITS_PER_UNIT;
1235
1236 /* We can't attempt to minimize the necessary alignment, because we don't
1237 know the final value of preferred_stack_boundary yet while executing
1238 this code. */
1239 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
1240 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
1241
1242 /* We will need to ensure that the address we return is aligned to
1243 REQUIRED_ALIGN. At this point in the compilation, we don't always
1244 know the final value of the STACK_DYNAMIC_OFFSET used in function.c
1245 (it might depend on the size of the outgoing parameter lists, for
1246 example), so we must preventively align the value. We leave space
1247 in SIZE for the hole that might result from the alignment operation. */
1248
1249 unsigned known_align = REGNO_POINTER_ALIGN (VIRTUAL_STACK_DYNAMIC_REGNUM);
1250 if (known_align == 0)
1251 known_align = BITS_PER_UNIT;
1252 if (required_align > known_align)
1253 {
1254 unsigned extra = (required_align - known_align) / BITS_PER_UNIT;
1255 size = plus_constant (Pmode, size, extra);
1256 size = force_operand (size, NULL_RTX);
1257 if (size_align > known_align)
1258 size_align = known_align;
1259
1260 if (flag_stack_usage_info && pstack_usage_size)
1261 *pstack_usage_size += extra;
1262 }
1263
1264 /* Round the size to a multiple of the required stack alignment.
1265 Since the stack is presumed to be rounded before this allocation,
1266 this will maintain the required alignment.
1267
1268 If the stack grows downward, we could save an insn by subtracting
1269 SIZE from the stack pointer and then aligning the stack pointer.
1270 The problem with this is that the stack pointer may be unaligned
1271 between the execution of the subtraction and alignment insns and
1272 some machines do not allow this. Even on those that do, some
1273 signal handlers malfunction if a signal should occur between those
1274 insns. Since this is an extremely rare event, we have no reliable
1275 way of knowing which systems have this problem. So we avoid even
1276 momentarily mis-aligning the stack. */
1277 if (size_align % MAX_SUPPORTED_STACK_ALIGNMENT != 0)
1278 {
1279 size = round_push (size);
1280
1281 if (flag_stack_usage_info && pstack_usage_size)
1282 {
1283 int align = crtl->preferred_stack_boundary / BITS_PER_UNIT;
1284 *pstack_usage_size =
1285 (*pstack_usage_size + align - 1) / align * align;
1286 }
1287 }
1288
1289 *psize = size;
1290 }
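/* Worked example (illustrative): if REQUIRED_ALIGN is 256 bits and
   only BITS_PER_UNIT alignment is known for the dynamic area,
   extra == (256 - 8) / 8 == 31 bytes are added to SIZE so that the
   later alignment of the address can be absorbed by the hole.  */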
1291
1292 /* Return the number of bytes to "protect" on the stack for -fstack-check.
1293
1294 "protect" in the context of -fstack-check means how many bytes we
1295 should always ensure are available on the stack. More importantly
1296 this is how many bytes are skipped when probing the stack.
1297
1298 On some targets we want to reuse the -fstack-check prologue support
1299 to give a degree of protection against stack clashing style attacks.
1300
1301 In that scenario we do not want to skip bytes before probing as that
1302 would render the stack clash protections useless.
1303
1304 So we never use STACK_CHECK_PROTECT directly. Instead we indirect through
1305 this helper which allows us to provide different values for
1306 -fstack-check and -fstack-clash-protection. */
1307 HOST_WIDE_INT
1308 get_stack_check_protect (void)
1309 {
1310 if (flag_stack_clash_protection)
1311 return 0;
1312 return STACK_CHECK_PROTECT;
1313 }
1314
1315 /* Return an rtx representing the address of an area of memory dynamically
1316 pushed on the stack.
1317
1318 Any required stack pointer alignment is preserved.
1319
1320 SIZE is an rtx representing the size of the area.
1321
1322 SIZE_ALIGN is the alignment (in bits) that we know SIZE has. This
1323 parameter may be zero. If so, a proper value will be extracted
1324 from SIZE if it is constant, otherwise BITS_PER_UNIT will be assumed.
1325
1326 REQUIRED_ALIGN is the alignment (in bits) required for the region
1327 of memory.
1328
1329 MAX_SIZE is an upper bound for SIZE, if SIZE is not constant, or -1 if
1330 no such upper bound is known.
1331
1332 If CANNOT_ACCUMULATE is set to TRUE, the caller guarantees that the
1333 stack space allocated by the generated code cannot be added with itself
1334 in the course of the execution of the function. It is always safe to
1335 pass FALSE here and the following criterion is sufficient in order to
1336 pass TRUE: every path in the CFG that starts at the allocation point and
1337 loops to it executes the associated deallocation code. */
1338
1339 rtx
1340 allocate_dynamic_stack_space (rtx size, unsigned size_align,
1341 unsigned required_align,
1342 HOST_WIDE_INT max_size,
1343 bool cannot_accumulate)
1344 {
1345 HOST_WIDE_INT stack_usage_size = -1;
1346 rtx_code_label *final_label;
1347 rtx final_target, target;
1348
1349 /* If we're asking for zero bytes, it doesn't matter what we point
1350 to since we can't dereference it. But return a reasonable
1351 address anyway. */
1352 if (size == const0_rtx)
1353 return virtual_stack_dynamic_rtx;
1354
1355 /* Otherwise, show we're calling alloca or equivalent. */
1356 cfun->calls_alloca = 1;
1357
1358 /* If stack usage info is requested, look into the size we are passed.
1359 We need to do so this early to avoid the obfuscation that may be
1360 introduced later by the various alignment operations. */
1361 if (flag_stack_usage_info)
1362 {
1363 if (CONST_INT_P (size))
1364 stack_usage_size = INTVAL (size);
1365 else if (REG_P (size))
1366 {
1367 /* Look into the last emitted insn and see if we can deduce
1368 something for the register. */
1369 rtx_insn *insn;
1370 rtx set, note;
1371 insn = get_last_insn ();
1372 if ((set = single_set (insn)) && rtx_equal_p (SET_DEST (set), size))
1373 {
1374 if (CONST_INT_P (SET_SRC (set)))
1375 stack_usage_size = INTVAL (SET_SRC (set));
1376 else if ((note = find_reg_equal_equiv_note (insn))
1377 && CONST_INT_P (XEXP (note, 0)))
1378 stack_usage_size = INTVAL (XEXP (note, 0));
1379 }
1380 }
1381
1382 /* If the size is not constant, try the maximum size. */
1383 if (stack_usage_size < 0)
1384 stack_usage_size = max_size;
1385
1386 /* If the size is still not constant, we can't say anything. */
1387 if (stack_usage_size < 0)
1388 {
1389 current_function_has_unbounded_dynamic_stack_size = 1;
1390 stack_usage_size = 0;
1391 }
1392 }
1393
1394 get_dynamic_stack_size (&size, size_align, required_align, &stack_usage_size);
1395
1396 target = gen_reg_rtx (Pmode);
1397
1398 /* The size is supposed to be fully adjusted at this point so record it
1399 if stack usage info is requested. */
1400 if (flag_stack_usage_info)
1401 {
1402 current_function_dynamic_stack_size += stack_usage_size;
1403
1404 /* ??? This is gross but the only safe stance in the absence
1405 of stack usage oriented flow analysis. */
1406 if (!cannot_accumulate)
1407 current_function_has_unbounded_dynamic_stack_size = 1;
1408 }
1409
1410 do_pending_stack_adjust ();
1411
1412 final_label = NULL;
1413 final_target = NULL_RTX;
1414
1415 /* If we are splitting the stack, we need to ask the backend whether
1416 there is enough room on the current stack. If there isn't, or if
1417 the backend doesn't know how to tell us, then we need to call a
1418 function to allocate memory in some other way. This memory will
1419 be released when we release the current stack segment. The
1420 effect is that stack allocation becomes less efficient, but at
1421 least it doesn't cause a stack overflow. */
1422 if (flag_split_stack)
1423 {
1424 rtx_code_label *available_label;
1425 rtx ask, space, func;
1426
1427 available_label = NULL;
1428
1429 if (targetm.have_split_stack_space_check ())
1430 {
1431 available_label = gen_label_rtx ();
1432
1433 /* This instruction will branch to AVAILABLE_LABEL if there
1434 are SIZE bytes available on the stack. */
1435 emit_insn (targetm.gen_split_stack_space_check
1436 (size, available_label));
1437 }
1438
1439 /* The __morestack_allocate_stack_space function will allocate
1440 memory using malloc. If the alignment of the memory returned
1441 by malloc does not meet REQUIRED_ALIGN, we increase SIZE to
1442 make sure we allocate enough space. */
1443 if (MALLOC_ABI_ALIGNMENT >= required_align)
1444 ask = size;
1445 else
1446 ask = expand_binop (Pmode, add_optab, size,
1447 gen_int_mode (required_align / BITS_PER_UNIT - 1,
1448 Pmode),
1449 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1450
1451 func = init_one_libfunc ("__morestack_allocate_stack_space");
1452
1453 space = emit_library_call_value (func, target, LCT_NORMAL, Pmode,
1454 ask, Pmode);
1455
1456 if (available_label == NULL_RTX)
1457 return space;
1458
1459 final_target = gen_reg_rtx (Pmode);
1460
1461 emit_move_insn (final_target, space);
1462
1463 final_label = gen_label_rtx ();
1464 emit_jump (final_label);
1465
1466 emit_label (available_label);
1467 }
1468
1469 /* We ought always to be called at the top level, and the stack ought
1470 to be aligned properly. */
1471 gcc_assert (multiple_p (stack_pointer_delta,
1472 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT));
1473
1474 /* If needed, check that we have the required amount of stack. Take into
1475 account what has already been checked. */
1476 if (STACK_CHECK_MOVING_SP)
1477 ;
1478 else if (flag_stack_check == GENERIC_STACK_CHECK)
1479 probe_stack_range (STACK_OLD_CHECK_PROTECT + STACK_CHECK_MAX_FRAME_SIZE,
1480 size);
1481 else if (flag_stack_check == STATIC_BUILTIN_STACK_CHECK)
1482 probe_stack_range (get_stack_check_protect (), size);
1483
1484 /* Don't let anti_adjust_stack emit notes. */
1485 suppress_reg_args_size = true;
1486
1487 /* Perform the required allocation from the stack. Some systems do
1488 this differently than simply incrementing/decrementing from the
1489 stack pointer, such as acquiring the space by calling malloc(). */
1490 if (targetm.have_allocate_stack ())
1491 {
1492 class expand_operand ops[2];
1493 /* We don't have to check against the predicate for operand 0 since
1494 TARGET is known to be a pseudo of the proper mode, which must
1495 be valid for the operand. */
1496 create_fixed_operand (&ops[0], target);
1497 create_convert_operand_to (&ops[1], size, STACK_SIZE_MODE, true);
1498 expand_insn (targetm.code_for_allocate_stack, 2, ops);
1499 }
1500 else
1501 {
1502 poly_int64 saved_stack_pointer_delta;
1503
1504 if (!STACK_GROWS_DOWNWARD)
1505 emit_move_insn (target, virtual_stack_dynamic_rtx);
1506
1507 /* Check stack bounds if necessary. */
1508 if (crtl->limit_stack)
1509 {
1510 rtx available;
1511 rtx_code_label *space_available = gen_label_rtx ();
1512 if (STACK_GROWS_DOWNWARD)
1513 available = expand_binop (Pmode, sub_optab,
1514 stack_pointer_rtx, stack_limit_rtx,
1515 NULL_RTX, 1, OPTAB_WIDEN);
1516 else
1517 available = expand_binop (Pmode, sub_optab,
1518 stack_limit_rtx, stack_pointer_rtx,
1519 NULL_RTX, 1, OPTAB_WIDEN);
1520
1521 emit_cmp_and_jump_insns (available, size, GEU, NULL_RTX, Pmode, 1,
1522 space_available);
1523 if (targetm.have_trap ())
1524 emit_insn (targetm.gen_trap ());
1525 else
1526 error ("stack limits not supported on this target");
1527 emit_barrier ();
1528 emit_label (space_available);
1529 }
1530
1531 saved_stack_pointer_delta = stack_pointer_delta;
1532
1533 if (flag_stack_check && STACK_CHECK_MOVING_SP)
1534 anti_adjust_stack_and_probe (size, false);
1535 else if (flag_stack_clash_protection)
1536 anti_adjust_stack_and_probe_stack_clash (size);
1537 else
1538 anti_adjust_stack (size);
1539
1540 /* Even if size is constant, don't modify stack_pointer_delta.
1541 The constant size alloca should preserve
1542 crtl->preferred_stack_boundary alignment. */
1543 stack_pointer_delta = saved_stack_pointer_delta;
1544
1545 if (STACK_GROWS_DOWNWARD)
1546 emit_move_insn (target, virtual_stack_dynamic_rtx);
1547 }
1548
1549 suppress_reg_args_size = false;
1550
1551 /* Finish up the split stack handling. */
1552 if (final_label != NULL_RTX)
1553 {
1554 gcc_assert (flag_split_stack);
1555 emit_move_insn (final_target, target);
1556 emit_label (final_label);
1557 target = final_target;
1558 }
1559
1560 target = align_dynamic_address (target, required_align);
1561
1562 /* Now that we've committed to a return value, mark its alignment. */
1563 mark_reg_pointer (target, required_align);
1564
1565 /* Record the new stack level. */
1566 record_new_stack_level ();
1567
1568 return target;
1569 }
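/* Usage sketch (illustrative): expanding a constant 64-byte
   allocation, 64-bit aligned, with no known upper bound and a
   guarantee that the allocation cannot accumulate:

     rtx addr = allocate_dynamic_stack_space (GEN_INT (64), 0, 64,
                                              -1, true);

   ADDR is a fresh Pmode pseudo, already aligned and marked as a
   pointer, and the new stack level has been recorded.  */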
1570
1571 /* Return an rtx representing the address of an area of memory already
1572 statically pushed onto the stack in the virtual stack vars area. (It is
1573 assumed that the area is allocated in the function prologue.)
1574
1575 Any required stack pointer alignment is preserved.
1576
1577 OFFSET is the offset of the area into the virtual stack vars area.
1578
1579 REQUIRED_ALIGN is the alignment (in bits) required for the region
1580 of memory. */
1581
1582 rtx
1583 get_dynamic_stack_base (poly_int64 offset, unsigned required_align)
1584 {
1585 rtx target;
1586
1587 if (crtl->preferred_stack_boundary < PREFERRED_STACK_BOUNDARY)
1588 crtl->preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
1589
1590 target = gen_reg_rtx (Pmode);
1591 emit_move_insn (target, virtual_stack_vars_rtx);
1592 target = expand_binop (Pmode, add_optab, target,
1593 gen_int_mode (offset, Pmode),
1594 NULL_RTX, 1, OPTAB_LIB_WIDEN);
1595 target = align_dynamic_address (target, required_align);
1596
1597 /* Now that we've committed to a return value, mark its alignment. */
1598 mark_reg_pointer (target, required_align);
1599
1600 return target;
1601 }
1602 \f
1603 /* A front end may want to override GCC's stack checking by providing a
1604 run-time routine to call to check the stack, so provide a mechanism for
1605 calling that routine. */
1606
1607 static GTY(()) rtx stack_check_libfunc;
1608
1609 void
1610 set_stack_check_libfunc (const char *libfunc_name)
1611 {
1612 gcc_assert (stack_check_libfunc == NULL_RTX);
1613 stack_check_libfunc = gen_rtx_SYMBOL_REF (Pmode, libfunc_name);
1614 }
1615 \f
1616 /* Emit one stack probe at ADDRESS, an address within the stack. */
1617
1618 void
1619 emit_stack_probe (rtx address)
1620 {
1621 if (targetm.have_probe_stack_address ())
1622 {
1623 class expand_operand ops[1];
1624 insn_code icode = targetm.code_for_probe_stack_address;
1625 create_address_operand (ops, address);
1626 maybe_legitimize_operands (icode, 0, 1, ops);
1627 expand_insn (icode, 1, ops);
1628 }
1629 else
1630 {
1631 rtx memref = gen_rtx_MEM (word_mode, address);
1632
1633 MEM_VOLATILE_P (memref) = 1;
1634 memref = validize_mem (memref);
1635
1636 /* See if we have an insn to probe the stack. */
1637 if (targetm.have_probe_stack ())
1638 emit_insn (targetm.gen_probe_stack (memref));
1639 else
1640 emit_move_insn (memref, const0_rtx);
1641 }
1642 }
1643
1644 /* Probe a range of stack addresses from FIRST to FIRST+SIZE, inclusive.
1645 FIRST is a constant and size is a Pmode RTX. These are offsets from
1646 the current stack pointer. STACK_GROWS_DOWNWARD says whether to add
1647 or subtract them from the stack pointer. */
1648
1649 #define PROBE_INTERVAL (1 << STACK_CHECK_PROBE_INTERVAL_EXP)
1650
1651 #if STACK_GROWS_DOWNWARD
1652 #define STACK_GROW_OP MINUS
1653 #define STACK_GROW_OPTAB sub_optab
1654 #define STACK_GROW_OFF(off) -(off)
1655 #else
1656 #define STACK_GROW_OP PLUS
1657 #define STACK_GROW_OPTAB add_optab
1658 #define STACK_GROW_OFF(off) (off)
1659 #endif
1660
1661 void
1662 probe_stack_range (HOST_WIDE_INT first, rtx size)
1663 {
1664 /* First ensure SIZE is Pmode. */
1665 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
1666 size = convert_to_mode (Pmode, size, 1);
1667
1668 /* Next see if we have a function to check the stack. */
1669 if (stack_check_libfunc)
1670 {
1671 rtx addr = memory_address (Pmode,
1672 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1673 stack_pointer_rtx,
1674 plus_constant (Pmode,
1675 size, first)));
1676 emit_library_call (stack_check_libfunc, LCT_THROW, VOIDmode,
1677 addr, Pmode);
1678 }
1679
1680 /* Next see if we have an insn to check the stack. */
1681 else if (targetm.have_check_stack ())
1682 {
1683 class expand_operand ops[1];
1684 rtx addr = memory_address (Pmode,
1685 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1686 stack_pointer_rtx,
1687 plus_constant (Pmode,
1688 size, first)));
1689 bool success;
1690 create_input_operand (&ops[0], addr, Pmode);
1691 success = maybe_expand_insn (targetm.code_for_check_stack, 1, ops);
1692 gcc_assert (success);
1693 }
1694
1695 /* Otherwise we have to generate explicit probes. If we have a constant
1696 small number of them to generate, that's the easy case. */
1697 else if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
1698 {
1699 HOST_WIDE_INT isize = INTVAL (size), i;
1700 rtx addr;
1701
1702 /* Probe at FIRST + N * PROBE_INTERVAL for values of N from 1 until
1703 it exceeds SIZE. If only one probe is needed, this will not
1704 generate any code. Then probe at FIRST + SIZE. */
1705 for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
1706 {
1707 addr = memory_address (Pmode,
1708 plus_constant (Pmode, stack_pointer_rtx,
1709 STACK_GROW_OFF (first + i)));
1710 emit_stack_probe (addr);
1711 }
1712
1713 addr = memory_address (Pmode,
1714 plus_constant (Pmode, stack_pointer_rtx,
1715 STACK_GROW_OFF (first + isize)));
1716 emit_stack_probe (addr);
1717 }
1718
1719 /* In the variable case, do the same as above, but in a loop. Note that we
1720 must be extra careful with variables wrapping around because we might be
1721 at the very top (or the very bottom) of the address space and we have to
1722 be able to handle this case properly; in particular, we use an equality
1723 test for the loop condition. */
1724 else
1725 {
1726 rtx rounded_size, rounded_size_op, test_addr, last_addr, temp;
1727 rtx_code_label *loop_lab = gen_label_rtx ();
1728 rtx_code_label *end_lab = gen_label_rtx ();
1729
1730 /* Step 1: round SIZE to the previous multiple of the interval. */
1731
1732 /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL */
1733 rounded_size
1734 = simplify_gen_binary (AND, Pmode, size,
1735 gen_int_mode (-PROBE_INTERVAL, Pmode));
1736 rounded_size_op = force_operand (rounded_size, NULL_RTX);
1737
1738
1739 /* Step 2: compute initial and final value of the loop counter. */
1740
1741 /* TEST_ADDR = SP + FIRST. */
1742 test_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1743 stack_pointer_rtx,
1744 gen_int_mode (first, Pmode)),
1745 NULL_RTX);
1746
1747 /* LAST_ADDR = SP + FIRST + ROUNDED_SIZE. */
1748 last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1749 test_addr,
1750 rounded_size_op), NULL_RTX);
1751
1752
1753 /* Step 3: the loop
1754
1755 while (TEST_ADDR != LAST_ADDR)
1756 {
1757 TEST_ADDR = TEST_ADDR + PROBE_INTERVAL
1758 probe at TEST_ADDR
1759 }
1760
1761 probes at FIRST + N * PROBE_INTERVAL for values of N from 1
1762 until it is equal to ROUNDED_SIZE. */
1763
1764 emit_label (loop_lab);
1765
1766 /* Jump to END_LAB if TEST_ADDR == LAST_ADDR. */
1767 emit_cmp_and_jump_insns (test_addr, last_addr, EQ, NULL_RTX, Pmode, 1,
1768 end_lab);
1769
1770 /* TEST_ADDR = TEST_ADDR + PROBE_INTERVAL. */
1771 temp = expand_binop (Pmode, STACK_GROW_OPTAB, test_addr,
1772 gen_int_mode (PROBE_INTERVAL, Pmode), test_addr,
1773 1, OPTAB_WIDEN);
1774
1775 gcc_assert (temp == test_addr);
1776
1777 /* Probe at TEST_ADDR. */
1778 emit_stack_probe (test_addr);
1779
1780 emit_jump (loop_lab);
1781
1782 emit_label (end_lab);
1783
1784
1785 /* Step 4: probe at FIRST + SIZE if we cannot assert at compile-time
1786 that SIZE is equal to ROUNDED_SIZE. */
1787
1788 /* TEMP = SIZE - ROUNDED_SIZE. */
1789 temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
1790 if (temp != const0_rtx)
1791 {
1792 rtx addr;
1793
1794 if (CONST_INT_P (temp))
1795 {
1796 /* Use [base + disp] addressing mode if supported. */
1797 HOST_WIDE_INT offset = INTVAL (temp);
1798 addr = memory_address (Pmode,
1799 plus_constant (Pmode, last_addr,
1800 STACK_GROW_OFF (offset)));
1801 }
1802 else
1803 {
1804 /* Manual CSE if the difference is not known at compile-time. */
1805 temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
1806 addr = memory_address (Pmode,
1807 gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1808 last_addr, temp));
1809 }
1810
1811 emit_stack_probe (addr);
1812 }
1813 }
1814
1815 /* Make sure nothing is scheduled before we are done. */
1816 emit_insn (gen_blockage ());
1817 }
1818
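/* As a rough illustration of the probing scheme above (the exact numbers
   depend on the target; a PROBE_INTERVAL of 4096 bytes is assumed purely
   for this sketch), probing a range with FIRST == 0 and SIZE == 10000:

     constant case:  probes land 4096, 8192 and 10000 bytes beyond SP,
                     in the direction of stack growth;

     variable case:  ROUNDED_SIZE = SIZE & -4096 = 8192, so the loop
                     probes 4096 and 8192 bytes beyond SP, and the final
                     residual probe lands 1808 bytes beyond LAST_ADDR,
                     i.e. 10000 bytes beyond SP.  */
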
1819 /* Compute parameters for stack clash probing a dynamic stack
1820 allocation of SIZE bytes.
1821
1822 We compute ROUNDED_SIZE, LAST_ADDR, RESIDUAL and PROBE_INTERVAL.
1823
1824 Additionally we conditionally dump the type of probing that will
1825 be needed given the values computed. */
1826
1827 void
1828 compute_stack_clash_protection_loop_data (rtx *rounded_size, rtx *last_addr,
1829 rtx *residual,
1830 HOST_WIDE_INT *probe_interval,
1831 rtx size)
1832 {
1833 /* Round SIZE down to STACK_CLASH_PROTECTION_PROBE_INTERVAL */
1834 *probe_interval
1835 = 1 << PARAM_VALUE (PARAM_STACK_CLASH_PROTECTION_PROBE_INTERVAL);
1836 *rounded_size = simplify_gen_binary (AND, Pmode, size,
1837 GEN_INT (-*probe_interval));
1838
1839 /* Compute the value of the stack pointer for the last iteration.
1840 It's just SP + ROUNDED_SIZE. */
1841 rtx rounded_size_op = force_operand (*rounded_size, NULL_RTX);
1842 *last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
1843 stack_pointer_rtx,
1844 rounded_size_op),
1845 NULL_RTX);
1846
1847 /* Compute any residuals not allocated by the loop above. Residuals
1848 are just SIZE - ROUNDED_SIZE. */
1849 *residual = simplify_gen_binary (MINUS, Pmode, size, *rounded_size);
1850
1851 /* Dump key information to make writing tests easy. */
1852 if (dump_file)
1853 {
1854 if (*rounded_size == CONST0_RTX (Pmode))
1855 fprintf (dump_file,
1856 "Stack clash skipped dynamic allocation and probing loop.\n");
1857 else if (CONST_INT_P (*rounded_size)
1858 && INTVAL (*rounded_size) <= 4 * *probe_interval)
1859 fprintf (dump_file,
1860 "Stack clash dynamic allocation and probing inline.\n");
1861 else if (CONST_INT_P (*rounded_size))
1862 fprintf (dump_file,
1863 "Stack clash dynamic allocation and probing in "
1864 "rotated loop.\n");
1865 else
1866 fprintf (dump_file,
1867 "Stack clash dynamic allocation and probing in loop.\n");
1868
1869 if (*residual != CONST0_RTX (Pmode))
1870 fprintf (dump_file,
1871 "Stack clash dynamic allocation and probing residuals.\n");
1872 else
1873 fprintf (dump_file,
1874 "Stack clash skipped dynamic allocation and "
1875 "probing residuals.\n");
1876 }
1877 }
1878
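/* For instance, assuming the probe interval parameter works out to 4096
   bytes (an illustrative value only): a SIZE of 12288 gives
   ROUNDED_SIZE = 12288 and RESIDUAL = 0, so no residual probe is needed,
   while a SIZE of 20000 gives ROUNDED_SIZE = 16384, RESIDUAL = 3616, and
   LAST_ADDR pointing ROUNDED_SIZE bytes beyond SP in the direction of
   stack growth.  */
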
1879 /* Emit the start of an allocate/probe loop for stack
1880 clash protection.
1881
1882 LOOP_LAB and END_LAB are returned for use when we emit the
1883 end of the loop.
1884
1885 LAST_ADDR is the value of SP which stops the loop. */
1886 void
1887 emit_stack_clash_protection_probe_loop_start (rtx *loop_lab,
1888 rtx *end_lab,
1889 rtx last_addr,
1890 bool rotated)
1891 {
1892 /* Essentially we want to emit any setup code, the top of loop
1893 label and the comparison at the top of the loop. */
1894 *loop_lab = gen_label_rtx ();
1895 *end_lab = gen_label_rtx ();
1896
1897 emit_label (*loop_lab);
1898 if (!rotated)
1899 emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
1900 Pmode, 1, *end_lab);
1901 }
1902
1903 /* Emit the end of a stack clash probing loop.
1904
1905 This consists of just the jump back to LOOP_LAB and
1906 emitting END_LOOP after the loop. */
1907
1908 void
1909 emit_stack_clash_protection_probe_loop_end (rtx loop_lab, rtx end_loop,
1910 rtx last_addr, bool rotated)
1911 {
1912 if (rotated)
1913 emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, NE, NULL_RTX,
1914 Pmode, 1, loop_lab);
1915 else
1916 emit_jump (loop_lab);
1917
1918 emit_label (end_loop);
1919
1920 }
1921
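/* Taken together, the two helpers above produce one of the following loop
   shapes (shown as pseudocode; the allocate-and-probe body is emitted by
   the caller between the two calls):

     !ROTATED:   loop: if (SP == LAST_ADDR) goto end;
                       <allocate and probe>
                       goto loop;
                 end:

     ROTATED:    loop: <allocate and probe>
                       if (SP != LAST_ADDR) goto loop;
                 end:                                       */
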
1922 /* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
1923 while probing it. This pushes when SIZE is positive. SIZE need not
1924 be constant.
1925
1926 This is subtly different from anti_adjust_stack_and_probe so as to
1927 prevent stack-clash attacks:
1928
1929 1. It must assume no knowledge of the probing state; any allocation
1930 must probe.
1931
1932 Consider the case of a 1 byte alloca in a loop. If the sum of the
1933 allocations is large, then this could be used to jump the guard if
1934 probes were not emitted.
1935
1936 2. It never skips probes, whereas anti_adjust_stack_and_probe will
1937 skip probes on the first couple PROBE_INTERVALs on the assumption
1938 they're done elsewhere.
1939
1940 3. It only allocates and probes SIZE bytes; it does not need to
1941 allocate/probe beyond that because this probing style does not
1942 guarantee signal handling capability if the guard is hit. */
1943
1944 static void
1945 anti_adjust_stack_and_probe_stack_clash (rtx size)
1946 {
1947 /* First ensure SIZE is Pmode. */
1948 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
1949 size = convert_to_mode (Pmode, size, 1);
1950
1951 /* We can get here with a constant size on some targets. */
1952 rtx rounded_size, last_addr, residual;
1953 HOST_WIDE_INT probe_interval, probe_range;
1954 bool target_probe_range_p = false;
1955 compute_stack_clash_protection_loop_data (&rounded_size, &last_addr,
1956 &residual, &probe_interval, size);
1957
1958 /* Get the back-end specific probe ranges. */
1959 probe_range = targetm.stack_clash_protection_alloca_probe_range ();
1960 target_probe_range_p = probe_range != 0;
1961 gcc_assert (probe_range >= 0);
1962
1963 /* If no back-end specific range defined, default to the top of the newly
1964 allocated range. */
1965 if (probe_range == 0)
1966 probe_range = probe_interval - GET_MODE_SIZE (word_mode);
1967
1968 if (rounded_size != CONST0_RTX (Pmode))
1969 {
1970 if (CONST_INT_P (rounded_size)
1971 && INTVAL (rounded_size) <= 4 * probe_interval)
1972 {
1973 for (HOST_WIDE_INT i = 0;
1974 i < INTVAL (rounded_size);
1975 i += probe_interval)
1976 {
1977 anti_adjust_stack (GEN_INT (probe_interval));
1978 /* The prologue does not probe residuals. Thus the offset
1979 here probes just beyond what the prologue has already
1980 allocated. */
1981 emit_stack_probe (plus_constant (Pmode, stack_pointer_rtx,
1982 probe_range));
1983
1984 emit_insn (gen_blockage ());
1985 }
1986 }
1987 else
1988 {
1989 rtx loop_lab, end_loop;
1990 bool rotate_loop = CONST_INT_P (rounded_size);
1991 emit_stack_clash_protection_probe_loop_start (&loop_lab, &end_loop,
1992 last_addr, rotate_loop);
1993
1994 anti_adjust_stack (GEN_INT (probe_interval));
1995
1996 /* The prologue does not probe residuals. Thus the offset here
1997 probes just beyond what the prologue has already
1998 allocated. */
1999 emit_stack_probe (plus_constant (Pmode, stack_pointer_rtx,
2000 probe_range));
2001
2002 emit_stack_clash_protection_probe_loop_end (loop_lab, end_loop,
2003 last_addr, rotate_loop);
2004 emit_insn (gen_blockage ());
2005 }
2006 }
2007
2008 if (residual != CONST0_RTX (Pmode))
2009 {
2010 rtx label = NULL_RTX;
2011 /* RESIDUAL could be zero at runtime and in that case *sp could
2012 hold live data. Furthermore, we do not want to probe into the
2013 red zone.
2014
2015 If TARGET_PROBE_RANGE_P then the target has promised it's safe to
2016 probe at offset 0, in which case we no longer have to check for
2017 RESIDUAL == 0. However, we still need to probe at the right offset
2018 when RESIDUAL > PROBE_RANGE, in which case we probe at PROBE_RANGE.
2019
2020 If !TARGET_PROBE_RANGE_P then go ahead and just guard the probe at *sp
2021 on RESIDUAL != 0 at runtime if RESIDUAL is not a compile time constant.
2022 */
2023 anti_adjust_stack (residual);
2024
2025 if (!CONST_INT_P (residual))
2026 {
2027 label = gen_label_rtx ();
2028 rtx_code op = target_probe_range_p ? LT : EQ;
2029 rtx probe_cmp_value = target_probe_range_p
2030 ? gen_rtx_CONST_INT (GET_MODE (residual), probe_range)
2031 : CONST0_RTX (GET_MODE (residual));
2032
2033 if (target_probe_range_p)
2034 emit_stack_probe (stack_pointer_rtx);
2035
2036 emit_cmp_and_jump_insns (residual, probe_cmp_value,
2037 op, NULL_RTX, Pmode, 1, label);
2038 }
2039
2040 rtx x = NULL_RTX;
2041
2042 /* If RESIDUAL isn't a constant and TARGET_PROBE_RANGE_P then we probe up
2043 by the ABI-defined safe value. */
2044 if (!CONST_INT_P (residual) && target_probe_range_p)
2045 x = GEN_INT (probe_range);
2046 /* If RESIDUAL is a constant but smaller than the ABI-defined safe value,
2047 we still want to probe up, but the safest amount is a word. */
2048 else if (target_probe_range_p)
2049 {
2050 if (INTVAL (residual) <= probe_range)
2051 x = GEN_INT (GET_MODE_SIZE (word_mode));
2052 else
2053 x = GEN_INT (probe_range);
2054 }
2055 else
2056 /* If nothing else, probe at the top of the new allocation. */
2057 x = plus_constant (Pmode, residual, -GET_MODE_SIZE (word_mode));
2058
2059 emit_stack_probe (gen_rtx_PLUS (Pmode, stack_pointer_rtx, x));
2060
2061 emit_insn (gen_blockage ());
2062 if (!CONST_INT_P (residual))
2063 emit_label (label);
2064 }
2065 }
2066
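/* By way of example (the numbers are illustrative, assuming a 4096-byte
   probe interval, no target-specific probe range, and 8-byte words, so
   PROBE_RANGE defaults to 4088): a constant SIZE of 9000 has
   ROUNDED_SIZE = 8192 and RESIDUAL = 808, and is handled inline as two
   4096-byte allocations, each followed by a probe at SP + 4088, then an
   808-byte allocation for the residual with a probe at SP + 800
   (one word short of the residual).  */
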
2067
2068 /* Adjust the stack pointer by minus SIZE (an rtx for a number of bytes)
2069 while probing it. This pushes when SIZE is positive. SIZE need not
2070 be constant. If ADJUST_BACK is true, adjust back the stack pointer
2071 by plus SIZE at the end. */
2072
2073 void
2074 anti_adjust_stack_and_probe (rtx size, bool adjust_back)
2075 {
2076 /* We skip the probe for the first interval + a small dope of 4 words and
2077 probe that many bytes past the specified size to maintain a protection
2078 area at the bottom of the stack. */
2079 const int dope = 4 * UNITS_PER_WORD;
2080
2081 /* First ensure SIZE is Pmode. */
2082 if (GET_MODE (size) != VOIDmode && GET_MODE (size) != Pmode)
2083 size = convert_to_mode (Pmode, size, 1);
2084
2085 /* If we have a constant small number of probes to generate, that's the
2086 easy case. */
2087 if (CONST_INT_P (size) && INTVAL (size) < 7 * PROBE_INTERVAL)
2088 {
2089 HOST_WIDE_INT isize = INTVAL (size), i;
2090 bool first_probe = true;
2091
2092 /* Adjust SP and probe at PROBE_INTERVAL + N * PROBE_INTERVAL for
2093 values of N from 1 until it exceeds SIZE. If only one probe is
2094 needed, this will not generate any code. Then adjust and probe
2095 to PROBE_INTERVAL + SIZE. */
2096 for (i = PROBE_INTERVAL; i < isize; i += PROBE_INTERVAL)
2097 {
2098 if (first_probe)
2099 {
2100 anti_adjust_stack (GEN_INT (2 * PROBE_INTERVAL + dope));
2101 first_probe = false;
2102 }
2103 else
2104 anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
2105 emit_stack_probe (stack_pointer_rtx);
2106 }
2107
2108 if (first_probe)
2109 anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
2110 else
2111 anti_adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL - i));
2112 emit_stack_probe (stack_pointer_rtx);
2113 }
2114
2115 /* In the variable case, do the same as above, but in a loop. Note that we
2116 must be extra careful with variables wrapping around because we might be
2117 at the very top (or the very bottom) of the address space and we have to
2118 be able to handle this case properly; in particular, we use an equality
2119 test for the loop condition. */
2120 else
2121 {
2122 rtx rounded_size, rounded_size_op, last_addr, temp;
2123 rtx_code_label *loop_lab = gen_label_rtx ();
2124 rtx_code_label *end_lab = gen_label_rtx ();
2125
2126
2127 /* Step 1: round SIZE to the previous multiple of the interval. */
2128
2129 /* ROUNDED_SIZE = SIZE & -PROBE_INTERVAL */
2130 rounded_size
2131 = simplify_gen_binary (AND, Pmode, size,
2132 gen_int_mode (-PROBE_INTERVAL, Pmode));
2133 rounded_size_op = force_operand (rounded_size, NULL_RTX);
2134
2135
2136 /* Step 2: compute initial and final value of the loop counter. */
2137
2138 /* SP = SP_0 + PROBE_INTERVAL. */
2139 anti_adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
2140
2141 /* LAST_ADDR = SP_0 + PROBE_INTERVAL + ROUNDED_SIZE. */
2142 last_addr = force_operand (gen_rtx_fmt_ee (STACK_GROW_OP, Pmode,
2143 stack_pointer_rtx,
2144 rounded_size_op), NULL_RTX);
2145
2146
2147 /* Step 3: the loop
2148
2149 while (SP != LAST_ADDR)
2150 {
2151 SP = SP + PROBE_INTERVAL
2152 probe at SP
2153 }
2154
2155 adjusts SP and probes at PROBE_INTERVAL + N * PROBE_INTERVAL for
2156 values of N from 1 until it is equal to ROUNDED_SIZE. */
2157
2158 emit_label (loop_lab);
2159
2160 /* Jump to END_LAB if SP == LAST_ADDR. */
2161 emit_cmp_and_jump_insns (stack_pointer_rtx, last_addr, EQ, NULL_RTX,
2162 Pmode, 1, end_lab);
2163
2164 /* SP = SP + PROBE_INTERVAL and probe at SP. */
2165 anti_adjust_stack (GEN_INT (PROBE_INTERVAL));
2166 emit_stack_probe (stack_pointer_rtx);
2167
2168 emit_jump (loop_lab);
2169
2170 emit_label (end_lab);
2171
2172
2173 /* Step 4: adjust SP and probe at PROBE_INTERVAL + SIZE if we cannot
2174 assert at compile-time that SIZE is equal to ROUNDED_SIZE. */
2175
2176 /* TEMP = SIZE - ROUNDED_SIZE. */
2177 temp = simplify_gen_binary (MINUS, Pmode, size, rounded_size);
2178 if (temp != const0_rtx)
2179 {
2180 /* Manual CSE if the difference is not known at compile-time. */
2181 if (GET_CODE (temp) != CONST_INT)
2182 temp = gen_rtx_MINUS (Pmode, size, rounded_size_op);
2183 anti_adjust_stack (temp);
2184 emit_stack_probe (stack_pointer_rtx);
2185 }
2186 }
2187
2188 /* Adjust back and account for the additional first interval. */
2189 if (adjust_back)
2190 adjust_stack (plus_constant (Pmode, size, PROBE_INTERVAL + dope));
2191 else
2192 adjust_stack (GEN_INT (PROBE_INTERVAL + dope));
2193 }
2194
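/* To make the arithmetic above concrete (assuming, for this sketch only,
   a 4096-byte PROBE_INTERVAL and 8-byte words, so DOPE == 32): for a
   constant SIZE of 6000 the code first pushes 2*4096 + 32 == 8224 bytes
   and probes at SP, then pushes the remaining 6000 + 4096 - 8192 == 1904
   bytes and probes at SP again, for a total of SIZE + PROBE_INTERVAL +
   DOPE bytes.  The final adjust_stack releases either that whole amount
   (ADJUST_BACK) or just PROBE_INTERVAL + DOPE, leaving SIZE allocated.  */
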
2195 /* Return an rtx representing the register or memory location
2196 in which a scalar value of data type VALTYPE
2197 was returned by a function call to function FUNC.
2198 FUNC is a FUNCTION_DECL, FNTYPE a FUNCTION_TYPE node if the precise
2199 function is known, otherwise 0.
2200 OUTGOING is 1 if on a machine with register windows this function
2201 should return the register in which the function will put its result
2202 and 0 otherwise. */
2203
2204 rtx
2205 hard_function_value (const_tree valtype, const_tree func, const_tree fntype,
2206 int outgoing ATTRIBUTE_UNUSED)
2207 {
2208 rtx val;
2209
2210 val = targetm.calls.function_value (valtype, func ? func : fntype, outgoing);
2211
2212 if (REG_P (val)
2213 && GET_MODE (val) == BLKmode)
2214 {
2215 unsigned HOST_WIDE_INT bytes = arg_int_size_in_bytes (valtype);
2216 opt_scalar_int_mode tmpmode;
2217
2218 /* int_size_in_bytes can return -1. We don't need a check here
2219 since the value of bytes will then be large enough that no
2220 mode will match anyway. */
2221
2222 FOR_EACH_MODE_IN_CLASS (tmpmode, MODE_INT)
2223 {
2224 /* Have we found a large enough mode? */
2225 if (GET_MODE_SIZE (tmpmode.require ()) >= bytes)
2226 break;
2227 }
2228
2229 PUT_MODE (val, tmpmode.require ());
2230 }
2231 return val;
2232 }
2233
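/* For example, if a function returns a 6-byte aggregate in a register,
   arg_int_size_in_bytes yields 6 and the loop above picks the first
   integer mode of at least that many bytes (DImode on a typical target
   where DImode is 8 bytes), which then replaces BLKmode on VAL.  */
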
2234 /* Return an rtx representing the register or memory location
2235 in which a scalar value of mode MODE was returned by a library call. */
2236
2237 rtx
2238 hard_libcall_value (machine_mode mode, rtx fun)
2239 {
2240 return targetm.calls.libcall_value (mode, fun);
2241 }
2242
2243 /* Look up the tree code for a given rtx code
2244 to provide the arithmetic operation for real_arithmetic.
2245 The function returns an int because the caller may not know
2246 what `enum tree_code' means. */
2247
2248 int
2249 rtx_to_tree_code (enum rtx_code code)
2250 {
2251 enum tree_code tcode;
2252
2253 switch (code)
2254 {
2255 case PLUS:
2256 tcode = PLUS_EXPR;
2257 break;
2258 case MINUS:
2259 tcode = MINUS_EXPR;
2260 break;
2261 case MULT:
2262 tcode = MULT_EXPR;
2263 break;
2264 case DIV:
2265 tcode = RDIV_EXPR;
2266 break;
2267 case SMIN:
2268 tcode = MIN_EXPR;
2269 break;
2270 case SMAX:
2271 tcode = MAX_EXPR;
2272 break;
2273 default:
2274 tcode = LAST_AND_UNUSED_TREE_CODE;
2275 break;
2276 }
2277 return ((int) tcode);
2278 }
2279
2280 #include "gt-explow.h"