]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/combine-stack-adj.c
[Ada] Two typo fixes
[thirdparty/gcc.git] / gcc / combine-stack-adj.c
CommitLineData
c7a0240a 1/* Combine stack adjustments.
8d9254fc 2 Copyright (C) 1987-2020 Free Software Foundation, Inc.
c7a0240a
SB
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify it under
7the terms of the GNU General Public License as published by the Free
9dcd6f09 8Software Foundation; either version 3, or (at your option) any later
c7a0240a
SB
9version.
10
11GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
15
16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
c7a0240a
SB
19
20/* Track stack adjustments and stack memory references. Attempt to
21 reduce the number of stack adjustments by back-propagating across
22 the memory references.
23
24 This is intended primarily for use with targets that do not define
25 ACCUMULATE_OUTGOING_ARGS. It is of significantly more value to
26 targets that define PREFERRED_STACK_BOUNDARY more aligned than
27 STACK_BOUNDARY (e.g. x86), or if not all registers can be pushed
28 (e.g. x86 fp regs) which would ordinarily have to be implemented
29 as a sub/mov pair due to restrictions in calls.c.
30
31 Propagation stops when any of the insns that need adjusting are
32 (a) no longer valid because we've exceeded their range, (b) a
33 non-trivial push instruction, or (c) a call instruction.
34
35 Restriction B is based on the assumption that push instructions
36 are smaller or faster. If a port really wants to remove all
37 pushes, it should have defined ACCUMULATE_OUTGOING_ARGS. The
38 one exception that is made is for an add immediately followed
39 by a push. */
40
41#include "config.h"
42#include "system.h"
43#include "coretypes.h"
c7131fb2 44#include "backend.h"
c7a0240a 45#include "rtl.h"
c7131fb2 46#include "df.h"
c7a0240a 47#include "insn-config.h"
4d0cdd0c 48#include "memmodel.h"
957060b5
AM
49#include "emit-rtl.h"
50#include "recog.h"
60393bbc 51#include "cfgrtl.h"
c7a0240a 52#include "tree-pass.h"
f8305d18 53#include "rtl-iter.h"
c7a0240a
SB
54
55\f
90588a10
JJ
/* This structure records two kinds of stack references between stack
   adjusting instructions: stack references in memory addresses for
   regular insns and all stack references for debug insns.  */

struct csa_reflist
{
  /* Constant offset of the reference from the stack pointer; zero when
     the reference is (mem (sp)) or the stack pointer register itself.  */
  HOST_WIDE_INT sp_offset;
  /* The insn containing the reference.  */
  rtx_insn *insn;
  /* Location of the reference inside INSN's pattern, so it can be
     rewritten in place via validate_change.  */
  rtx *ref;
  /* Next recorded reference, or NULL.  */
  struct csa_reflist *next;
};
67
/* Forward declarations for the static helpers below.  */
static int stack_memref_p (rtx);
static rtx single_set_for_csa (rtx_insn *);
static void free_csa_reflist (struct csa_reflist *);
static struct csa_reflist *record_one_stack_ref (rtx_insn *, rtx *,
						 struct csa_reflist *);
static bool try_apply_stack_adjustment (rtx_insn *, struct csa_reflist *,
					HOST_WIDE_INT, HOST_WIDE_INT,
					bitmap, rtx_insn *);
static void combine_stack_adjustments_for_block (basic_block, bitmap);
c7a0240a
SB
77
78
79/* Main entry point for stack adjustment combination. */
80
81static void
82combine_stack_adjustments (void)
83{
84 basic_block bb;
d44f14cc 85 bitmap live = BITMAP_ALLOC (&reg_obstack);
c7a0240a 86
11cd3bed 87 FOR_EACH_BB_FN (bb, cfun)
d44f14cc
JJ
88 combine_stack_adjustments_for_block (bb, live);
89
90 BITMAP_FREE (live);
c7a0240a
SB
91}
92
93/* Recognize a MEM of the form (sp) or (plus sp const). */
94
95static int
96stack_memref_p (rtx x)
97{
98 if (!MEM_P (x))
99 return 0;
100 x = XEXP (x, 0);
101
102 if (x == stack_pointer_rtx)
103 return 1;
104 if (GET_CODE (x) == PLUS
105 && XEXP (x, 0) == stack_pointer_rtx
481683e1 106 && CONST_INT_P (XEXP (x, 1)))
c7a0240a
SB
107 return 1;
108
109 return 0;
110}
111
112/* Recognize either normal single_set or the hack in i386.md for
113 tying fp and sp adjustments. */
114
115static rtx
71e88baf 116single_set_for_csa (rtx_insn *insn)
c7a0240a
SB
117{
118 int i;
119 rtx tmp = single_set (insn);
120 if (tmp)
121 return tmp;
122
123 if (!NONJUMP_INSN_P (insn)
124 || GET_CODE (PATTERN (insn)) != PARALLEL)
125 return NULL_RTX;
126
127 tmp = PATTERN (insn);
128 if (GET_CODE (XVECEXP (tmp, 0, 0)) != SET)
129 return NULL_RTX;
130
131 for (i = 1; i < XVECLEN (tmp, 0); ++i)
132 {
48c54229 133 rtx this_rtx = XVECEXP (tmp, 0, i);
c7a0240a
SB
134
135 /* The special case is allowing a no-op set. */
48c54229
KG
136 if (GET_CODE (this_rtx) == SET
137 && SET_SRC (this_rtx) == SET_DEST (this_rtx))
c7a0240a 138 ;
48c54229
KG
139 else if (GET_CODE (this_rtx) != CLOBBER
140 && GET_CODE (this_rtx) != USE)
c7a0240a
SB
141 return NULL_RTX;
142 }
143
144 return XVECEXP (tmp, 0, 0);
145}
146
90588a10 147/* Free the list of csa_reflist nodes. */
c7a0240a
SB
148
149static void
90588a10 150free_csa_reflist (struct csa_reflist *reflist)
c7a0240a 151{
90588a10
JJ
152 struct csa_reflist *next;
153 for (; reflist ; reflist = next)
c7a0240a 154 {
90588a10
JJ
155 next = reflist->next;
156 free (reflist);
c7a0240a
SB
157 }
158}
159
90588a10
JJ
160/* Create a new csa_reflist node from the given stack reference.
161 It is already known that the reference is either a MEM satisfying the
162 predicate stack_memref_p or a REG representing the stack pointer. */
c7a0240a 163
90588a10 164static struct csa_reflist *
71e88baf 165record_one_stack_ref (rtx_insn *insn, rtx *ref, struct csa_reflist *next_reflist)
c7a0240a 166{
90588a10 167 struct csa_reflist *ml;
c7a0240a 168
90588a10 169 ml = XNEW (struct csa_reflist);
c7a0240a 170
90588a10 171 if (REG_P (*ref) || XEXP (*ref, 0) == stack_pointer_rtx)
c7a0240a
SB
172 ml->sp_offset = 0;
173 else
90588a10 174 ml->sp_offset = INTVAL (XEXP (XEXP (*ref, 0), 1));
c7a0240a
SB
175
176 ml->insn = insn;
90588a10
JJ
177 ml->ref = ref;
178 ml->next = next_reflist;
c7a0240a
SB
179
180 return ml;
181}
182
3dce0964
RH
183/* We only know how to adjust the CFA; no other frame-related changes
184 may appear in any insn to be deleted. */
185
186static bool
187no_unhandled_cfa (rtx_insn *insn)
188{
189 if (!RTX_FRAME_RELATED_P (insn))
190 return true;
191
192 /* No CFA notes at all is a legacy interpretation like
193 FRAME_RELATED_EXPR, and is context sensitive within
194 the prologue state machine. We can't handle that here. */
195 bool has_cfa_adjust = false;
196
197 for (rtx link = REG_NOTES (insn); link; link = XEXP (link, 1))
198 switch (REG_NOTE_KIND (link))
199 {
200 default:
201 break;
202 case REG_CFA_ADJUST_CFA:
203 has_cfa_adjust = true;
204 break;
205
206 case REG_FRAME_RELATED_EXPR:
207 case REG_CFA_DEF_CFA:
208 case REG_CFA_OFFSET:
209 case REG_CFA_REGISTER:
210 case REG_CFA_EXPRESSION:
211 case REG_CFA_RESTORE:
212 case REG_CFA_SET_VDRAP:
213 case REG_CFA_WINDOW_SAVE:
214 case REG_CFA_FLUSH_QUEUE:
27169e45 215 case REG_CFA_TOGGLE_RA_MANGLE:
3dce0964
RH
216 return false;
217 }
218
219 return has_cfa_adjust;
220}
221
/* Attempt to apply ADJUST to the stack adjusting insn INSN, as well
   as each of the memories and stack references in REFLIST.  Return true
   on success.  NEW_ADJUST is the combined constant for INSN; DELTA is
   subtracted from every recorded reference's sp offset.  LIVE and
   OTHER_INSN are only used for the postreload "store equivalent
   register" form (see below); pass NULL/NULL otherwise.  All changes
   are queued with validate_change and committed atomically by
   apply_change_group, so failure leaves everything untouched.  */

static bool
try_apply_stack_adjustment (rtx_insn *insn, struct csa_reflist *reflist,
			    HOST_WIDE_INT new_adjust, HOST_WIDE_INT delta,
			    bitmap live, rtx_insn *other_insn)
{
  struct csa_reflist *ml;
  rtx set;
  bool remove_equal = false;

  set = single_set_for_csa (insn);
  if (MEM_P (SET_DEST (set)))
    /* INSN is a push; retarget its address to the plain stack pointer.  */
    validate_change (insn, &SET_DEST (set),
		     replace_equiv_address (SET_DEST (set), stack_pointer_rtx),
		     1);
  else if (REG_P (SET_SRC (set)))
    {
      /* INSN stores a register into sp (a form postreload creates with a
	 REG_EQUAL note).  Replace its whole pattern with a copy of
	 OTHER_INSN's sp = sp + const pattern — but only if OTHER_INSN's
	 extra CLOBBERed hard registers are not live at INSN.  */
      if (other_insn == NULL_RTX || live == NULL)
	return false;
      rtx other_set = single_set_for_csa (other_insn);
      if (SET_DEST (other_set) != stack_pointer_rtx
	  || GET_CODE (SET_SRC (other_set)) != PLUS
	  || XEXP (SET_SRC (other_set), 0) != stack_pointer_rtx
	  || !CONST_INT_P (XEXP (SET_SRC (other_set), 1)))
	return false;
      if (PATTERN (other_insn) != other_set)
	{
	  if (GET_CODE (PATTERN (other_insn)) != PARALLEL)
	    return false;
	  int i;
	  rtx p = PATTERN (other_insn);
	  for (i = 0; i < XVECLEN (p, 0); ++i)
	    {
	      rtx this_rtx = XVECEXP (p, 0, i);
	      if (this_rtx == other_set)
		continue;
	      if (GET_CODE (this_rtx) != CLOBBER)
		return false;
	      if (!REG_P (XEXP (this_rtx, 0))
		  || !HARD_REGISTER_P (XEXP (this_rtx, 0)))
		return false;
	      /* Every register covered by the clobber must be dead.  */
	      unsigned int end_regno = END_REGNO (XEXP (this_rtx, 0));
	      for (unsigned int regno = REGNO (XEXP (this_rtx, 0));
		   regno < end_regno; ++regno)
		if (bitmap_bit_p (live, regno))
		  return false;
	    }
	}
      validate_change (insn, &PATTERN (insn), copy_rtx (PATTERN (other_insn)),
		       1);
      /* Re-fetch the SET from the replacement pattern before editing
	 its constant.  */
      set = single_set_for_csa (insn);
      validate_change (insn, &XEXP (SET_SRC (set), 1), GEN_INT (new_adjust),
		       1);
      /* The old REG_EQUAL note no longer describes the insn.  */
      remove_equal = true;
    }
  else
    /* Plain sp = sp + const: just update the constant.  */
    validate_change (insn, &XEXP (SET_SRC (set), 1), GEN_INT (new_adjust), 1);

  /* Queue the rewrite of every recorded stack reference.  */
  for (ml = reflist; ml ; ml = ml->next)
    {
      rtx new_addr = plus_constant (Pmode, stack_pointer_rtx,
				    ml->sp_offset - delta);
      rtx new_val;

      if (MEM_P (*ml->ref))
	new_val = replace_equiv_address_nv (*ml->ref, new_addr);
      else if (GET_MODE (*ml->ref) == GET_MODE (stack_pointer_rtx))
	new_val = new_addr;
      else
	/* A non-Pmode reference to sp (e.g. from a debug insn);
	   wrap the new address in a matching subreg.  */
	new_val = lowpart_subreg (GET_MODE (*ml->ref), new_addr,
				  GET_MODE (new_addr));
      validate_change (ml->insn, ml->ref, new_val, 1);
    }

  if (apply_change_group ())
    {
      /* Succeeded.  Update our knowledge of the stack references.  */
      for (ml = reflist; ml ; ml = ml->next)
	ml->sp_offset -= delta;

      if (remove_equal)
	remove_reg_equal_equiv_notes (insn);
      return true;
    }
  else
    return false;
}
312
f8305d18
RS
/* For non-debug insns, record all stack memory references in INSN
   and return true if there were no other (unrecorded) references to the
   stack pointer.  For debug insns, record all stack references regardless
   of context and unconditionally return true.  New entries are pushed
   onto *REFLIST.  */

static bool
record_stack_refs (rtx_insn *insn, struct csa_reflist **reflist)
{
  subrtx_ptr_iterator::array_type array;
  FOR_EACH_SUBRTX_PTR (iter, array, &PATTERN (insn), NONCONST)
    {
      rtx *loc = *iter;
      rtx x = *loc;
      switch (GET_CODE (x))
	{
	case MEM:
	  if (!reg_mentioned_p (stack_pointer_rtx, x))
	    /* No sp inside: no need to descend into this MEM.  */
	    iter.skip_subrtxes ();
	  /* We are not able to handle correctly all possible memrefs
	     containing stack pointer, so this check is necessary.  */
	  else if (stack_memref_p (x))
	    {
	      *reflist = record_one_stack_ref (insn, loc, *reflist);
	      iter.skip_subrtxes ();
	    }
	  /* Try harder for DEBUG_INSNs, handle e.g.
	     (mem (mem (sp + 16) + 4).  */
	  else if (!DEBUG_INSN_P (insn))
	    return false;
	  break;

	case REG:
	  /* ??? We want be able to handle non-memory stack pointer
	     references later.  For now just discard all insns referring to
	     stack pointer outside mem expressions.  We would probably
	     want to teach validate_replace to simplify expressions first.

	     We can't just compare with STACK_POINTER_RTX because the
	     reference to the stack pointer might be in some other mode.
	     In particular, an explicit clobber in an asm statement will
	     result in a QImode clobber.

	     In DEBUG_INSNs, we want to replace all occurrences, otherwise
	     they will cause -fcompare-debug failures.  */
	  if (REGNO (x) == STACK_POINTER_REGNUM)
	    {
	      if (!DEBUG_INSN_P (insn))
		return false;
	      *reflist = record_one_stack_ref (insn, loc, *reflist);
	    }
	  break;

	default:
	  break;
	}
    }
  return true;
}
371
4efb91df
RH
372/* If INSN has a REG_ARGS_SIZE note, move it to LAST.
373 AFTER is true iff LAST follows INSN in the instruction stream. */
a182fb6b
JJ
374
375static void
71e88baf 376maybe_move_args_size_note (rtx_insn *last, rtx_insn *insn, bool after)
a182fb6b 377{
9a08d230 378 rtx note, last_note;
a182fb6b 379
9a08d230
RH
380 note = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
381 if (note == NULL)
a182fb6b
JJ
382 return;
383
9a08d230
RH
384 last_note = find_reg_note (last, REG_ARGS_SIZE, NULL_RTX);
385 if (last_note)
4efb91df
RH
386 {
387 /* The ARGS_SIZE notes are *not* cumulative. They represent an
388 absolute value, and the "most recent" note wins. */
389 if (!after)
390 XEXP (last_note, 0) = XEXP (note, 0);
391 }
a182fb6b 392 else
9a08d230 393 add_reg_note (last, REG_ARGS_SIZE, XEXP (note, 0));
a182fb6b
JJ
394}
395
3dce0964
RH
/* Merge any REG_CFA_ADJUST_CFA note from SRC into DST.
   AFTER is true iff DST follows SRC in the instruction stream.
   If DST already has such a note, the two SET expressions are composed
   (the later one's source rewritten in terms of the earlier one) so DST
   ends up with a single note describing the combined adjustment.  */

static void
maybe_merge_cfa_adjust (rtx_insn *dst, rtx_insn *src, bool after)
{
  rtx snote = NULL, dnote = NULL;
  rtx sexp, dexp;
  rtx exp1, exp2;

  if (RTX_FRAME_RELATED_P (src))
    snote = find_reg_note (src, REG_CFA_ADJUST_CFA, NULL_RTX);
  if (snote == NULL)
    /* Nothing to merge.  */
    return;
  sexp = XEXP (snote, 0);

  if (RTX_FRAME_RELATED_P (dst))
    dnote = find_reg_note (dst, REG_CFA_ADJUST_CFA, NULL_RTX);
  if (dnote == NULL)
    {
      /* DST has no note yet: simply transfer SRC's expression.  */
      add_reg_note (dst, REG_CFA_ADJUST_CFA, sexp);
      return;
    }
  dexp = XEXP (dnote, 0);

  gcc_assert (GET_CODE (sexp) == SET);
  gcc_assert (GET_CODE (dexp) == SET);

  /* EXP1 is the later adjustment; substitute the earlier one (EXP2)
     into it so EXP1 expresses the total.  */
  if (after)
    exp1 = dexp, exp2 = sexp;
  else
    exp1 = sexp, exp2 = dexp;

  SET_SRC (exp1) = simplify_replace_rtx (SET_SRC (exp1), SET_DEST (exp2),
					 SET_SRC (exp2));
  XEXP (dnote, 0) = exp1;
}
433
3825692d
RH
434/* Return the next (or previous) active insn within BB. */
435
71e88baf
DM
436static rtx_insn *
437prev_active_insn_bb (basic_block bb, rtx_insn *insn)
3825692d
RH
438{
439 for (insn = PREV_INSN (insn);
440 insn != PREV_INSN (BB_HEAD (bb));
441 insn = PREV_INSN (insn))
442 if (active_insn_p (insn))
443 return insn;
71e88baf 444 return NULL;
3825692d
RH
445}
446
71e88baf
DM
447static rtx_insn *
448next_active_insn_bb (basic_block bb, rtx_insn *insn)
3825692d
RH
449{
450 for (insn = NEXT_INSN (insn);
451 insn != NEXT_INSN (BB_END (bb));
452 insn = NEXT_INSN (insn))
453 if (active_insn_p (insn))
454 return insn;
71e88baf 455 return NULL;
3825692d
RH
456}
457
/* If INSN has a REG_ARGS_SIZE note, if possible move it to PREV.  Otherwise
   search for a nearby candidate within BB where we can stick the note.
   The note must land somewhere, or the unwinder loses track of the
   argument-area size; as a last resort a dummy clobber insn is emitted
   to carry it.  */

static void
force_move_args_size_note (basic_block bb, rtx_insn *prev, rtx_insn *insn)
{
  rtx note;
  rtx_insn *test, *next_candidate, *prev_candidate;

  /* If PREV exists, tail-call to the logic in the other function.  */
  if (prev)
    {
      maybe_move_args_size_note (prev, insn, false);
      return;
    }

  /* First, make sure there's anything that needs doing.  */
  note = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
  if (note == NULL)
    return;

  /* We need to find a spot between the previous and next exception points
     where we can place the note and "properly" deallocate the arguments.  */
  next_candidate = prev_candidate = NULL;

  /* It is often the case that we have insns in the order:
	call
	add sp (previous deallocation)
	sub sp (align for next arglist)
	push arg
     and the add/sub cancel.  Therefore we begin by searching forward.  */

  test = insn;
  while ((test = next_active_insn_bb (bb, test)) != NULL)
    {
      /* Found an existing note: nothing to do.  */
      if (find_reg_note (test, REG_ARGS_SIZE, NULL_RTX))
	return;
      /* Found something that affects unwinding.  Stop searching.  */
      if (CALL_P (test) || !insn_nothrow_p (test))
	break;
      if (next_candidate == NULL)
	next_candidate = test;
    }

  test = insn;
  while ((test = prev_active_insn_bb (bb, test)) != NULL)
    {
      rtx tnote;
      /* Found a place that seems logical to adjust the stack.  */
      tnote = find_reg_note (test, REG_ARGS_SIZE, NULL_RTX);
      if (tnote)
	{
	  XEXP (tnote, 0) = XEXP (note, 0);
	  return;
	}
      if (prev_candidate == NULL)
	prev_candidate = test;
      /* Found something that affects unwinding.  Stop searching.  */
      if (CALL_P (test) || !insn_nothrow_p (test))
	break;
    }

  if (prev_candidate)
    test = prev_candidate;
  else if (next_candidate)
    test = next_candidate;
  else
    {
      /* ??? We *must* have a place, lest we ICE on the lost adjustment.
	 Options are: dummy clobber insn, nop, or prevent the removal of
	 the sp += 0 insn.  */
      /* TODO: Find another way to indicate to the dwarf2 code that we
	 have not in fact lost an adjustment.  */
      test = emit_insn_before (gen_rtx_CLOBBER (VOIDmode, const0_rtx), insn);
    }
  add_reg_note (test, REG_ARGS_SIZE, XEXP (note, 0));
}
536
c7a0240a
SB
/* Subroutine of combine_stack_adjustments, called for each basic block.
   Scans BB forward, remembering the last stack-adjustment insn seen
   (LAST_SP_SET / LAST_SP_ADJUST) plus all intervening stack references
   (REFLIST), and merges consecutive adjustments when legal.  LIVE is a
   scratch bitmap simulated forward through the block; LAST_SP_LIVE
   snapshots liveness at LAST_SP_SET when that insn is the postreload
   "store equivalent register" form, since replacing it requires checking
   clobbered hard regs are dead there.  */

static void
combine_stack_adjustments_for_block (basic_block bb, bitmap live)
{
  HOST_WIDE_INT last_sp_adjust = 0;
  rtx_insn *last_sp_set = NULL;
  rtx_insn *last2_sp_set = NULL;
  bitmap last_sp_live = NULL;
  struct csa_reflist *reflist = NULL;
  bitmap copy = NULL;
  rtx_insn *insn, *next;
  rtx set;
  bool end_of_block = false;

  bitmap_copy (live, DF_LR_IN (bb));
  df_simulate_initialize_forwards (bb, live);

  for (insn = BB_HEAD (bb); !end_of_block ; insn = next)
    {
      end_of_block = insn == BB_END (bb);
      next = NEXT_INSN (insn);

      if (! INSN_P (insn))
	continue;

      set = single_set_for_csa (insn);
      /* Stack-check probes must not be touched.  */
      if (set && find_reg_note (insn, REG_STACK_CHECK, NULL_RTX))
	set = NULL_RTX;
      if (set)
	{
	  rtx dest = SET_DEST (set);
	  rtx src = SET_SRC (set);
	  HOST_WIDE_INT this_adjust = 0;

	  /* Find constant additions to the stack pointer.  */
	  if (dest == stack_pointer_rtx
	      && GET_CODE (src) == PLUS
	      && XEXP (src, 0) == stack_pointer_rtx
	      && CONST_INT_P (XEXP (src, 1)))
	    this_adjust = INTVAL (XEXP (src, 1));
	  /* Or such additions turned by postreload into a store of
	     equivalent register.  */
	  else if (dest == stack_pointer_rtx
		   && REG_P (src)
		   && REGNO (src) != STACK_POINTER_REGNUM)
	    if (rtx equal = find_reg_note (insn, REG_EQUAL, NULL_RTX))
	      if (GET_CODE (XEXP (equal, 0)) == PLUS
		  && XEXP (XEXP (equal, 0), 0) == stack_pointer_rtx
		  && CONST_INT_P (XEXP (XEXP (equal, 0), 1)))
		this_adjust = INTVAL (XEXP (XEXP (equal, 0), 1));

	  if (this_adjust)
	    {
	      /* If we've not seen an adjustment previously, record
		 it now and continue.  */
	      if (! last_sp_set)
		{
		  last_sp_set = insn;
		  last_sp_adjust = this_adjust;
		  if (REG_P (src))
		    {
		      /* Snapshot liveness here for the later clobber
			 check; COPY is allocated lazily and reused.  */
		      if (copy == NULL)
			copy = BITMAP_ALLOC (&reg_obstack);
		      last_sp_live = copy;
		      bitmap_copy (last_sp_live, live);
		    }
		  else
		    last_sp_live = NULL;
		  df_simulate_one_insn_forwards (bb, insn, live);
		  continue;
		}

	      /* If not all recorded refs can be adjusted, or the
		 adjustment is now too large for a constant addition,
		 we cannot merge the two stack adjustments.

		 Also we need to be careful to not move stack pointer
		 such that we create stack accesses outside the allocated
		 area.  We can combine an allocation into the first insn,
		 or a deallocation into the second insn.  We cannot
		 combine an allocation followed by a deallocation.

		 The only somewhat frequent occurrence of the later is when
		 a function allocates a stack frame but does not use it.
		 For this case, we would need to analyze rtl stream to be
		 sure that allocated area is really unused.  This means not
		 only checking the memory references, but also all registers
		 or global memory references possibly containing a stack
		 frame address.

		 Perhaps the best way to address this problem is to teach
		 gcc not to allocate stack for objects never used.  */

	      /* Combine an allocation into the first instruction.  */
	      if (STACK_GROWS_DOWNWARD ? this_adjust <= 0 : this_adjust >= 0)
		{
		  if (no_unhandled_cfa (insn)
		      && try_apply_stack_adjustment (last_sp_set, reflist,
						     last_sp_adjust
						     + this_adjust,
						     this_adjust,
						     last_sp_live,
						     insn))
		    {
		      /* It worked!  */
		      maybe_move_args_size_note (last_sp_set, insn, false);
		      maybe_merge_cfa_adjust (last_sp_set, insn, false);
		      delete_insn (insn);
		      last_sp_adjust += this_adjust;
		      last_sp_live = NULL;
		      continue;
		    }
		}

	      /* Otherwise we have a deallocation.  Do not combine with
		 a previous allocation.  Combine into the second insn.  */
	      else if (STACK_GROWS_DOWNWARD
		       ? last_sp_adjust >= 0 : last_sp_adjust <= 0)
		{
		  if (no_unhandled_cfa (last_sp_set)
		      && !REG_P (src)
		      && try_apply_stack_adjustment (insn, reflist,
						     last_sp_adjust
						     + this_adjust,
						     -last_sp_adjust,
						     NULL, NULL))
		    {
		      /* It worked!  */
		      maybe_move_args_size_note (insn, last_sp_set, true);
		      maybe_merge_cfa_adjust (insn, last_sp_set, true);
		      delete_insn (last_sp_set);
		      last_sp_set = insn;
		      last_sp_adjust += this_adjust;
		      last_sp_live = NULL;
		      free_csa_reflist (reflist);
		      reflist = NULL;
		      df_simulate_one_insn_forwards (bb, insn, live);
		      continue;
		    }
		}

	      /* Combination failed.  Restart processing from here.  If
		 deallocation+allocation conspired to cancel, we can
		 delete the old deallocation insn.  */
	      if (last_sp_set)
		{
		  if (last_sp_adjust == 0 && no_unhandled_cfa (last_sp_set))
		    {
		      maybe_move_args_size_note (insn, last_sp_set, true);
		      maybe_merge_cfa_adjust (insn, last_sp_set, true);
		      delete_insn (last_sp_set);
		    }
		  else
		    last2_sp_set = last_sp_set;
		}
	      free_csa_reflist (reflist);
	      reflist = NULL;
	      last_sp_set = insn;
	      last_sp_adjust = this_adjust;
	      if (REG_P (src))
		{
		  if (copy == NULL)
		    copy = BITMAP_ALLOC (&reg_obstack);
		  last_sp_live = copy;
		  bitmap_copy (last_sp_live, live);
		}
	      else
		last_sp_live = NULL;
	      df_simulate_one_insn_forwards (bb, insn, live);
	      continue;
	    }

	  /* Find a store with pre-(dec|inc)rement or pre-modify of exactly
	     the previous adjustment and turn it into a simple store.  This
	     is equivalent to anticipating the stack adjustment so this must
	     be an allocation.  */
	  if (MEM_P (dest)
	      && ((STACK_GROWS_DOWNWARD
		   ? (GET_CODE (XEXP (dest, 0)) == PRE_DEC
		      && known_eq (last_sp_adjust,
				   GET_MODE_SIZE (GET_MODE (dest))))
		   : (GET_CODE (XEXP (dest, 0)) == PRE_INC
		      && known_eq (-last_sp_adjust,
				   GET_MODE_SIZE (GET_MODE (dest)))))
		  || ((STACK_GROWS_DOWNWARD
		       ? last_sp_adjust >= 0 : last_sp_adjust <= 0)
		      && GET_CODE (XEXP (dest, 0)) == PRE_MODIFY
		      && GET_CODE (XEXP (XEXP (dest, 0), 1)) == PLUS
		      && XEXP (XEXP (XEXP (dest, 0), 1), 0)
			 == stack_pointer_rtx
		      && GET_CODE (XEXP (XEXP (XEXP (dest, 0), 1), 1))
			 == CONST_INT
		      && INTVAL (XEXP (XEXP (XEXP (dest, 0), 1), 1))
			 == -last_sp_adjust))
	      && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx
	      && !reg_mentioned_p (stack_pointer_rtx, src)
	      && memory_address_p (GET_MODE (dest), stack_pointer_rtx)
	      && try_apply_stack_adjustment (insn, reflist, 0,
					     -last_sp_adjust,
					     NULL, NULL))
	    {
	      if (last2_sp_set)
		maybe_move_args_size_note (last2_sp_set, last_sp_set, false);
	      else
		maybe_move_args_size_note (insn, last_sp_set, true);
	      delete_insn (last_sp_set);
	      free_csa_reflist (reflist);
	      reflist = NULL;
	      last_sp_set = NULL;
	      last_sp_adjust = 0;
	      last_sp_live = NULL;
	      df_simulate_one_insn_forwards (bb, insn, live);
	      continue;
	    }
	}

      /* An insn we could fully account for: record its stack references
	 and keep scanning.  */
      if (!CALL_P (insn) && last_sp_set && record_stack_refs (insn, &reflist))
	{
	  df_simulate_one_insn_forwards (bb, insn, live);
	  continue;
	}

      /* Otherwise, we were not able to process the instruction.
	 Do not continue collecting data across such a one.  */
      if (last_sp_set
	  && (CALL_P (insn)
	      || reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))))
	{
	  if (last_sp_set && last_sp_adjust == 0)
	    {
	      force_move_args_size_note (bb, last2_sp_set, last_sp_set);
	      delete_insn (last_sp_set);
	    }
	  free_csa_reflist (reflist);
	  reflist = NULL;
	  last2_sp_set = NULL;
	  last_sp_set = NULL;
	  last_sp_adjust = 0;
	  last_sp_live = NULL;
	}

      df_simulate_one_insn_forwards (bb, insn, live);
    }

  /* A trailing net-zero adjustment at block end can be deleted too.  */
  if (last_sp_set && last_sp_adjust == 0)
    {
      force_move_args_size_note (bb, last2_sp_set, last_sp_set);
      delete_insn (last_sp_set);
    }

  if (reflist)
    free_csa_reflist (reflist);
  if (copy)
    BITMAP_FREE (copy);
}
793\f
11a687e7
RH
/* Pass execute worker: refresh dataflow (including death notes, which
   the liveness simulation relies on) and run the combiner.  Always
   returns 0 (no extra TODO flags).  */

static unsigned int
rest_of_handle_stack_adjustments (void)
{
  df_note_add_problem ();
  df_analyze ();
  combine_stack_adjustments ();
  return 0;
}
802
27a4cd48
DM
namespace {

/* Static metadata describing the "csa" RTL pass.  */

const pass_data pass_data_stack_adjustments =
{
  RTL_PASS, /* type */
  "csa", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_COMBINE_STACK_ADJUST, /* tv_id */
  0, /* properties_required */
  0, /* properties_provided */
  0, /* properties_destroyed */
  0, /* todo_flags_start */
  TODO_df_finish, /* todo_flags_finish */
};
27a4cd48
DM
817
/* Pass wrapper class; gate () is defined out of line below.  */

class pass_stack_adjustments : public rtl_opt_pass
{
public:
  pass_stack_adjustments (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_stack_adjustments, ctxt)
  {}

  /* opt_pass methods: */
  virtual bool gate (function *);
  virtual unsigned int execute (function *)
    {
      return rest_of_handle_stack_adjustments ();
    }

}; // class pass_stack_adjustments
833
1a3d085c
TS
/* Decide whether the pass should run for the current compilation.  */

bool
pass_stack_adjustments::gate (function *)
{
  /* This is kind of a heuristic.  We need to run combine_stack_adjustments
     even for machines with possibly nonzero TARGET_RETURN_POPS_ARGS
     and ACCUMULATE_OUTGOING_ARGS.  We expect that only ports having
     push instructions will have popping returns.  */
#ifndef PUSH_ROUNDING
  if (ACCUMULATE_OUTGOING_ARGS)
    return false;
#endif
  return flag_combine_stack_adjustments;
}
847
27a4cd48
DM
} // anon namespace

/* Factory entry point: build a fresh instance of the pass.  */

rtl_opt_pass *
make_pass_stack_adjustments (gcc::context *ctxt)
{
  return new pass_stack_adjustments (ctxt);
}