/* Combine stack adjustments.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009,
   2010 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Track stack adjustments and stack memory references.  Attempt to
   reduce the number of stack adjustments by back-propagating across
   the memory references.

   This is intended primarily for use with targets that do not define
   ACCUMULATE_OUTGOING_ARGS.  It is of significantly more value to
   targets that define PREFERRED_STACK_BOUNDARY more aligned than
   STACK_BOUNDARY (e.g. x86), or if not all registers can be pushed
   (e.g. x86 fp regs) which would ordinarily have to be implemented
   as a sub/mov pair due to restrictions in calls.c.

   Propagation stops when any of the insns that need adjusting are
   (a) no longer valid because we've exceeded their range, (b) a
   non-trivial push instruction, or (c) a call instruction.

   Restriction B is based on the assumption that push instructions
   are smaller or faster.  If a port really wants to remove all
   pushes, it should have defined ACCUMULATE_OUTGOING_ARGS.  The
   one exception that is made is for an add immediately followed
   by a push.  */
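
/* A hypothetical illustration of the intended transformation, with the
   stack growing downward:

	call	foo
	sp := sp + 16		;; deallocate foo's argument block
	sp := sp - 16		;; allocate the next argument block
	mem[sp + 0] := arg0

   The two adjustments cancel, so the pass deletes them and rewrites any
   stack memory references recorded in between against the new value of
   the stack pointer.  */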

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "basic-block.h"
#include "df.h"
#include "except.h"
#include "reload.h"
#include "timevar.h"
#include "tree-pass.h"

\f
/* Turn STACK_GROWS_DOWNWARD into a boolean.  */
#ifdef STACK_GROWS_DOWNWARD
#undef STACK_GROWS_DOWNWARD
#define STACK_GROWS_DOWNWARD 1
#else
#define STACK_GROWS_DOWNWARD 0
#endif

/* This structure records two kinds of stack references between stack
   adjusting instructions: stack references in memory addresses for
   regular insns and all stack references for debug insns.  */

struct csa_reflist
{
  HOST_WIDE_INT sp_offset;
  rtx insn, *ref;
  struct csa_reflist *next;
};

static int stack_memref_p (rtx);
static rtx single_set_for_csa (rtx);
static void free_csa_reflist (struct csa_reflist *);
static struct csa_reflist *record_one_stack_ref (rtx, rtx *,
                                                 struct csa_reflist *);
static int try_apply_stack_adjustment (rtx, struct csa_reflist *,
                                       HOST_WIDE_INT, HOST_WIDE_INT);
static void combine_stack_adjustments_for_block (basic_block);
static int record_stack_refs (rtx *, void *);


/* Main entry point for stack adjustment combination.  */

static void
combine_stack_adjustments (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    combine_stack_adjustments_for_block (bb);
}

/* Recognize a MEM of the form (sp) or (plus sp const).  */

static int
stack_memref_p (rtx x)
{
  if (!MEM_P (x))
    return 0;
  x = XEXP (x, 0);

  if (x == stack_pointer_rtx)
    return 1;
  if (GET_CODE (x) == PLUS
      && XEXP (x, 0) == stack_pointer_rtx
      && CONST_INT_P (XEXP (x, 1)))
    return 1;

  return 0;
}

/* Recognize either normal single_set or the hack in i386.md for
   tying fp and sp adjustments.  */

static rtx
single_set_for_csa (rtx insn)
{
  int i;
  rtx tmp = single_set (insn);
  if (tmp)
    return tmp;

  if (!NONJUMP_INSN_P (insn)
      || GET_CODE (PATTERN (insn)) != PARALLEL)
    return NULL_RTX;

  tmp = PATTERN (insn);
  if (GET_CODE (XVECEXP (tmp, 0, 0)) != SET)
    return NULL_RTX;

  for (i = 1; i < XVECLEN (tmp, 0); ++i)
    {
      rtx this_rtx = XVECEXP (tmp, 0, i);

      /* The special case is allowing a no-op set.  */
      if (GET_CODE (this_rtx) == SET
          && SET_SRC (this_rtx) == SET_DEST (this_rtx))
        ;
      else if (GET_CODE (this_rtx) != CLOBBER
               && GET_CODE (this_rtx) != USE)
        return NULL_RTX;
    }

  return XVECEXP (tmp, 0, 0);
}
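
/* A hypothetical illustration (not copied from i386.md): the loop above
   accepts a PARALLEL whose first element is the interesting SET and whose
   remaining elements are only CLOBBERs, USEs, or no-op sets, e.g.

	(parallel [(set (reg sp) (plus (reg sp) (const_int -16)))
		   (clobber (reg flags))
		   (set (reg bp) (reg bp))])

   and returns the first element as if it were a plain single_set.  */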

/* Free the list of csa_reflist nodes.  */

static void
free_csa_reflist (struct csa_reflist *reflist)
{
  struct csa_reflist *next;
  for (; reflist ; reflist = next)
    {
      next = reflist->next;
      free (reflist);
    }
}

/* Create a new csa_reflist node from the given stack reference.
   It is already known that the reference is either a MEM satisfying the
   predicate stack_memref_p or a REG representing the stack pointer.  */

static struct csa_reflist *
record_one_stack_ref (rtx insn, rtx *ref, struct csa_reflist *next_reflist)
{
  struct csa_reflist *ml;

  ml = XNEW (struct csa_reflist);

  if (REG_P (*ref) || XEXP (*ref, 0) == stack_pointer_rtx)
    ml->sp_offset = 0;
  else
    ml->sp_offset = INTVAL (XEXP (XEXP (*ref, 0), 1));

  ml->insn = insn;
  ml->ref = ref;
  ml->next = next_reflist;

  return ml;
}

/* Attempt to change the stack adjusting insn INSN so that it adjusts by
   NEW_ADJUST, and update each of the memories and stack references in
   REFLIST to compensate for the DELTA change in the stack pointer.
   Return true on success.  */

static int
try_apply_stack_adjustment (rtx insn, struct csa_reflist *reflist,
                            HOST_WIDE_INT new_adjust, HOST_WIDE_INT delta)
{
  struct csa_reflist *ml;
  rtx set;

  set = single_set_for_csa (insn);
  if (MEM_P (SET_DEST (set)))
    validate_change (insn, &SET_DEST (set),
                     replace_equiv_address (SET_DEST (set), stack_pointer_rtx),
                     1);
  else
    validate_change (insn, &XEXP (SET_SRC (set), 1), GEN_INT (new_adjust), 1);

  for (ml = reflist; ml ; ml = ml->next)
    {
      rtx new_addr = plus_constant (stack_pointer_rtx, ml->sp_offset - delta);
      rtx new_val;

      if (MEM_P (*ml->ref))
        new_val = replace_equiv_address_nv (*ml->ref, new_addr);
      else if (GET_MODE (*ml->ref) == GET_MODE (stack_pointer_rtx))
        new_val = new_addr;
      else
        new_val = lowpart_subreg (GET_MODE (*ml->ref), new_addr,
                                  GET_MODE (new_addr));
      validate_change (ml->insn, ml->ref, new_val, 1);
    }

  if (apply_change_group ())
    {
      /* Succeeded.  Update our knowledge of the stack references.  */
      for (ml = reflist; ml ; ml = ml->next)
        ml->sp_offset -= delta;

      return 1;
    }
  else
    return 0;
}
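
/* A worked example (hypothetical values, for illustration), with the stack
   growing downward:

	sp := sp - 16		;; last_sp_set, last_sp_adjust = -16
	mem[sp + 4] := r1	;; recorded ref, sp_offset = 4
	sp := sp - 32		;; this_adjust = -32

   Folding the second adjustment into the first calls this function with
   NEW_ADJUST = -48 and DELTA = -32, producing

	sp := sp - 48
	mem[sp + 36] := r1	;; 4 - (-32) = 36

   so the store still addresses the same stack slot.  */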

/* Called via for_each_rtx and used to record all stack memory and other
   references in the insn and discard all other stack pointer references.  */
struct record_stack_refs_data
{
  rtx insn;
  struct csa_reflist *reflist;
};

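/* record_stack_refs is used as a for_each_rtx callback: returning 0
   continues the walk into sub-expressions, a negative value skips the
   sub-expressions of the current expression but keeps walking, and a
   positive value aborts the walk.  The caller in
   combine_stack_adjustments_for_block treats a nonzero result of the
   whole walk as "this insn cannot be handled".  */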
static int
record_stack_refs (rtx *xp, void *data)
{
  rtx x = *xp;
  struct record_stack_refs_data *d =
    (struct record_stack_refs_data *) data;
  if (!x)
    return 0;
  switch (GET_CODE (x))
    {
    case MEM:
      if (!reg_mentioned_p (stack_pointer_rtx, x))
        return -1;
      /* We are not able to correctly handle all possible memrefs containing
         the stack pointer, so this check is necessary.  */
      if (stack_memref_p (x))
        {
          d->reflist = record_one_stack_ref (d->insn, xp, d->reflist);
          return -1;
        }
      /* Try harder for DEBUG_INSNs, handle e.g. (mem (mem (sp + 16) + 4)).  */
      return !DEBUG_INSN_P (d->insn);
    case REG:
      /* ??? We want to be able to handle non-memory stack pointer
         references later.  For now just discard all insns referring to
         the stack pointer outside mem expressions.  We would probably
         want to teach validate_replace to simplify expressions first.

         We can't just compare with STACK_POINTER_RTX because the
         reference to the stack pointer might be in some other mode.
         In particular, an explicit clobber in an asm statement will
         result in a QImode clobber.

         In DEBUG_INSNs, we want to replace all occurrences, otherwise
         they will cause -fcompare-debug failures.  */
      if (REGNO (x) == STACK_POINTER_REGNUM)
        {
          if (!DEBUG_INSN_P (d->insn))
            return 1;
          d->reflist = record_one_stack_ref (d->insn, xp, d->reflist);
          return -1;
        }
      break;
    default:
      break;
    }
  return 0;
}

/* If INSN has a REG_ARGS_SIZE note, move it to LAST.
   AFTER is true iff LAST follows INSN in the instruction stream.  */
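/* A hypothetical illustration (the values are invented): if INSN carries
   (REG_ARGS_SIZE 0) and is being merged into an earlier insn LAST that
   carries (REG_ARGS_SIZE 16), the surviving insn must end up with 0,
   because INSN's note is the more recent absolute value.  If LAST instead
   follows INSN, LAST's own note is already the most recent one and INSN's
   note is simply dropped.  */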

static void
maybe_move_args_size_note (rtx last, rtx insn, bool after)
{
  rtx note, last_note;

  note = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
  if (note == NULL)
    return;

  last_note = find_reg_note (last, REG_ARGS_SIZE, NULL_RTX);
  if (last_note)
    {
      /* The ARGS_SIZE notes are *not* cumulative.  They represent an
         absolute value, and the "most recent" note wins.  */
      if (!after)
        XEXP (last_note, 0) = XEXP (note, 0);
    }
  else
    add_reg_note (last, REG_ARGS_SIZE, XEXP (note, 0));
}

/* Return the next (or previous) active insn within BB.  */

static rtx
prev_active_insn_bb (basic_block bb, rtx insn)
{
  for (insn = PREV_INSN (insn);
       insn != PREV_INSN (BB_HEAD (bb));
       insn = PREV_INSN (insn))
    if (active_insn_p (insn))
      return insn;
  return NULL_RTX;
}

static rtx
next_active_insn_bb (basic_block bb, rtx insn)
{
  for (insn = NEXT_INSN (insn);
       insn != NEXT_INSN (BB_END (bb));
       insn = NEXT_INSN (insn))
    if (active_insn_p (insn))
      return insn;
  return NULL_RTX;
}

/* If INSN has a REG_ARGS_SIZE note, if possible move it to PREV.  Otherwise
   search for a nearby candidate within BB where we can stick the note.  */

static void
force_move_args_size_note (basic_block bb, rtx prev, rtx insn)
{
  rtx note, test, next_candidate, prev_candidate;

  /* If PREV exists, tail-call to the logic in the other function.  */
  if (prev)
    {
      maybe_move_args_size_note (prev, insn, false);
      return;
    }

  /* First, make sure there's anything that needs doing.  */
  note = find_reg_note (insn, REG_ARGS_SIZE, NULL_RTX);
  if (note == NULL)
    return;

  /* We need to find a spot between the previous and next exception points
     where we can place the note and "properly" deallocate the arguments.  */
  next_candidate = prev_candidate = NULL;

  /* It is often the case that we have insns in the order:
	call
	add sp (previous deallocation)
	sub sp (align for next arglist)
	push arg
     and the add/sub cancel.  Therefore we begin by searching forward.  */

  test = insn;
  while ((test = next_active_insn_bb (bb, test)) != NULL)
    {
      /* Found an existing note: nothing to do.  */
      if (find_reg_note (test, REG_ARGS_SIZE, NULL_RTX))
        return;
      /* Found something that affects unwinding.  Stop searching.  */
      if (CALL_P (test) || !insn_nothrow_p (test))
        break;
      if (next_candidate == NULL)
        next_candidate = test;
    }

  test = insn;
  while ((test = prev_active_insn_bb (bb, test)) != NULL)
    {
      rtx tnote;
      /* Found a place that seems logical to adjust the stack.  */
      tnote = find_reg_note (test, REG_ARGS_SIZE, NULL_RTX);
      if (tnote)
        {
          XEXP (tnote, 0) = XEXP (note, 0);
          return;
        }
      if (prev_candidate == NULL)
        prev_candidate = test;
      /* Found something that affects unwinding.  Stop searching.  */
      if (CALL_P (test) || !insn_nothrow_p (test))
        break;
    }

  if (prev_candidate)
    test = prev_candidate;
  else if (next_candidate)
    test = next_candidate;
  else
    {
      /* ??? We *must* have a place, lest we ICE on the lost adjustment.
         Options are: dummy clobber insn, nop, or prevent the removal of
         the sp += 0 insn.  Defer that decision until we can prove this
         can actually happen.  */
      gcc_unreachable ();
    }
  add_reg_note (test, REG_ARGS_SIZE, XEXP (note, 0));
}

/* Subroutine of combine_stack_adjustments, called for each basic block.  */

static void
combine_stack_adjustments_for_block (basic_block bb)
{
  HOST_WIDE_INT last_sp_adjust = 0;
  rtx last_sp_set = NULL_RTX;
  rtx last2_sp_set = NULL_RTX;
  struct csa_reflist *reflist = NULL;
  rtx insn, next, set;
  struct record_stack_refs_data data;
  bool end_of_block = false;

  for (insn = BB_HEAD (bb); !end_of_block ; insn = next)
    {
      end_of_block = insn == BB_END (bb);
      next = NEXT_INSN (insn);

      if (! INSN_P (insn))
        continue;

      set = single_set_for_csa (insn);
      if (set)
        {
          rtx dest = SET_DEST (set);
          rtx src = SET_SRC (set);

          /* Find constant additions to the stack pointer.  */
          if (dest == stack_pointer_rtx
              && GET_CODE (src) == PLUS
              && XEXP (src, 0) == stack_pointer_rtx
              && CONST_INT_P (XEXP (src, 1)))
            {
              HOST_WIDE_INT this_adjust = INTVAL (XEXP (src, 1));

              /* If we've not seen an adjustment previously, record
                 it now and continue.  */
              if (! last_sp_set)
                {
                  last_sp_set = insn;
                  last_sp_adjust = this_adjust;
                  continue;
                }

              /* If not all recorded refs can be adjusted, or the
                 adjustment is now too large for a constant addition,
                 we cannot merge the two stack adjustments.

                 Also we need to be careful not to move the stack pointer
                 such that we create stack accesses outside the allocated
                 area.  We can combine an allocation into the first insn,
                 or a deallocation into the second insn.  We can not
                 combine an allocation followed by a deallocation.

                 The only somewhat frequent occurrence of the latter is when
                 a function allocates a stack frame but does not use it.
                 For this case, we would need to analyze the rtl stream to
                 be sure that the allocated area is really unused.  This
                 means not only checking the memory references, but also
                 all registers or global memory references possibly
                 containing a stack frame address.

                 Perhaps the best way to address this problem is to teach
                 gcc not to allocate stack for objects never used.  */
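
              /* A hypothetical illustration of the forbidden
                 allocation-followed-by-deallocation case, with the stack
                 growing downward:

			sp := sp - 32		;; allocation
			mem[sp + 8] := r1	;; store into the new area
			sp := sp + 32		;; deallocation

                 Folding the two adjustments away would leave the store
                 addressing memory beyond the stack pointer, outside any
                 allocated area.  */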

              /* Combine an allocation into the first instruction.  */
              if (STACK_GROWS_DOWNWARD ? this_adjust <= 0 : this_adjust >= 0)
                {
                  if (try_apply_stack_adjustment (last_sp_set, reflist,
                                                  last_sp_adjust + this_adjust,
                                                  this_adjust))
                    {
                      /* It worked!  */
                      maybe_move_args_size_note (last_sp_set, insn, false);
                      delete_insn (insn);
                      last_sp_adjust += this_adjust;
                      continue;
                    }
                }

              /* Otherwise we have a deallocation.  Do not combine with
                 a previous allocation.  Combine into the second insn.  */
              else if (STACK_GROWS_DOWNWARD
                       ? last_sp_adjust >= 0 : last_sp_adjust <= 0)
                {
                  if (try_apply_stack_adjustment (insn, reflist,
                                                  last_sp_adjust + this_adjust,
                                                  -last_sp_adjust))
                    {
                      /* It worked!  */
                      maybe_move_args_size_note (insn, last_sp_set, true);
                      delete_insn (last_sp_set);
                      last_sp_set = insn;
                      last_sp_adjust += this_adjust;
                      free_csa_reflist (reflist);
                      reflist = NULL;
                      continue;
                    }
                }

              /* Combination failed.  Restart processing from here.  If
                 deallocation+allocation conspired to cancel, we can
                 delete the old deallocation insn.  */
              if (last_sp_set)
                {
                  if (last_sp_adjust == 0)
                    {
                      maybe_move_args_size_note (insn, last_sp_set, true);
                      delete_insn (last_sp_set);
                    }
                  else
                    last2_sp_set = last_sp_set;
                }
              free_csa_reflist (reflist);
              reflist = NULL;
              last_sp_set = insn;
              last_sp_adjust = this_adjust;
              continue;
            }

          /* Find a store with pre-(dec|inc)rement or pre-modify of exactly
             the previous adjustment and turn it into a simple store.  This
             is equivalent to anticipating the stack adjustment so this must
             be an allocation.  */
          if (MEM_P (dest)
              && ((STACK_GROWS_DOWNWARD
                   ? (GET_CODE (XEXP (dest, 0)) == PRE_DEC
                      && last_sp_adjust
                         == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (dest)))
                   : (GET_CODE (XEXP (dest, 0)) == PRE_INC
                      && last_sp_adjust
                         == -(HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (dest))))
                  || ((STACK_GROWS_DOWNWARD
                       ? last_sp_adjust >= 0 : last_sp_adjust <= 0)
                      && GET_CODE (XEXP (dest, 0)) == PRE_MODIFY
                      && GET_CODE (XEXP (XEXP (dest, 0), 1)) == PLUS
                      && XEXP (XEXP (XEXP (dest, 0), 1), 0)
                         == stack_pointer_rtx
                      && GET_CODE (XEXP (XEXP (XEXP (dest, 0), 1), 1))
                         == CONST_INT
                      && INTVAL (XEXP (XEXP (XEXP (dest, 0), 1), 1))
                         == -last_sp_adjust))
              && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx
              && !reg_mentioned_p (stack_pointer_rtx, src)
              && memory_address_p (GET_MODE (dest), stack_pointer_rtx)
              && try_apply_stack_adjustment (insn, reflist, 0,
                                             -last_sp_adjust))
            {
              if (last2_sp_set)
                maybe_move_args_size_note (last2_sp_set, last_sp_set, false);
              else
                maybe_move_args_size_note (insn, last_sp_set, true);
              delete_insn (last_sp_set);
              free_csa_reflist (reflist);
              reflist = NULL;
              last_sp_set = NULL_RTX;
              last_sp_adjust = 0;
              continue;
            }
        }

      data.insn = insn;
      data.reflist = reflist;
      if (!CALL_P (insn) && last_sp_set
          && !for_each_rtx (&PATTERN (insn), record_stack_refs, &data))
        {
          reflist = data.reflist;
          continue;
        }
      reflist = data.reflist;

      /* Otherwise, we were not able to process the instruction.
         Do not continue collecting data across such an insn.  */
      if (last_sp_set
          && (CALL_P (insn)
              || reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))))
        {
          if (last_sp_set && last_sp_adjust == 0)
            {
              force_move_args_size_note (bb, last2_sp_set, last_sp_set);
              delete_insn (last_sp_set);
            }
          free_csa_reflist (reflist);
          reflist = NULL;
          last2_sp_set = NULL_RTX;
          last_sp_set = NULL_RTX;
          last_sp_adjust = 0;
        }
    }

  if (last_sp_set && last_sp_adjust == 0)
    {
      force_move_args_size_note (bb, last2_sp_set, last_sp_set);
      delete_insn (last_sp_set);
    }

  if (reflist)
    free_csa_reflist (reflist);
}
\f

static bool
gate_handle_stack_adjustments (void)
{
  return flag_combine_stack_adjustments;
}

static unsigned int
rest_of_handle_stack_adjustments (void)
{
  cleanup_cfg (flag_crossjumping ? CLEANUP_CROSSJUMP : 0);

  /* This is kind of a heuristic.  We need to run combine_stack_adjustments
     even for machines with possibly nonzero TARGET_RETURN_POPS_ARGS
     and ACCUMULATE_OUTGOING_ARGS.  We expect that only ports having
     push instructions will have popping returns.  */
#ifndef PUSH_ROUNDING
  if (!ACCUMULATE_OUTGOING_ARGS)
#endif
    {
      df_note_add_problem ();
      df_analyze ();
      combine_stack_adjustments ();
    }
  return 0;
}

struct rtl_opt_pass pass_stack_adjustments =
{
 {
  RTL_PASS,
  "csa",                                /* name */
  gate_handle_stack_adjustments,        /* gate */
  rest_of_handle_stack_adjustments,     /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_COMBINE_STACK_ADJUST,              /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_ggc_collect,                     /* todo_flags_finish */
 }
};