/* Combine stack adjustments.
   Copyright (C) 1987, 1988, 1989, 1992, 1993, 1994, 1995, 1996, 1997,
   1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Track stack adjustments and stack memory references.  Attempt to
   reduce the number of stack adjustments by back-propagating across
   the memory references.

   This is intended primarily for use with targets that do not define
   ACCUMULATE_OUTGOING_ARGS.  It is of significantly more value to
   targets that define PREFERRED_STACK_BOUNDARY more aligned than
   STACK_BOUNDARY (e.g. x86), or if not all registers can be pushed
   (e.g. x86 fp regs), which would ordinarily have to be implemented
   as a sub/mov pair due to restrictions in calls.c.

   Propagation stops when any of the insns that need adjusting are
   (a) no longer valid because we've exceeded their range, (b) a
   non-trivial push instruction, or (c) a call instruction.

   Restriction (b) is based on the assumption that push instructions
   are smaller or faster.  If a port really wants to remove all
   pushes, it should have defined ACCUMULATE_OUTGOING_ARGS.  The
   one exception is for an add immediately followed by a push.  */
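
/* As an illustrative sketch of the transformation (the insns and
   offsets here are invented for exposition, not taken from any
   particular target): with a downward-growing stack,

       (set sp (plus sp (const_int -16)))          ;; allocate 16 bytes
       (set (mem:SI (plus sp (const_int 8))) r1)   ;; use the new space
       (set sp (plus sp (const_int -16)))          ;; allocate 16 more

   can be rewritten as

       (set sp (plus sp (const_int -32)))
       (set (mem:SI (plus sp (const_int 24))) r1)

   folding the second allocation into the first and rebasing the
   intervening stack memory reference so it still names the same
   address relative to the moved stack pointer.  */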

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "rtl.h"
#include "tm_p.h"
#include "insn-config.h"
#include "recog.h"
#include "output.h"
#include "regs.h"
#include "hard-reg-set.h"
#include "flags.h"
#include "function.h"
#include "expr.h"
#include "basic-block.h"
#include "df.h"
#include "except.h"
#include "toplev.h"
#include "reload.h"
#include "timevar.h"
#include "tree-pass.h"

\f
/* Turn STACK_GROWS_DOWNWARD into a boolean.  */
#ifdef STACK_GROWS_DOWNWARD
#undef STACK_GROWS_DOWNWARD
#define STACK_GROWS_DOWNWARD 1
#else
#define STACK_GROWS_DOWNWARD 0
#endif

/* This structure records stack memory references between stack adjusting
   instructions.  */

struct csa_memlist
{
  HOST_WIDE_INT sp_offset;
  rtx insn, *mem;
  struct csa_memlist *next;
};
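
/* (A note on the fields: SP_OFFSET is the constant offset from the
   stack pointer in the recorded MEM's address, INSN is the insn
   containing the reference, and MEM points at the MEM rtx within
   that insn so it can be replaced in place.)  */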

static int stack_memref_p (rtx);
static rtx single_set_for_csa (rtx);
static void free_csa_memlist (struct csa_memlist *);
static struct csa_memlist *record_one_stack_memref (rtx, rtx *,
                                                    struct csa_memlist *);
static int try_apply_stack_adjustment (rtx, struct csa_memlist *,
                                       HOST_WIDE_INT, HOST_WIDE_INT);
static void combine_stack_adjustments_for_block (basic_block);
static int record_stack_memrefs (rtx *, void *);


/* Main entry point for stack adjustment combination.  */

static void
combine_stack_adjustments (void)
{
  basic_block bb;

  FOR_EACH_BB (bb)
    combine_stack_adjustments_for_block (bb);
}

/* Recognize a MEM of the form (sp) or (plus sp const).  */

static int
stack_memref_p (rtx x)
{
  if (!MEM_P (x))
    return 0;
  x = XEXP (x, 0);

  if (x == stack_pointer_rtx)
    return 1;
  if (GET_CODE (x) == PLUS
      && XEXP (x, 0) == stack_pointer_rtx
      && GET_CODE (XEXP (x, 1)) == CONST_INT)
    return 1;

  return 0;
}

/* Recognize either normal single_set or the hack in i386.md for
   tying fp and sp adjustments.  */
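
/* A sketch of the PARALLEL shape this accepts (simplified; the real
   pattern lives in i386.md): the first element is the SET we return,
   and every following element must be a no-op set, CLOBBER or USE,
   e.g.

       (parallel [(set sp (plus sp (const_int -4)))
                  (set fp fp)])    ;; no-op set tying fp to sp  */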

static rtx
single_set_for_csa (rtx insn)
{
  int i;
  rtx tmp = single_set (insn);
  if (tmp)
    return tmp;

  if (!NONJUMP_INSN_P (insn)
      || GET_CODE (PATTERN (insn)) != PARALLEL)
    return NULL_RTX;

  tmp = PATTERN (insn);
  if (GET_CODE (XVECEXP (tmp, 0, 0)) != SET)
    return NULL_RTX;

  for (i = 1; i < XVECLEN (tmp, 0); ++i)
    {
      rtx this_rtx = XVECEXP (tmp, 0, i);

      /* The special case is allowing a no-op set.  */
      if (GET_CODE (this_rtx) == SET
          && SET_SRC (this_rtx) == SET_DEST (this_rtx))
        ;
      else if (GET_CODE (this_rtx) != CLOBBER
               && GET_CODE (this_rtx) != USE)
        return NULL_RTX;
    }

  return XVECEXP (tmp, 0, 0);
}

/* Free the list of csa_memlist nodes.  */

static void
free_csa_memlist (struct csa_memlist *memlist)
{
  struct csa_memlist *next;
  for (; memlist; memlist = next)
    {
      next = memlist->next;
      free (memlist);
    }
}

/* Create a new csa_memlist node from the given memory reference.
   It is already known that the memory is stack_memref_p.  */

static struct csa_memlist *
record_one_stack_memref (rtx insn, rtx *mem, struct csa_memlist *next_memlist)
{
  struct csa_memlist *ml;

  ml = XNEW (struct csa_memlist);

  if (XEXP (*mem, 0) == stack_pointer_rtx)
    ml->sp_offset = 0;
  else
    ml->sp_offset = INTVAL (XEXP (XEXP (*mem, 0), 1));

  ml->insn = insn;
  ml->mem = mem;
  ml->next = next_memlist;

  return ml;
}

/* Attempt to change the constant term in the stack adjusting insn INSN
   to NEW_ADJUST, and rebase each of the memories in MEMLIST by DELTA so
   that each still refers to the same address relative to the moved
   stack pointer.  Return true on success.  */

static int
try_apply_stack_adjustment (rtx insn, struct csa_memlist *memlist,
                            HOST_WIDE_INT new_adjust, HOST_WIDE_INT delta)
{
  struct csa_memlist *ml;
  rtx set;

  set = single_set_for_csa (insn);
  validate_change (insn, &XEXP (SET_SRC (set), 1), GEN_INT (new_adjust), 1);

  for (ml = memlist; ml; ml = ml->next)
    validate_change
      (ml->insn, ml->mem,
       replace_equiv_address_nv (*ml->mem,
                                 plus_constant (stack_pointer_rtx,
                                                ml->sp_offset - delta)), 1);

  if (apply_change_group ())
    {
      /* Succeeded.  Update our knowledge of the memory references.  */
      for (ml = memlist; ml; ml = ml->next)
        ml->sp_offset -= delta;

      return 1;
    }
  else
    return 0;
}

/* Called via for_each_rtx and used to record all stack memory references in
   the insn and discard all other stack pointer references.  */
struct record_stack_memrefs_data
{
  rtx insn;
  struct csa_memlist *memlist;
};

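/* A note on for_each_rtx's return convention, which the callback below
   relies on: returning 0 continues the walk into sub-rtxes, returning
   -1 skips the sub-rtxes of the current expression, and any other
   nonzero value stops the traversal and is returned by for_each_rtx
   itself.  */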
static int
record_stack_memrefs (rtx *xp, void *data)
{
  rtx x = *xp;
  struct record_stack_memrefs_data *d =
    (struct record_stack_memrefs_data *) data;
  if (!x)
    return 0;
  switch (GET_CODE (x))
    {
    case MEM:
      if (!reg_mentioned_p (stack_pointer_rtx, x))
        return -1;
      /* We are not able to handle correctly all possible memrefs
         containing the stack pointer, so this check is necessary.  */
      if (stack_memref_p (x))
        {
          d->memlist = record_one_stack_memref (d->insn, xp, d->memlist);
          return -1;
        }
      return 1;
    case REG:
      /* ??? We want to be able to handle non-memory stack pointer
         references later.  For now just discard all insns referring to
         the stack pointer outside mem expressions.  We would probably
         want to teach validate_replace to simplify expressions first.

         We can't just compare with STACK_POINTER_RTX because the
         reference to the stack pointer might be in some other mode.
         In particular, an explicit clobber in an asm statement will
         result in a QImode clobber.  */
      if (REGNO (x) == STACK_POINTER_REGNUM)
        return 1;
      break;
    default:
      break;
    }
  return 0;
}

/* Adjust or create REG_FRAME_RELATED_EXPR note when merging a stack
   adjustment into a frame related insn.  */
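
/* (Background: a REG_FRAME_RELATED_EXPR note supplies the expression
   that the dwarf2 unwind machinery should use to describe a frame
   related insn in place of the insn's actual pattern; here it is a
   SEQUENCE whose elements record the individual stack adjustments
   that were merged, so the CFI still describes each step.)  */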

static void
adjust_frame_related_expr (rtx last_sp_set, rtx insn,
                           HOST_WIDE_INT this_adjust)
{
  rtx note = find_reg_note (last_sp_set, REG_FRAME_RELATED_EXPR, NULL_RTX);
  rtx new_expr = NULL_RTX;

  if (note == NULL_RTX && RTX_FRAME_RELATED_P (insn))
    return;

  if (note
      && GET_CODE (XEXP (note, 0)) == SEQUENCE
      && XVECLEN (XEXP (note, 0), 0) >= 2)
    {
      rtx expr = XEXP (note, 0);
      rtx last = XVECEXP (expr, 0, XVECLEN (expr, 0) - 1);
      int i;

      if (GET_CODE (last) == SET
          && RTX_FRAME_RELATED_P (last) == RTX_FRAME_RELATED_P (insn)
          && SET_DEST (last) == stack_pointer_rtx
          && GET_CODE (SET_SRC (last)) == PLUS
          && XEXP (SET_SRC (last), 0) == stack_pointer_rtx
          && GET_CODE (XEXP (SET_SRC (last), 1)) == CONST_INT)
        {
          XEXP (SET_SRC (last), 1)
            = GEN_INT (INTVAL (XEXP (SET_SRC (last), 1)) + this_adjust);
          return;
        }

      new_expr = gen_rtx_SEQUENCE (VOIDmode,
                                   rtvec_alloc (XVECLEN (expr, 0) + 1));
      for (i = 0; i < XVECLEN (expr, 0); i++)
        XVECEXP (new_expr, 0, i) = XVECEXP (expr, 0, i);
    }
  else
    {
      new_expr = gen_rtx_SEQUENCE (VOIDmode, rtvec_alloc (2));
      if (note)
        XVECEXP (new_expr, 0, 0) = XEXP (note, 0);
      else
        {
          rtx expr = copy_rtx (single_set_for_csa (last_sp_set));

          XEXP (SET_SRC (expr), 1)
            = GEN_INT (INTVAL (XEXP (SET_SRC (expr), 1)) - this_adjust);
          RTX_FRAME_RELATED_P (expr) = 1;
          XVECEXP (new_expr, 0, 0) = expr;
        }
    }

  XVECEXP (new_expr, 0, XVECLEN (new_expr, 0) - 1)
    = copy_rtx (single_set_for_csa (insn));
  RTX_FRAME_RELATED_P (XVECEXP (new_expr, 0, XVECLEN (new_expr, 0) - 1))
    = RTX_FRAME_RELATED_P (insn);
  if (note)
    XEXP (note, 0) = new_expr;
  else
    REG_NOTES (last_sp_set)
      = gen_rtx_EXPR_LIST (REG_FRAME_RELATED_EXPR, new_expr,
                           REG_NOTES (last_sp_set));
}

/* Subroutine of combine_stack_adjustments, called for each basic block.  */

static void
combine_stack_adjustments_for_block (basic_block bb)
{
  HOST_WIDE_INT last_sp_adjust = 0;
  rtx last_sp_set = NULL_RTX;
  struct csa_memlist *memlist = NULL;
  rtx insn, next, set;
  struct record_stack_memrefs_data data;
  bool end_of_block = false;

  for (insn = BB_HEAD (bb); !end_of_block; insn = next)
    {
      end_of_block = insn == BB_END (bb);
      next = NEXT_INSN (insn);

      if (! INSN_P (insn))
        continue;

      set = single_set_for_csa (insn);
      if (set)
        {
          rtx dest = SET_DEST (set);
          rtx src = SET_SRC (set);

          /* Find constant additions to the stack pointer.  */
          if (dest == stack_pointer_rtx
              && GET_CODE (src) == PLUS
              && XEXP (src, 0) == stack_pointer_rtx
              && GET_CODE (XEXP (src, 1)) == CONST_INT)
            {
              HOST_WIDE_INT this_adjust = INTVAL (XEXP (src, 1));

              /* If we've not seen an adjustment previously, record
                 it now and continue.  */
              if (! last_sp_set)
                {
                  last_sp_set = insn;
                  last_sp_adjust = this_adjust;
                  continue;
                }

              /* If not all recorded memrefs can be adjusted, or the
                 adjustment is now too large for a constant addition,
                 we cannot merge the two stack adjustments.

                 Also we need to be careful not to move the stack
                 pointer in a way that creates stack accesses outside
                 the allocated area.  We can combine an allocation into
                 the first insn, or a deallocation into the second insn.
                 We cannot combine an allocation followed by a
                 deallocation.

                 The only somewhat frequent occurrence of the latter is
                 when a function allocates a stack frame but does not
                 use it.  For this case, we would need to analyze the
                 rtl stream to be sure that the allocated area is really
                 unused.  This means not only checking the memory
                 references, but also all registers or global memory
                 references possibly containing a stack frame address.

                 Perhaps the best way to address this problem is to
                 teach gcc not to allocate stack for objects never
                 used.  */
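
              /* For instance (a hypothetical downward-growing-stack
                 example; the numbers are purely for exposition):
                 merging (set sp (plus sp (const_int -8))) into a
                 preceding (set sp (plus sp (const_int -16))) calls
                 try_apply_stack_adjustment with NEW_ADJUST = -24 and
                 DELTA = -8, so a recorded memref at (plus sp 4) is
                 rebased to (plus sp 12) and still names the same
                 slot.  */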

              /* Combine an allocation into the first instruction.  */
              if (STACK_GROWS_DOWNWARD ? this_adjust <= 0 : this_adjust >= 0)
                {
                  if (try_apply_stack_adjustment (last_sp_set, memlist,
                                                  last_sp_adjust + this_adjust,
                                                  this_adjust))
                    {
                      if (RTX_FRAME_RELATED_P (last_sp_set))
                        adjust_frame_related_expr (last_sp_set, insn,
                                                   this_adjust);
                      /* It worked!  */
                      delete_insn (insn);
                      last_sp_adjust += this_adjust;
                      continue;
                    }
                }

              /* Otherwise we have a deallocation.  Do not combine with
                 a previous allocation.  Combine into the second insn.  */
              else if (STACK_GROWS_DOWNWARD
                       ? last_sp_adjust >= 0 : last_sp_adjust <= 0)
                {
                  if (try_apply_stack_adjustment (insn, memlist,
                                                  last_sp_adjust + this_adjust,
                                                  -last_sp_adjust))
                    {
                      /* It worked!  */
                      delete_insn (last_sp_set);
                      last_sp_set = insn;
                      last_sp_adjust += this_adjust;
                      free_csa_memlist (memlist);
                      memlist = NULL;
                      continue;
                    }
                }

              /* Combination failed.  Restart processing from here.  If
                 deallocation+allocation conspired to cancel, we can
                 delete the old deallocation insn.  */
              if (last_sp_set && last_sp_adjust == 0)
                delete_insn (last_sp_set);
              free_csa_memlist (memlist);
              memlist = NULL;
              last_sp_set = insn;
              last_sp_adjust = this_adjust;
              continue;
            }

          /* Find a predecrement of exactly the previous adjustment and
             turn it into a direct store.  Obviously we can't do this if
             there were any intervening uses of the stack pointer.  */
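
          /* E.g. (an invented SImode example on a downward-growing
             stack):

                 (set sp (plus sp (const_int 4)))      ;; pop 4 bytes
                 (set (mem:SI (pre_dec sp)) (reg r))   ;; push r

             collapses to the single direct store

                 (set (mem:SI sp) (reg r))

             since the pop and the push cancel and the store lands at
             the same address.  */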
          if (memlist == NULL
              && MEM_P (dest)
              && ((GET_CODE (XEXP (dest, 0)) == PRE_DEC
                   && (last_sp_adjust
                       == (HOST_WIDE_INT) GET_MODE_SIZE (GET_MODE (dest))))
                  || (GET_CODE (XEXP (dest, 0)) == PRE_MODIFY
                      && GET_CODE (XEXP (XEXP (dest, 0), 1)) == PLUS
                      && XEXP (XEXP (XEXP (dest, 0), 1), 0) == stack_pointer_rtx
                      && (GET_CODE (XEXP (XEXP (XEXP (dest, 0), 1), 1))
                          == CONST_INT)
                      && (INTVAL (XEXP (XEXP (XEXP (dest, 0), 1), 1))
                          == -last_sp_adjust)))
              && XEXP (XEXP (dest, 0), 0) == stack_pointer_rtx
              && ! reg_mentioned_p (stack_pointer_rtx, src)
              && memory_address_p (GET_MODE (dest), stack_pointer_rtx)
              && validate_change (insn, &SET_DEST (set),
                                  replace_equiv_address (dest,
                                                         stack_pointer_rtx),
                                  0))
            {
              delete_insn (last_sp_set);
              free_csa_memlist (memlist);
              memlist = NULL;
              last_sp_set = NULL_RTX;
              last_sp_adjust = 0;
              continue;
            }
        }

      data.insn = insn;
      data.memlist = memlist;
      if (!CALL_P (insn) && last_sp_set
          && !for_each_rtx (&PATTERN (insn), record_stack_memrefs, &data))
        {
          memlist = data.memlist;
          continue;
        }
      memlist = data.memlist;

      /* Otherwise, we were not able to process the instruction.
         Do not continue collecting data across such a one.  */
      if (last_sp_set
          && (CALL_P (insn)
              || reg_mentioned_p (stack_pointer_rtx, PATTERN (insn))))
        {
          if (last_sp_set && last_sp_adjust == 0)
            delete_insn (last_sp_set);
          free_csa_memlist (memlist);
          memlist = NULL;
          last_sp_set = NULL_RTX;
          last_sp_adjust = 0;
        }
    }

  if (last_sp_set && last_sp_adjust == 0)
    delete_insn (last_sp_set);

  if (memlist)
    free_csa_memlist (memlist);
}
\f

static bool
gate_handle_stack_adjustments (void)
{
  return (optimize > 0);
}

static unsigned int
rest_of_handle_stack_adjustments (void)
{
  cleanup_cfg (flag_crossjumping ? CLEANUP_CROSSJUMP : 0);

  /* This is kind of a heuristic.  We need to run combine_stack_adjustments
     even for machines with possibly nonzero RETURN_POPS_ARGS
     and ACCUMULATE_OUTGOING_ARGS.  We expect that only ports having
     push instructions will have popping returns.  */
#ifndef PUSH_ROUNDING
  if (!ACCUMULATE_OUTGOING_ARGS)
#endif
    {
      df_note_add_problem ();
      df_analyze ();
      combine_stack_adjustments ();
    }
  return 0;
}

struct rtl_opt_pass pass_stack_adjustments =
{
 {
  RTL_PASS,
  "csa",                                /* name */
  gate_handle_stack_adjustments,        /* gate */
  rest_of_handle_stack_adjustments,     /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  0,                                    /* tv_id */
  0,                                    /* properties_required */
  0,                                    /* properties_provided */
  0,                                    /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_df_finish | TODO_verify_rtl_sharing |
  TODO_dump_func |
  TODO_ggc_collect,                     /* todo_flags_finish */
 }
};