gcc/cfgexpand.c
1/* A pass for lowering trees to RTL.
2 Copyright (C) 2004 Free Software Foundation, Inc.
3
4This file is part of GCC.
5
6GCC is free software; you can redistribute it and/or modify
7it under the terms of the GNU General Public License as published by
8the Free Software Foundation; either version 2, or (at your option)
9any later version.
10
11GCC is distributed in the hope that it will be useful,
12but WITHOUT ANY WARRANTY; without even the implied warranty of
13MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14GNU General Public License for more details.
15
16You should have received a copy of the GNU General Public License
17along with GCC; see the file COPYING. If not, write to
18the Free Software Foundation, 59 Temple Place - Suite 330,
19Boston, MA 02111-1307, USA. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "tree.h"
26#include "rtl.h"
27#include "tm_p.h"
28#include "basic-block.h"
29#include "function.h"
30#include "expr.h"
31#include "langhooks.h"
32#include "tree-flow.h"
33#include "timevar.h"
34#include "tree-dump.h"
35#include "tree-pass.h"
36#include "except.h"
37#include "flags.h"
38#include "diagnostic.h"
39#include "toplev.h"
40
41
42#ifndef LOCAL_ALIGNMENT
43#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
44#endif
45
46#ifndef STACK_ALIGNMENT_NEEDED
47#define STACK_ALIGNMENT_NEEDED 1
48#endif
49
50#ifdef FRAME_GROWS_DOWNWARD
51# undef FRAME_GROWS_DOWNWARD
52# define FRAME_GROWS_DOWNWARD 1
53#else
54# define FRAME_GROWS_DOWNWARD 0
55#endif
56
57
58/* This structure holds data relevant to one variable that will be
59 placed in a stack slot. */
60struct stack_var
61{
62 /* The variable. */
63 tree decl;
64
65 /* The offset of the variable. During partitioning, this is the
66 offset relative to the partition. After partitioning, this
67 is relative to the stack frame. */
68 HOST_WIDE_INT offset;
69
70 /* Initially, the size of the variable. Later, the size of the partition,
71 if this variable becomes its partition's representative. */
72 HOST_WIDE_INT size;
73
74 /* The *byte* alignment required for this variable. Or, as with the
75 size, the alignment for this partition. */
76 unsigned int alignb;
77
78 /* The partition representative. */
79 size_t representative;
80
81 /* The next stack variable in the partition, or EOC. */
82 size_t next;
83};
84
85#define EOC ((size_t)-1)
86
87/* We have an array of such objects while deciding allocation. */
88static struct stack_var *stack_vars;
89static size_t stack_vars_alloc;
90static size_t stack_vars_num;
91
92/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
93 is non-decreasing. */
94static size_t *stack_vars_sorted;
95
96/* We have an interference graph between such objects. This graph
97 is lower triangular. */
98static bool *stack_vars_conflict;
99static size_t stack_vars_conflict_alloc;
100
101/* The phase of the stack frame. This is the known misalignment of
102 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
103 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
104static int frame_phase;
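/* As an illustration (assuming a hypothetical target, not any specific one):
   with a PREFERRED_STACK_BOUNDARY of 128 bits (16 bytes) and a
   STARTING_FRAME_OFFSET of 8 bytes, expand_used_vars below computes
   off = 8 % 16 = 8 and frame_phase = 16 - 8 = 8, so offsets are shifted
   by 8 bytes before being rounded to the preferred boundary.  */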
105
106
107/* Discover the byte alignment to use for DECL. Ignore any alignment
108 we cannot provide given the expected alignment of the stack boundary. */
109
110static unsigned int
111get_decl_align_unit (tree decl)
112{
113 unsigned int align;
114
115 align = DECL_ALIGN (decl);
116 align = LOCAL_ALIGNMENT (TREE_TYPE (decl), align);
117 if (align > PREFERRED_STACK_BOUNDARY)
118 align = PREFERRED_STACK_BOUNDARY;
119 if (cfun->stack_alignment_needed < align)
120 cfun->stack_alignment_needed = align;
121
122 return align / BITS_PER_UNIT;
123}
124
125/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
126 Return the frame offset. */
127
128static HOST_WIDE_INT
129alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
130{
131 HOST_WIDE_INT offset, new_frame_offset;
132
133 new_frame_offset = frame_offset;
134 if (FRAME_GROWS_DOWNWARD)
135 {
136 new_frame_offset -= size + frame_phase;
137 new_frame_offset &= -align;
138 new_frame_offset += frame_phase;
139 offset = new_frame_offset;
140 }
141 else
142 {
143 new_frame_offset -= frame_phase;
144 new_frame_offset += align - 1;
145 new_frame_offset &= -align;
146 new_frame_offset += frame_phase;
147 offset = new_frame_offset;
148 new_frame_offset += size;
149 }
150 frame_offset = new_frame_offset;
151
152 return offset;
153}
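/* A worked example of the arithmetic above, assuming FRAME_GROWS_DOWNWARD,
   frame_phase == 0, frame_offset == -4, size == 12 and align == 8:
   new_frame_offset becomes -4 - 12 = -16, already a multiple of 8, so the
   allocation lands at offset -16 and frame_offset is updated to -16.  Had
   frame_offset been -2 instead, -2 - 12 = -14 would be rounded down to -16
   by the "&= -align" step, giving the same placement.  */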
154
155/* Accumulate DECL into STACK_VARS. */
156
157static void
158add_stack_var (tree decl)
159{
160 if (stack_vars_num >= stack_vars_alloc)
161 {
162 if (stack_vars_alloc)
163 stack_vars_alloc = stack_vars_alloc * 3 / 2;
164 else
165 stack_vars_alloc = 32;
166 stack_vars
167 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
168 }
169 stack_vars[stack_vars_num].decl = decl;
170 stack_vars[stack_vars_num].offset = 0;
171 stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
172 stack_vars[stack_vars_num].alignb = get_decl_align_unit (decl);
173
174 /* All variables are initially in their own partition. */
175 stack_vars[stack_vars_num].representative = stack_vars_num;
176 stack_vars[stack_vars_num].next = EOC;
177
178 /* Ensure that this decl doesn't get put onto the list twice. */
179 SET_DECL_RTL (decl, pc_rtx);
180
181 stack_vars_num++;
182}
183
184/* Compute the linear index of a lower-triangular coordinate (I, J). */
185
186static size_t
187triangular_index (size_t i, size_t j)
188{
189 if (i < j)
190 {
191 size_t t;
192 t = i, i = j, j = t;
193 }
194 return (i * (i + 1)) / 2 + j;
195}
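/* Illustration of the mapping above: with the larger index first, the
   pairs fill the flat conflict array as
     (0,0) -> 0
     (1,0) -> 1   (1,1) -> 2
     (2,0) -> 3   (2,1) -> 4   (2,2) -> 5
   so N variables need triangular_index (N-1, N-1) + 1 == N*(N+1)/2 entries,
   which is exactly what resize_stack_vars_conflict allocates below.  */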
196
197/* Ensure that STACK_VARS_CONFLICT is large enough for N objects. */
198
199static void
200resize_stack_vars_conflict (size_t n)
201{
202 size_t size = triangular_index (n-1, n-1) + 1;
203
204 if (size <= stack_vars_conflict_alloc)
205 return;
206
207 stack_vars_conflict = XRESIZEVEC (bool, stack_vars_conflict, size);
208 memset (stack_vars_conflict + stack_vars_conflict_alloc, 0,
209 (size - stack_vars_conflict_alloc) * sizeof (bool));
210 stack_vars_conflict_alloc = size;
211}
212
213/* Make the decls associated with luid's X and Y conflict. */
214
215static void
216add_stack_var_conflict (size_t x, size_t y)
217{
218 size_t index = triangular_index (x, y);
219 gcc_assert (index < stack_vars_conflict_alloc);
220 stack_vars_conflict[index] = true;
221}
222
223/* Check whether the decls associated with luid's X and Y conflict. */
224
225static bool
226stack_var_conflict_p (size_t x, size_t y)
227{
228 size_t index = triangular_index (x, y);
229 gcc_assert (index < stack_vars_conflict_alloc);
230 return stack_vars_conflict[index];
231}
232
233/* A subroutine of expand_used_vars. If two variables X and Y have alias
234 sets that do not conflict, then do add a conflict for these variables
235 in the interference graph. We also have to mind MEM_IN_STRUCT_P and
236 MEM_SCALAR_P. */
237
238static void
239add_alias_set_conflicts (void)
240{
241 size_t i, j, n = stack_vars_num;
242
243 for (i = 0; i < n; ++i)
244 {
245 bool aggr_i = AGGREGATE_TYPE_P (TREE_TYPE (stack_vars[i].decl));
246 HOST_WIDE_INT set_i = get_alias_set (stack_vars[i].decl);
247
248 for (j = 0; j < i; ++j)
249 {
250 bool aggr_j = AGGREGATE_TYPE_P (TREE_TYPE (stack_vars[j].decl));
251 HOST_WIDE_INT set_j = get_alias_set (stack_vars[j].decl);
252 if (aggr_i != aggr_j || !alias_sets_conflict_p (set_i, set_j))
253 add_stack_var_conflict (i, j);
254 }
255 }
256}
257
258/* A subroutine of partition_stack_vars. A comparison function for qsort,
259 sorting an array of indices by the size of the object. */
260
261static int
262stack_var_size_cmp (const void *a, const void *b)
263{
264 HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
265 HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
266
267 if (sa < sb)
268 return -1;
269 if (sa > sb)
270 return 1;
271 return 0;
272}
273
274/* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
275 partitioning algorithm. Partitions A and B are known to be non-conflicting.
276 Merge them into a single partition A.
277
278 At the same time, add OFFSET to all variables in partition B. At the end
279 of the partitioning process we'll have a nice block easy to lay out within
280 the stack frame. */
281
282static void
283union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
284{
285 size_t i, last;
286
287 /* Update each element of partition B with the given offset,
288 and merge them into partition A. */
289 for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
290 {
291 stack_vars[i].offset += offset;
292 stack_vars[i].representative = a;
293 }
294 stack_vars[last].next = stack_vars[a].next;
295 stack_vars[a].next = b;
296
297 /* Update the required alignment of partition A to account for B. */
298 if (stack_vars[a].alignb < stack_vars[b].alignb)
299 stack_vars[a].alignb = stack_vars[b].alignb;
300
301 /* Update the interference graph and merge the conflicts. */
302 for (last = stack_vars_num, i = 0; i < last; ++i)
303 if (stack_var_conflict_p (b, i))
304 add_stack_var_conflict (a, i);
305}
306
307/* A subroutine of expand_used_vars. Binpack the variables into
308 partitions constrained by the interference graph. The overall
309 algorithm used is as follows:
310
311 Sort the objects by size.
312 For each object A {
313 S = size(A)
314 O = 0
315 loop {
316 Look for the largest non-conflicting object B with size <= S.
317 UNION (A, B)
318 offset(B) = O
319 O += size(B)
320 S -= size(B)
321 }
322 }
323*/
324
325static void
326partition_stack_vars (void)
327{
328 size_t si, sj, n = stack_vars_num;
329
330 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
331 for (si = 0; si < n; ++si)
332 stack_vars_sorted[si] = si;
333
334 if (n == 1)
335 return;
336
337 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);
338
339 /* Special case: detect when all variables conflict, and thus we can't
340 do anything during the partitioning loop. It isn't uncommon (with
341 C code at least) to declare all variables at the top of the function,
342 and if we're not inlining, then all variables will be in the same scope.
343 Take advantage of very fast libc routines for this scan. */
344 gcc_assert (sizeof(bool) == sizeof(char));
345 if (memchr (stack_vars_conflict, false, stack_vars_conflict_alloc) == NULL)
346 return;
347
348 for (si = 0; si < n; ++si)
349 {
350 size_t i = stack_vars_sorted[si];
351 HOST_WIDE_INT isize = stack_vars[i].size;
352 HOST_WIDE_INT offset = 0;
353
354 for (sj = si; sj-- > 0; )
355 {
356 size_t j = stack_vars_sorted[sj];
357 HOST_WIDE_INT jsize = stack_vars[j].size;
358 unsigned int jalign = stack_vars[j].alignb;
359
360 /* Ignore objects that aren't partition representatives. */
361 if (stack_vars[j].representative != j)
362 continue;
363
364 /* Ignore objects too large for the remaining space. */
365 if (isize < jsize)
366 continue;
367
368 /* Ignore conflicting objects. */
369 if (stack_var_conflict_p (i, j))
370 continue;
371
372 /* Refine the remaining space check to include alignment. */
373 if (offset & (jalign - 1))
374 {
375 HOST_WIDE_INT toff = offset;
376 toff += jalign - 1;
377 toff &= -(HOST_WIDE_INT)jalign;
378 if (isize - (toff - offset) < jsize)
379 continue;
380
381 isize -= toff - offset;
382 offset = toff;
383 }
384
385 /* UNION the objects, placing J at OFFSET. */
386 union_stack_vars (i, j, offset);
387
388 isize -= jsize;
389 if (isize == 0)
390 break;
391 }
392 }
393}
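/* A small example of the effect of this loop, assuming three 8-byte locals
   a, b and c where only a and b conflict: the pass can union c with
   whichever of a or b it does not conflict with, so two of the three
   variables end up sharing a single 8-byte slot and the frame needs 16
   bytes for them rather than 24.  */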
394
395/* A debugging aid for expand_used_vars. Dump the generated partitions. */
396
397static void
398dump_stack_var_partition (void)
399{
400 size_t si, i, j, n = stack_vars_num;
401
402 for (si = 0; si < n; ++si)
403 {
404 i = stack_vars_sorted[si];
405
406 /* Skip variables that aren't partition representatives, for now. */
407 if (stack_vars[i].representative != i)
408 continue;
409
410 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
411 " align %u\n", (unsigned long) i, stack_vars[i].size,
412 stack_vars[i].alignb);
413
414 for (j = i; j != EOC; j = stack_vars[j].next)
415 {
416 fputc ('\t', dump_file);
417 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
418 fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
419 stack_vars[j].offset);
420 }
421 }
422}
423
424/* Assign rtl to DECL at frame offset OFFSET. */
425
426static void
427expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
428{
429 HOST_WIDE_INT align;
430 rtx x;
431
432 /* If this fails, we've overflowed the stack frame. Error nicely? */
433 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
434
435 x = plus_constant (virtual_stack_vars_rtx, offset);
436 x = gen_rtx_MEM (DECL_MODE (decl), x);
437
438 /* Set alignment we actually gave this decl. */
439 offset -= frame_phase;
440 align = offset & -offset;
441 align *= BITS_PER_UNIT;
442 if (align > STACK_BOUNDARY || align == 0)
443 align = STACK_BOUNDARY;
444 DECL_ALIGN (decl) = align;
445 DECL_USER_ALIGN (decl) = 0;
446
447 set_mem_attributes (x, decl, true);
448 SET_DECL_RTL (decl, x);
449}
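/* For instance, a partition member whose phase-adjusted offset is 24 gets
   offset & -offset == 8, the largest power of two dividing 24, and hence a
   DECL_ALIGN of 64 bits unless that exceeds STACK_BOUNDARY; an offset of 0
   makes the expression 0, and the alignment likewise falls back to
   STACK_BOUNDARY.  */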
450
451/* A subroutine of expand_used_vars. Give each partition representative
452 a unique location within the stack frame. Update each partition member
453 with that location. */
454
455static void
456expand_stack_vars (void)
457{
458 size_t si, i, j, n = stack_vars_num;
459
460 for (si = 0; si < n; ++si)
461 {
462 HOST_WIDE_INT offset;
463
464 i = stack_vars_sorted[si];
465
466 /* Skip variables that aren't partition representatives, for now. */
467 if (stack_vars[i].representative != i)
468 continue;
469
470 offset = alloc_stack_frame_space (stack_vars[i].size,
471 stack_vars[i].alignb);
472
473 /* Create rtl for each variable based on their location within the
474 partition. */
475 for (j = i; j != EOC; j = stack_vars[j].next)
476 expand_one_stack_var_at (stack_vars[j].decl,
477 stack_vars[j].offset + offset);
478 }
479}
480
481/* A subroutine of expand_one_var. Called to immediately assign rtl
482 to a variable to be allocated in the stack frame. */
483
484static void
485expand_one_stack_var (tree var)
486{
487 HOST_WIDE_INT size, offset, align;
488
489 size = tree_low_cst (DECL_SIZE_UNIT (var), 1);
490 align = get_decl_align_unit (var);
491 offset = alloc_stack_frame_space (size, align);
492
493 expand_one_stack_var_at (var, offset);
494}
495
496/* A subroutine of expand_one_var. Called to assign rtl
497 to a TREE_STATIC VAR_DECL. */
498
499static void
500expand_one_static_var (tree var)
501{
502 /* If this is an inlined copy of a static local variable,
503 look up the original. */
504 var = DECL_ORIGIN (var);
505
506 /* If we've already processed this variable because of that, do nothing. */
507 if (TREE_ASM_WRITTEN (var))
508 return;
509
510 /* Give the front end a chance to do whatever. In practice, this is
511 resolving duplicate names for IMA in C. */
512 if (lang_hooks.expand_decl (var))
513 return;
514
515 /* Otherwise, just emit the variable. */
516 rest_of_decl_compilation (var, 0, 0);
517}
518
519/* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
520 that will reside in a hard register. */
521
522static void
523expand_one_hard_reg_var (tree var)
524{
525 rest_of_decl_compilation (var, 0, 0);
526}
527
528/* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
529 that will reside in a pseudo register. */
530
531static void
532expand_one_register_var (tree var)
533{
534 tree type = TREE_TYPE (var);
535 int unsignedp = TYPE_UNSIGNED (type);
536 enum machine_mode reg_mode
537 = promote_mode (type, DECL_MODE (var), &unsignedp, 0);
538 rtx x = gen_reg_rtx (reg_mode);
539
540 SET_DECL_RTL (var, x);
541
542 /* Note if the object is a user variable. */
543 if (!DECL_ARTIFICIAL (var))
544 {
545 mark_user_reg (x);
546
547 /* Trust user variables which have a pointer type to really
548 be pointers. Do not trust compiler generated temporaries
549 as our type system is totally busted as it relates to
550 pointer arithmetic which translates into lots of compiler
551 generated objects with pointer types, but which are not really
552 pointers. */
553 if (POINTER_TYPE_P (type))
554 mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (var))));
555 }
556}
557
558/* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
559 has some associated error, e.g. its type is error-mark. We just need
560 to pick something that won't crash the rest of the compiler. */
561
562static void
563expand_one_error_var (tree var)
564{
565 enum machine_mode mode = DECL_MODE (var);
566 rtx x;
567
568 if (mode == BLKmode)
569 x = gen_rtx_MEM (BLKmode, const0_rtx);
570 else if (mode == VOIDmode)
571 x = const0_rtx;
572 else
573 x = gen_reg_rtx (mode);
574
575 SET_DECL_RTL (var, x);
576}
577
578/* A subroutine of expand_one_var. VAR is a variable that will be
579 allocated to the local stack frame. Return true if we wish to
580 add VAR to STACK_VARS so that it will be coalesced with other
581 variables. Return false to allocate VAR immediately.
582
583 This function is used to reduce the number of variables considered
584 for coalescing, which reduces the size of the quadratic problem. */
585
586static bool
587defer_stack_allocation (tree var, bool toplevel)
588{
589 /* Variables in the outermost scope automatically conflict with
590 every other variable. The only reason to want to defer them
591 at all is that, after sorting, we can more efficiently pack
592 small variables in the stack frame. Continue to defer at -O2. */
593 if (toplevel && optimize < 2)
594 return false;
595
596 /* Without optimization, *most* variables are allocated from the
597 stack, which makes the quadratic problem large exactly when we
598 want compilation to proceed as quickly as possible. On the
599 other hand, we don't want the function's stack frame size to
600 get completely out of hand. So we avoid adding scalars and
601 "small" aggregates to the list at all. */
602 if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
603 return false;
604
605 return true;
606}
607
608/* A subroutine of expand_used_vars. Expand one variable according to
609 its flavor. Variables to be placed on the stack are not actually
610 expanded yet, merely recorded. */
611
612static void
613expand_one_var (tree var, bool toplevel)
614{
615 if (TREE_CODE (var) != VAR_DECL)
616 lang_hooks.expand_decl (var);
617 else if (DECL_EXTERNAL (var))
618 ;
619 else if (DECL_VALUE_EXPR (var))
620 ;
621 else if (TREE_STATIC (var))
622 expand_one_static_var (var);
623 else if (DECL_RTL_SET_P (var))
624 ;
625 else if (TREE_TYPE (var) == error_mark_node)
626 expand_one_error_var (var);
627 else if (DECL_HARD_REGISTER (var))
628 expand_one_hard_reg_var (var);
629 else if (use_register_for_decl (var))
630 expand_one_register_var (var);
631 else if (defer_stack_allocation (var, toplevel))
632 add_stack_var (var);
633 else
634 expand_one_stack_var (var);
635}
636
637/* A subroutine of expand_used_vars. Walk down through the BLOCK tree
638 expanding variables. Those variables that can be put into registers
639 are allocated pseudos; those that can't are put on the stack.
640
641 TOPLEVEL is true if this is the outermost BLOCK. */
642
643static void
644expand_used_vars_for_block (tree block, bool toplevel)
645{
646 size_t i, j, old_sv_num, this_sv_num, new_sv_num;
647 tree t;
648
649 old_sv_num = toplevel ? 0 : stack_vars_num;
650
651 /* Expand all variables at this level. */
652 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
653 if (TREE_USED (t))
654 expand_one_var (t, toplevel);
655
656 this_sv_num = stack_vars_num;
657
658 /* Expand all variables at containing levels. */
659 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
660 expand_used_vars_for_block (t, false);
661
662 /* Since we do not track exact variable lifetimes (which is not even
663 possible for variables whose address escapes), we mirror the block
664 tree in the interference graph. Here we cause all variables at this
665 level, and all sublevels, to conflict. Do make certain that a
666 variable conflicts with itself. */
667 if (old_sv_num < this_sv_num)
668 {
669 new_sv_num = stack_vars_num;
670 resize_stack_vars_conflict (new_sv_num);
671
672 for (i = old_sv_num; i < new_sv_num; ++i)
673 for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
674 add_stack_var_conflict (i, j);
675 }
676}
677
678/* A subroutine of expand_used_vars. Walk down through the BLOCK tree
679 and clear TREE_USED on all local variables. */
680
681static void
682clear_tree_used (tree block)
683{
684 tree t;
685
686 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
687 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
688 TREE_USED (t) = 0;
689
690 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
691 clear_tree_used (t);
692}
693
694/* Expand all variables used in the function. */
695
696static void
697expand_used_vars (void)
698{
699 tree t, outer_block = DECL_INITIAL (current_function_decl);
700
701 /* Compute the phase of the stack frame for this function. */
702 {
703 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
704 int off = STARTING_FRAME_OFFSET % align;
705 frame_phase = off ? align - off : 0;
706 }
707
708 /* Set TREE_USED on all variables in the unexpanded_var_list. */
709 for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
710 TREE_USED (TREE_VALUE (t)) = 1;
711
712 /* Clear TREE_USED on all variables associated with a block scope. */
713 clear_tree_used (outer_block);
714
715 /* At this point all variables on the unexpanded_var_list with TREE_USED
716 set are not associated with any block scope. Lay them out. */
717 for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
718 {
719 tree var = TREE_VALUE (t);
720 bool expand_now = false;
721
722 /* We didn't set a block for static or extern because it's hard
723 to tell the difference between a global variable (re)declared
724 in a local scope, and one that's really declared there to
725 begin with. And it doesn't really matter much, since we're
726 not giving them stack space. Expand them now. */
727 if (TREE_STATIC (var) || DECL_EXTERNAL (var))
728 expand_now = true;
729
730 /* Any variable that could have been hoisted into an SSA_NAME
731 will have been propagated anywhere the optimizers chose,
732 i.e. not confined to their original block. Allocate them
733 as if they were defined in the outermost scope. */
734 else if (is_gimple_reg (var))
735 expand_now = true;
736
737 /* If the variable is not associated with any block, then it
738 was created by the optimizers, and could be live anywhere
739 in the function. */
740 else if (TREE_USED (var))
741 expand_now = true;
742
743 /* Finally, mark all variables on the list as used. We'll use
744 this in a moment when we expand those associated with scopes. */
745 TREE_USED (var) = 1;
746
747 if (expand_now)
748 expand_one_var (var, true);
749 }
750 cfun->unexpanded_var_list = NULL_TREE;
751
752 /* At this point, all variables within the block tree with TREE_USED
753 set are actually used by the optimized function. Lay them out. */
754 expand_used_vars_for_block (outer_block, true);
755
756 if (stack_vars_num > 0)
757 {
758 /* Due to the way alias sets work, no variables with non-conflicting
759 alias sets may be assigned the same address. Add conflicts to
760 reflect this. */
761 add_alias_set_conflicts ();
762
763 /* Now that we have collected all stack variables, and have computed a
764 minimal interference graph, attempt to save some stack space. */
765 partition_stack_vars ();
766 if (dump_file)
767 dump_stack_var_partition ();
768
769 /* Assign rtl to each variable based on these partitions. */
770 expand_stack_vars ();
771
772 /* Free up stack variable graph data. */
773 XDELETEVEC (stack_vars);
774 XDELETEVEC (stack_vars_sorted);
775 XDELETEVEC (stack_vars_conflict);
776 stack_vars = NULL;
777 stack_vars_alloc = stack_vars_num = 0;
778 stack_vars_conflict = NULL;
779 stack_vars_conflict_alloc = 0;
780 }
781
782 /* If the target requires that FRAME_OFFSET be aligned, do it. */
783 if (STACK_ALIGNMENT_NEEDED)
784 {
785 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
786 if (!FRAME_GROWS_DOWNWARD)
787 frame_offset += align - 1;
788 frame_offset &= -align;
789 }
790}
791
792
793/* A subroutine of expand_gimple_basic_block. Expand one COND_EXPR.
794 Returns a new basic block if we've terminated the current basic
795 block and created a new one. */
796
797static basic_block
798expand_gimple_cond_expr (basic_block bb, tree stmt)
799{
800 basic_block new_bb, dest;
801 edge new_edge;
802 edge true_edge;
803 edge false_edge;
804 tree pred = COND_EXPR_COND (stmt);
805 tree then_exp = COND_EXPR_THEN (stmt);
806 tree else_exp = COND_EXPR_ELSE (stmt);
807 rtx last;
808
809 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
810 if (EXPR_LOCUS (stmt))
811 {
812 emit_line_note (*(EXPR_LOCUS (stmt)));
813 record_block_change (TREE_BLOCK (stmt));
814 }
815
816 /* These flags have no purpose in RTL land. */
817 true_edge->flags &= ~EDGE_TRUE_VALUE;
818 false_edge->flags &= ~EDGE_FALSE_VALUE;
819
820 /* We can either have a pure conditional jump with one fallthru edge or
821 a two-way jump that needs to be decomposed into two basic blocks. */
822 if (TREE_CODE (then_exp) == GOTO_EXPR && IS_EMPTY_STMT (else_exp))
823 {
824 jumpif (pred, label_rtx (GOTO_DESTINATION (then_exp)));
825 return NULL;
826 }
827 if (TREE_CODE (else_exp) == GOTO_EXPR && IS_EMPTY_STMT (then_exp))
828 {
829 jumpifnot (pred, label_rtx (GOTO_DESTINATION (else_exp)));
830 return NULL;
831 }
832 gcc_assert (TREE_CODE (then_exp) == GOTO_EXPR
833 && TREE_CODE (else_exp) == GOTO_EXPR);
834
835 jumpif (pred, label_rtx (GOTO_DESTINATION (then_exp)));
836 last = get_last_insn ();
837 expand_expr (else_exp, const0_rtx, VOIDmode, 0);
838
839 BB_END (bb) = last;
840 if (BARRIER_P (BB_END (bb)))
841 BB_END (bb) = PREV_INSN (BB_END (bb));
842 update_bb_for_insn (bb);
843
844 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
845 dest = false_edge->dest;
846 redirect_edge_succ (false_edge, new_bb);
847 false_edge->flags |= EDGE_FALLTHRU;
848 new_bb->count = false_edge->count;
849 new_bb->frequency = EDGE_FREQUENCY (false_edge);
850 new_edge = make_edge (new_bb, dest, 0);
851 new_edge->probability = REG_BR_PROB_BASE;
852 new_edge->count = new_bb->count;
853 if (BARRIER_P (BB_END (new_bb)))
854 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
855 update_bb_for_insn (new_bb);
856
857 if (dump_file)
858 {
859 dump_bb (bb, dump_file, 0);
860 dump_bb (new_bb, dump_file, 0);
861 }
862
863 return new_bb;
864}
865
866/* A subroutine of expand_gimple_basic_block. Expand one CALL_EXPR
867 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
868 generated a tail call (something that might be denied by the ABI
869 rules governing the call; see calls.c).
870
871 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
872 can still reach the rest of BB. The case here is __builtin_sqrt,
873 where the NaN result goes through the external function (with a
874 tailcall) and the normal result happens via a sqrt instruction. */
875
876static basic_block
877expand_gimple_tailcall (basic_block bb, tree stmt, bool *can_fallthru)
878{
879 rtx last = get_last_insn ();
880 edge e;
881 int probability;
882 gcov_type count;
883
884 expand_expr_stmt (stmt);
885
886 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
887 if (CALL_P (last) && SIBLING_CALL_P (last))
888 goto found;
889
890 *can_fallthru = true;
891 return NULL;
892
893 found:
894 /* ??? Wouldn't it be better to just reset any pending stack adjust?
895 Any instructions emitted here are about to be deleted. */
896 do_pending_stack_adjust ();
897
898 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
899 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
900 EH or abnormal edges, we shouldn't have created a tail call in
901 the first place. So it seems to me we should just be removing
902 all edges here, or redirecting the existing fallthru edge to
903 the exit block. */
904
905 e = bb->succ;
906 probability = 0;
907 count = 0;
908 while (e)
909 {
910 edge next = e->succ_next;
911
912 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
913 {
914 if (e->dest != EXIT_BLOCK_PTR)
915 {
916 e->dest->count -= e->count;
917 e->dest->frequency -= EDGE_FREQUENCY (e);
918 if (e->dest->count < 0)
919 e->dest->count = 0;
920 if (e->dest->frequency < 0)
921 e->dest->frequency = 0;
922 }
923 count += e->count;
924 probability += e->probability;
925 remove_edge (e);
926 }
927
928 e = next;
929 }
930
931 /* This is somewhat ugly: the call_expr expander often emits instructions
932 after the sibcall (to perform the function return). These confuse the
933 find_sub_basic_blocks code, so we need to get rid of these. */
934 last = NEXT_INSN (last);
935 gcc_assert (BARRIER_P (last));
936
937 *can_fallthru = false;
938 while (NEXT_INSN (last))
939 {
940 /* For instance, the sqrt builtin expander expands an if with a
941 sibcall in the then arm and a label for the else arm. */
942 if (LABEL_P (NEXT_INSN (last)))
943 {
944 *can_fallthru = true;
945 break;
946 }
947 delete_insn (NEXT_INSN (last));
948 }
949
950 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
951 e->probability += probability;
952 e->count += count;
953 BB_END (bb) = last;
954 update_bb_for_insn (bb);
955
956 if (NEXT_INSN (last))
957 {
958 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
959
960 last = BB_END (bb);
961 if (BARRIER_P (last))
962 BB_END (bb) = PREV_INSN (last);
963 }
964
965 return bb;
966}
967
968/* Expand basic block BB from GIMPLE trees to RTL. */
969
970static basic_block
971expand_gimple_basic_block (basic_block bb, FILE * dump_file)
972{
973 block_stmt_iterator bsi = bsi_start (bb);
974 tree stmt = NULL;
975 rtx note, last;
976 edge e;
977
978 if (dump_file)
979 {
980 tree_register_cfg_hooks ();
981 dump_bb (bb, dump_file, 0);
982 rtl_register_cfg_hooks ();
983 }
984
985 if (!bsi_end_p (bsi))
986 stmt = bsi_stmt (bsi);
987
988 if (stmt && TREE_CODE (stmt) == LABEL_EXPR)
989 {
990 last = get_last_insn ();
991
992 expand_expr_stmt (stmt);
993
994 /* Java emits line number notes at the top of labels.
995 ??? Make this go away once line number notes are obsoleted. */
996 BB_HEAD (bb) = NEXT_INSN (last);
997 if (NOTE_P (BB_HEAD (bb)))
998 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
999 bsi_next (&bsi);
1000 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
1001 }
1002 else
1003 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
1004
1005 NOTE_BASIC_BLOCK (note) = bb;
1006
1007 e = bb->succ;
1008 while (e)
1009 {
1010 edge next = e->succ_next;
1011
1012 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
1013 e->flags &= ~EDGE_EXECUTABLE;
1014
1015 /* At the moment not all abnormal edges match the RTL representation.
1016 It is safe to remove them here as find_sub_basic_blocks will
1017 rediscover them. In the future we should get this fixed properly. */
1018 if (e->flags & EDGE_ABNORMAL)
1019 remove_edge (e);
1020
1021 e = next;
1022 }
1023
1024 for (; !bsi_end_p (bsi); bsi_next (&bsi))
1025 {
1026 tree stmt = bsi_stmt (bsi);
1027 basic_block new_bb;
1028
1029 if (!stmt)
1030 continue;
1031
1032 /* Expand this statement, then evaluate the resulting RTL and
1033 fixup the CFG accordingly. */
1034 if (TREE_CODE (stmt) == COND_EXPR)
1035 {
1036 new_bb = expand_gimple_cond_expr (bb, stmt);
1037 if (new_bb)
1038 return new_bb;
1039 }
1040 else
1041 {
1042 tree call = get_call_expr_in (stmt);
1043 if (call && CALL_EXPR_TAILCALL (call))
1044 {
1045 bool can_fallthru;
1046 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
1047 if (new_bb)
1048 {
1049 if (can_fallthru)
1050 bb = new_bb;
1051 else
1052 return new_bb;
1053 }
1054 }
1055 else
1056 expand_expr_stmt (stmt);
1057 }
1058 }
1059
1060 do_pending_stack_adjust ();
1061
1062 /* Find the block tail. The last insn in the block is the insn
1063 before a barrier and/or table jump insn. */
1064 last = get_last_insn ();
1065 if (BARRIER_P (last))
1066 last = PREV_INSN (last);
1067 if (JUMP_TABLE_DATA_P (last))
1068 last = PREV_INSN (PREV_INSN (last));
1069 BB_END (bb) = last;
1070
1071 if (dump_file)
1072 dump_bb (bb, dump_file, 0);
1073 update_bb_for_insn (bb);
1074
1075 return bb;
1076}
1077
1078
1079/* Create a basic block for initialization code. */
1080
1081static basic_block
1082construct_init_block (void)
1083{
1084 basic_block init_block, first_block;
1085 edge e;
1086
1087 for (e = ENTRY_BLOCK_PTR->succ; e; e = e->succ_next)
1088 if (e->dest == ENTRY_BLOCK_PTR->next_bb)
1089 break;
1090
1091 init_block = create_basic_block (NEXT_INSN (get_insns ()),
1092 get_last_insn (),
1093 ENTRY_BLOCK_PTR);
1094 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
1095 init_block->count = ENTRY_BLOCK_PTR->count;
1096 if (e)
1097 {
1098 first_block = e->dest;
1099 redirect_edge_succ (e, init_block);
1100 e = make_edge (init_block, first_block, EDGE_FALLTHRU);
1101 }
1102 else
1103 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1104 e->probability = REG_BR_PROB_BASE;
1105 e->count = ENTRY_BLOCK_PTR->count;
1106
1107 update_bb_for_insn (init_block);
1108 return init_block;
1109}
1110
1111
1112/* Create a block containing landing pads and similar stuff. */
1113
1114static void
1115construct_exit_block (void)
1116{
1117 rtx head = get_last_insn ();
1118 rtx end;
1119 basic_block exit_block;
1120 edge e, e2, next;
1121
1122 /* Make sure the locus is set to the end of the function, so that
1123 epilogue line numbers and warnings are set properly. */
1124#ifdef USE_MAPPED_LOCATION
1125 if (cfun->function_end_locus != UNKNOWN_LOCATION)
1126#else
1127 if (cfun->function_end_locus.file)
1128#endif
1129 input_location = cfun->function_end_locus;
1130
1131 /* The following insns belong to the top scope. */
1132 record_block_change (DECL_INITIAL (current_function_decl));
1133
1134 /* Generate rtl for function exit. */
1135 expand_function_end ();
1136
1137 end = get_last_insn ();
1138 if (head == end)
1139 return;
1140 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
1141 head = NEXT_INSN (head);
1142 exit_block = create_basic_block (NEXT_INSN (head), end,
1143 EXIT_BLOCK_PTR->prev_bb);
1144 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
1145 exit_block->count = EXIT_BLOCK_PTR->count;
1146 for (e = EXIT_BLOCK_PTR->pred; e; e = next)
1147 {
1148 next = e->pred_next;
1149 if (!(e->flags & EDGE_ABNORMAL))
1150 redirect_edge_succ (e, exit_block);
1151 }
1152 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
1153 e->probability = REG_BR_PROB_BASE;
1154 e->count = EXIT_BLOCK_PTR->count;
1155 for (e2 = EXIT_BLOCK_PTR->pred; e2; e2 = e2->pred_next)
1156 if (e2 != e)
1157 {
1158 e->count -= e2->count;
1159 exit_block->count -= e2->count;
1160 exit_block->frequency -= EDGE_FREQUENCY (e2);
1161 }
1162 if (e->count < 0)
1163 e->count = 0;
1164 if (exit_block->count < 0)
1165 exit_block->count = 0;
1166 if (exit_block->frequency < 0)
1167 exit_block->frequency = 0;
1168 update_bb_for_insn (exit_block);
1169}
1170
1171/* Translate the intermediate representation contained in the CFG
1172 from GIMPLE trees to RTL.
1173
1174 We do conversion per basic block and preserve/update the tree CFG.
1175 This implies we have to do some magic as the CFG can simultaneously
1176 consist of basic blocks containing RTL and GIMPLE trees. This can
1177 confuse the CFG hooks, so be careful to not manipulate CFG during
1178 the expansion. */
1179
1180static void
1181tree_expand_cfg (void)
1182{
1183 basic_block bb, init_block;
1184 sbitmap blocks;
1185
1186 profile_status = PROFILE_ABSENT;
1187
1188 /* Some backends want to know that we are expanding to RTL. */
1189 currently_expanding_to_rtl = 1;
1190
1191 /* Prepare the rtl middle end to start recording block changes. */
1192 reset_block_changes ();
1193
1194 /* Expand the variables recorded during gimple lowering. */
1195 expand_used_vars ();
1196
1197 /* Set up parameters and prepare for return, for the function. */
1198 expand_function_start (current_function_decl);
1199
1200 /* If this function is `main', emit a call to `__main'
1201 to run global initializers, etc. */
1202 if (DECL_NAME (current_function_decl)
1203 && MAIN_NAME_P (DECL_NAME (current_function_decl))
1204 && DECL_FILE_SCOPE_P (current_function_decl))
1205 expand_main_function ();
1206
1207 /* Register rtl specific functions for cfg. */
1208 rtl_register_cfg_hooks ();
1209
1210 init_block = construct_init_block ();
1211
1212 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
1213 bb = expand_gimple_basic_block (bb, dump_file);
1214
1215 construct_exit_block ();
1216
1217 /* We're done expanding trees to RTL. */
1218 currently_expanding_to_rtl = 0;
1219
1220 /* Convert from NOTE_INSN_EH_REGION style notes, and do other
1221 sorts of eh initialization. */
1222 convert_from_eh_region_ranges ();
1223
1224 rebuild_jump_labels (get_insns ());
1225 find_exception_handler_labels ();
1226
1227 blocks = sbitmap_alloc (last_basic_block);
1228 sbitmap_ones (blocks);
1229 find_many_sub_basic_blocks (blocks);
1230 purge_all_dead_edges (0);
1231 sbitmap_free (blocks);
1232
1233 compact_blocks ();
1234#ifdef ENABLE_CHECKING
1235 verify_flow_info();
1236#endif
1237
1238 /* There's no need to defer outputting this function any more; we
1239 know we want to output it. */
1240 DECL_DEFER_OUTPUT (current_function_decl) = 0;
1241
1242 /* Now that we're done expanding trees to RTL, we shouldn't have any
1243 more CONCATs anywhere. */
1244 generating_concat_p = 0;
1245
1246 finalize_block_changes ();
1247}
1248
1249struct tree_opt_pass pass_expand =
1250{
1251 "expand", /* name */
1252 NULL, /* gate */
1253 tree_expand_cfg, /* execute */
1254 NULL, /* sub */
1255 NULL, /* next */
1256 0, /* static_pass_number */
1257 TV_EXPAND, /* tv_id */
1258 /* ??? If TER is enabled, we actually receive GENERIC. */
1259 PROP_gimple_leh | PROP_cfg, /* properties_required */
1260 PROP_rtl, /* properties_provided */
1261 PROP_gimple_leh, /* properties_destroyed */
1262 0, /* todo_flags_start */
1263 0, /* todo_flags_finish */
1264 'r' /* letter */
1265};