/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING.  If not, write to
the Free Software Foundation, 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1301, USA.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
/* Verify that there is exactly one jump instruction emitted since LAST
   and attach a REG_BR_PROB note to it, specifying PROBABILITY.
   ??? We really ought to pass the probability down to the RTL expanders
   and let them re-distribute it when the conditional expands into multiple
   conditionals.  This is however difficult to do.  */

static void
add_reg_br_prob_note (FILE *dump_file, rtx last, int probability)
{
  if (profile_status == PROFILE_ABSENT)
    return;
  for (last = NEXT_INSN (last); last && NEXT_INSN (last); last = NEXT_INSN (last))
    if (JUMP_P (last))
      {
        /* It is common to emit a condjump-around-jump sequence when we don't
           know how to reverse the conditional.  Special case this.  */
        if (!any_condjump_p (last)
            || !JUMP_P (NEXT_INSN (last))
            || !simplejump_p (NEXT_INSN (last))
            || !BARRIER_P (NEXT_INSN (NEXT_INSN (last)))
            || !LABEL_P (NEXT_INSN (NEXT_INSN (NEXT_INSN (last))))
            || NEXT_INSN (NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))))
          goto failed;
        gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
        REG_NOTES (last)
          = gen_rtx_EXPR_LIST (REG_BR_PROB,
                               GEN_INT (REG_BR_PROB_BASE - probability),
                               REG_NOTES (last));
        return;
      }
  if (!last || !JUMP_P (last) || !any_condjump_p (last))
    goto failed;
  gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
  REG_NOTES (last)
    = gen_rtx_EXPR_LIST (REG_BR_PROB,
                         GEN_INT (probability), REG_NOTES (last));
  return;
 failed:
  if (dump_file)
    fprintf (dump_file, "Failed to add probability note\n");
}

#ifndef LOCAL_ALIGNMENT
#define LOCAL_ALIGNMENT(TYPE, ALIGNMENT) ALIGNMENT
#endif

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif


/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* We have an interference graph between such objects.  This graph
   is lower triangular.  */
static bool *stack_vars_conflict;
static size_t stack_vars_conflict_alloc;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset + frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;
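
/* A worked example with hypothetical target values: if
   PREFERRED_STACK_BOUNDARY is 128 bits (16 bytes) and
   STARTING_FRAME_OFFSET is 4, expand_used_vars below computes
   frame_phase = 16 - 4 = 12, so every boundary-aligned frame offset
   OFF handed out satisfies (OFF + 12) % 16 == 0.  */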

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Discover the byte alignment to use for DECL.  Ignore alignment
   we can't satisfy with the expected alignment of the stack boundary.  */

static unsigned int
get_decl_align_unit (tree decl)
{
  unsigned int align;

  align = DECL_ALIGN (decl);
  align = LOCAL_ALIGNMENT (TREE_TYPE (decl), align);
  if (align > PREFERRED_STACK_BOUNDARY)
    align = PREFERRED_STACK_BOUNDARY;
  if (cfun->stack_alignment_needed < align)
    cfun->stack_alignment_needed = align;

  return align / BITS_PER_UNIT;
}

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  return offset;
}
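
/* A worked numeric sketch of the FRAME_GROWS_DOWNWARD case above,
   assuming frame_phase is 0: starting from frame_offset 0, a request
   for 12 bytes at 8-byte alignment computes 0 - 12 = -12, rounds down
   to -16 via the mask with -8, and returns -16 as the new offset,
   leaving frame_offset at -16.  */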

/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  stack_vars[stack_vars_num].decl = decl;
  stack_vars[stack_vars_num].offset = 0;
  stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
  stack_vars[stack_vars_num].alignb = get_decl_align_unit (decl);

  /* All variables are initially in their own partition.  */
  stack_vars[stack_vars_num].representative = stack_vars_num;
  stack_vars[stack_vars_num].next = EOC;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  SET_DECL_RTL (decl, pc_rtx);

  stack_vars_num++;
}

/* Compute the linear index of a lower-triangular coordinate (I, J).  */

static size_t
triangular_index (size_t i, size_t j)
{
  if (i < j)
    {
      size_t t;
      t = i, i = j, j = t;
    }
  return (i * (i + 1)) / 2 + j;
}
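
/* For illustration: the lower-triangular entries (0,0), (1,0), (1,1),
   (2,0), (2,1), (2,2), ... map to linear indices 0, 1, 2, 3, 4, 5, ...
   So (3, 1) -- or equivalently (1, 3), since the arguments are swapped
   into lower-triangular order -- maps to 3*4/2 + 1 = 7.  */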

/* Ensure that STACK_VARS_CONFLICT is large enough for N objects.  */

static void
resize_stack_vars_conflict (size_t n)
{
  size_t size = triangular_index (n-1, n-1) + 1;

  if (size <= stack_vars_conflict_alloc)
    return;

  stack_vars_conflict = XRESIZEVEC (bool, stack_vars_conflict, size);
  memset (stack_vars_conflict + stack_vars_conflict_alloc, 0,
          (size - stack_vars_conflict_alloc) * sizeof (bool));
  stack_vars_conflict_alloc = size;
}

/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  size_t index = triangular_index (x, y);
  gcc_assert (index < stack_vars_conflict_alloc);
  stack_vars_conflict[index] = true;
}

/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  size_t index = triangular_index (x, y);
  gcc_assert (index < stack_vars_conflict_alloc);
  return stack_vars_conflict[index];
}

/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then do add a conflict for these variables
   in the interference graph.  We also have to mind MEM_IN_STRUCT_P and
   MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);

      for (j = 0; j < i; ++j)
        {
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
          if (aggr_i != aggr_j || !objects_must_conflict_p (type_i, type_j))
            add_stack_var_conflict (i, j);
        }
    }
}

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the size of the object.  */

static int
stack_var_size_cmp (const void *a, const void *b)
{
  HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
  HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;

  if (sa < sb)
    return -1;
  if (sa > sb)
    return 1;
  return 0;
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the end
   of the partitioning process we'll have a nice block that is easy to lay out
   within the stack frame.  */

static void
union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
{
  size_t i, last;

  /* Update each element of partition B with the given offset,
     and merge them into partition A.  */
  for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
    {
      stack_vars[i].offset += offset;
      stack_vars[i].representative = a;
    }
  stack_vars[last].next = stack_vars[a].next;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  for (last = stack_vars_num, i = 0; i < last; ++i)
    if (stack_var_conflict_p (b, i))
      add_stack_var_conflict (a, i);
}

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
            offset(B) = O
            O += size(B)
            S -= size(B)
          }
        }
*/
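
/* An illustrative walk-through, assuming three variables with sizes 32,
   8 and 4 bytes, no conflicts and no alignment constraints: the loop
   below first merges the 4-byte object into the 8-byte one at offset 0,
   then merges that partition into the 32-byte object, again at offset 0.
   All three end up sharing one 32-byte block of frame space rather than
   occupying 44 bytes.  */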

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);

  /* Special case: detect when all variables conflict, and thus we can't
     do anything during the partitioning loop.  It isn't uncommon (with
     C code at least) to declare all variables at the top of the function,
     and if we're not inlining, then all variables will be in the same scope.
     Take advantage of very fast libc routines for this scan.  */
  gcc_assert (sizeof (bool) == sizeof (char));
  if (memchr (stack_vars_conflict, false, stack_vars_conflict_alloc) == NULL)
    return;

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      HOST_WIDE_INT isize = stack_vars[i].size;
      HOST_WIDE_INT offset = 0;

      for (sj = si; sj-- > 0; )
        {
          size_t j = stack_vars_sorted[sj];
          HOST_WIDE_INT jsize = stack_vars[j].size;
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore objects too large for the remaining space.  */
          if (isize < jsize)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Refine the remaining space check to include alignment.  */
          if (offset & (jalign - 1))
            {
              HOST_WIDE_INT toff = offset;
              toff += jalign - 1;
              toff &= -(HOST_WIDE_INT)jalign;
              if (isize - (toff - offset) < jsize)
                continue;

              isize -= toff - offset;
              offset = toff;
            }

          /* UNION the objects, placing J at OFFSET, then advance the
             offset and remaining-space counters past J.  */
          union_stack_vars (i, j, offset);
          offset += jsize;
          isize -= jsize;
          if (isize == 0)
            break;
        }
    }
}

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
          fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
                   stack_vars[j].offset);
        }
    }
}

/* Assign rtl to DECL at frame offset OFFSET.  */

static void
expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
{
  HOST_WIDE_INT align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (virtual_stack_vars_rtx, offset);
  x = gen_rtx_MEM (DECL_MODE (decl), x);

  /* Set alignment we actually gave this decl.  */
  offset -= frame_phase;
  align = offset & -offset;
  align *= BITS_PER_UNIT;
  if (align > STACK_BOUNDARY || align == 0)
    align = STACK_BOUNDARY;
  DECL_ALIGN (decl) = align;
  DECL_USER_ALIGN (decl) = 0;
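
  /* A note on the computation above: OFFSET & -OFFSET isolates the
     lowest set bit of the phase-corrected offset, i.e. the largest
     power of two that divides it, which is the best byte alignment we
     can prove; e.g. an offset of -24 yields 8.  */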

  set_mem_attributes (x, decl, true);
  SET_DECL_RTL (decl, x);
}

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      if (DECL_RTL (stack_vars[i].decl) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (stack_vars[i].decl))
        continue;

      offset = alloc_stack_frame_space (stack_vars[i].size,
                                        stack_vars[i].alignb);

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        expand_one_stack_var_at (stack_vars[j].decl,
                                 stack_vars[j].offset + offset);
    }
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset, align;

  size = tree_low_cst (DECL_SIZE_UNIT (var), 1);
  align = get_decl_align_unit (var);
  offset = alloc_stack_frame_space (size, align);

  expand_one_stack_var_at (var, offset);
}

/* A subroutine of expand_one_var.  Called to assign rtl
   to a TREE_STATIC VAR_DECL.  */

static void
expand_one_static_var (tree var)
{
  /* In unit-at-a-time mode, all static variables are expanded at the
     end of the compilation process.  */
  if (flag_unit_at_a_time)
    return;
  /* If this is an inlined copy of a static local variable,
     look up the original.  */
  var = DECL_ORIGIN (var);

  /* If we've already processed this variable because of that, do nothing.  */
  if (TREE_ASM_WRITTEN (var))
    return;

  /* Give the front end a chance to do whatever.  In practice, this is
     resolving duplicate names for IMA in C.  */
  if (lang_hooks.expand_decl (var))
    return;

  /* Otherwise, just emit the variable.  */
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree type = TREE_TYPE (var);
  int unsignedp = TYPE_UNSIGNED (type);
  enum machine_mode reg_mode
    = promote_mode (type, DECL_MODE (var), &unsignedp, 0);
  rtx x = gen_reg_rtx (reg_mode);

  SET_DECL_RTL (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (var))
    {
      mark_user_reg (x);

      /* Trust user variables which have a pointer type to really
         be pointers.  Do not trust compiler generated temporaries
         as our type system is totally busted as it relates to
         pointer arithmetic which translates into lots of compiler
         generated objects with pointer types, but which are not really
         pointers.  */
      if (POINTER_TYPE_P (type))
        mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (var))));
    }
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}

/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.  */

static void
expand_one_var (tree var, bool toplevel)
{
  if (TREE_CODE (var) != VAR_DECL)
    lang_hooks.expand_decl (var);
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    expand_one_static_var (var);
  else if (DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    expand_one_error_var (var);
  else if (DECL_HARD_REGISTER (var))
    expand_one_hard_reg_var (var);
  else if (use_register_for_decl (var))
    expand_one_register_var (var);
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (var);
  else
    expand_one_stack_var (var);
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t))
      expand_one_var (t, toplevel);

  this_sv_num = stack_vars_num;

  /* Expand all variables at contained levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  Do make certain that a
     variable conflicts with itself.  */
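  /* An illustration of the loop bounds below: for a variable I added at
     this level (I < THIS_SV_NUM), J runs from I down to OLD_SV_NUM, so
     the pair (I, I) is included and I does conflict with itself.  For a
     variable I added by a sublevel, J only covers [OLD_SV_NUM,
     THIS_SV_NUM), so sublevel variables conflict with this level's
     variables but not with variables from sibling sublevels, which is
     what allows their stack slots to be shared.  */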
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;
      resize_stack_vars_conflict (new_sv_num);

      for (i = old_sv_num; i < new_sv_num; ++i)
        for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
          add_stack_var_conflict (i, j);
    }
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY       1
#define SPCT_HAS_SMALL_CHAR_ARRAY       2
#define SPCT_HAS_ARRAY                  4
#define SPCT_HAS_AGGREGATE              8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}

/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}
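
/* For example, with the default ssp-buffer-size of 8 and
   -fstack-protector-all (flag_stack_protect == 2): a bare "char buf[16]"
   classifies as a large char array with no aggregate bit and lands in
   phase 1, while a struct containing that same array also has
   SPCT_HAS_AGGREGATE set and so falls through to phase 2.  */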

/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  cfun->stack_protect_guard = guard;
}

/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree t, outer_block = DECL_INITIAL (current_function_decl);

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }

  /* Set TREE_USED on all variables in the unexpanded_var_list.  */
  for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
    TREE_USED (TREE_VALUE (t)) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (outer_block);

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;

  /* At this point all variables on the unexpanded_var_list with TREE_USED
     set are not associated with any block scope.  Lay them out.  */
  for (t = cfun->unexpanded_var_list; t; t = TREE_CHAIN (t))
    {
      tree var = TREE_VALUE (t);
      bool expand_now = false;

      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* Any variable that could have been hoisted into an SSA_NAME
         will have been propagated anywhere the optimizers chose,
         i.e. not confined to their original block.  Allocate them
         as if they were defined in the outermost scope.  */
      else if (is_gimple_reg (var))
        expand_now = true;

      /* If the variable is not associated with any block, then it
         was created by the optimizers, and could be live anywhere
         in the function.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        expand_one_var (var, true);
    }
  cfun->unexpanded_var_list = NULL_TREE;

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Due to the way alias sets work, no variables with non-conflicting
         alias sets may be assigned the same address.  Add conflicts to
         reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
          && (current_function_calls_alloca || has_protected_decls)))
    create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == 2)
            expand_stack_vars (stack_protect_decl_phase_2);
        }

      expand_stack_vars (NULL);

      /* Free up stack variable graph data.  */
      XDELETEVEC (stack_vars);
      XDELETEVEC (stack_vars_sorted);
      XDELETEVEC (stack_vars_conflict);
      stack_vars = NULL;
      stack_vars_alloc = stack_vars_num = 0;
      stack_vars_conflict = NULL;
      stack_vars_conflict_alloc = 0;
    }

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
    }
}


/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_tree_stmt (tree stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_generic_expr (dump_file, stmt, TDF_SLIM);
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}

/* A subroutine of expand_gimple_basic_block.  Expand one COND_EXPR.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */

static basic_block
expand_gimple_cond_expr (basic_block bb, tree stmt)
{
  basic_block new_bb, dest;
  edge new_edge;
  edge true_edge;
  edge false_edge;
  tree pred = COND_EXPR_COND (stmt);
  tree then_exp = COND_EXPR_THEN (stmt);
  tree else_exp = COND_EXPR_ELSE (stmt);
  rtx last2, last;

  last2 = last = get_last_insn ();

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  if (EXPR_LOCUS (stmt))
    {
      emit_line_note (*(EXPR_LOCUS (stmt)));
      record_block_change (TREE_BLOCK (stmt));
    }

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     a two-way jump that needs to be decomposed into two basic blocks.  */
  if (TREE_CODE (then_exp) == GOTO_EXPR && IS_EMPTY_STMT (else_exp))
    {
      jumpif (pred, label_rtx (GOTO_DESTINATION (then_exp)));
      add_reg_br_prob_note (dump_file, last, true_edge->probability);
      maybe_dump_rtl_for_tree_stmt (stmt, last);
      if (EXPR_LOCUS (then_exp))
        emit_line_note (*(EXPR_LOCUS (then_exp)));
      return NULL;
    }
  if (TREE_CODE (else_exp) == GOTO_EXPR && IS_EMPTY_STMT (then_exp))
    {
      jumpifnot (pred, label_rtx (GOTO_DESTINATION (else_exp)));
      add_reg_br_prob_note (dump_file, last, false_edge->probability);
      maybe_dump_rtl_for_tree_stmt (stmt, last);
      if (EXPR_LOCUS (else_exp))
        emit_line_note (*(EXPR_LOCUS (else_exp)));
      return NULL;
    }
  gcc_assert (TREE_CODE (then_exp) == GOTO_EXPR
              && TREE_CODE (else_exp) == GOTO_EXPR);

  jumpif (pred, label_rtx (GOTO_DESTINATION (then_exp)));
  add_reg_br_prob_note (dump_file, last, true_edge->probability);
  last = get_last_insn ();
  expand_expr (else_exp, const0_rtx, VOIDmode, 0);

  BB_END (bb) = last;
  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count;
  new_bb->frequency = EDGE_FREQUENCY (false_edge);
  new_edge = make_edge (new_bb, dest, 0);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = new_bb->count;
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_tree_stmt (stmt, last2);

  if (EXPR_LOCUS (else_exp))
    emit_line_note (*(EXPR_LOCUS (else_exp)));

  return new_bb;
}

/* A subroutine of expand_gimple_basic_block.  Expand one CALL_EXPR
   that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
   generated a tail call (something that might be denied by the ABI
   rules governing the call; see calls.c).

   Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
   can still reach the rest of BB.  The case here is __builtin_sqrt,
   where the NaN result goes through the external function (with a
   tailcall) and the normal result happens via a sqrt instruction.  */

static basic_block
expand_gimple_tailcall (basic_block bb, tree stmt, bool *can_fallthru)
{
  rtx last2, last;
  edge e;
  edge_iterator ei;
  int probability;
  gcov_type count;

  last2 = last = get_last_insn ();

  expand_expr_stmt (stmt);

  for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
    if (CALL_P (last) && SIBLING_CALL_P (last))
      goto found;

  maybe_dump_rtl_for_tree_stmt (stmt, last2);

  *can_fallthru = true;
  return NULL;

 found:
  /* ??? Wouldn't it be better to just reset any pending stack adjust?
     Any instructions emitted here are about to be deleted.  */
  do_pending_stack_adjust ();

  /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
  /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
     EH or abnormal edges, we shouldn't have created a tail call in
     the first place.  So it seems to me we should just be removing
     all edges here, or redirecting the existing fallthru edge to
     the exit block.  */

  probability = 0;
  count = 0;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
        {
          if (e->dest != EXIT_BLOCK_PTR)
            {
              e->dest->count -= e->count;
              e->dest->frequency -= EDGE_FREQUENCY (e);
              if (e->dest->count < 0)
                e->dest->count = 0;
              if (e->dest->frequency < 0)
                e->dest->frequency = 0;
            }
          count += e->count;
          probability += e->probability;
          remove_edge (e);
        }
      else
        ei_next (&ei);
    }

  /* This is somewhat ugly: the call_expr expander often emits instructions
     after the sibcall (to perform the function return).  These confuse the
     find_many_sub_basic_blocks code, so we need to get rid of them.  */
  last = NEXT_INSN (last);
  gcc_assert (BARRIER_P (last));

  *can_fallthru = false;
  while (NEXT_INSN (last))
    {
      /* For instance, an sqrt builtin expander expands an if () with a
         sibcall in the then-arm and a label in the else-arm.  */
      if (LABEL_P (NEXT_INSN (last)))
        {
          *can_fallthru = true;
          break;
        }
      delete_insn (NEXT_INSN (last));
    }

  e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
  e->probability += probability;
  e->count += count;
  BB_END (bb) = last;
  update_bb_for_insn (bb);

  if (NEXT_INSN (last))
    {
      bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);

      last = BB_END (bb);
      if (BARRIER_P (last))
        BB_END (bb) = PREV_INSN (last);
    }

  maybe_dump_rtl_for_tree_stmt (stmt, last2);

  return bb;
}

/* Expand basic block BB from GIMPLE trees to RTL.  */

static basic_block
expand_gimple_basic_block (basic_block bb, FILE *dump_file)
{
  block_stmt_iterator bsi = bsi_start (bb);
  tree stmt = NULL;
  rtx note, last;
  edge e;
  edge_iterator ei;

  if (dump_file)
    {
      fprintf (dump_file,
               "\n;; Generating RTL for tree basic block %d\n",
               bb->index);
    }

  init_rtl_bb_info (bb);
  bb->flags |= BB_RTL;

  if (!bsi_end_p (bsi))
    stmt = bsi_stmt (bsi);

  if (stmt && TREE_CODE (stmt) == LABEL_EXPR)
    {
      last = get_last_insn ();

      expand_expr_stmt (stmt);

      /* Java emits line number notes at the top of labels.
         ??? Make this go away once line number notes are obsoleted.  */
      BB_HEAD (bb) = NEXT_INSN (last);
      if (NOTE_P (BB_HEAD (bb)))
        BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
      bsi_next (&bsi);
      note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));

      maybe_dump_rtl_for_tree_stmt (stmt, last);
    }
  else
    note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);

  NOTE_BASIC_BLOCK (note) = bb;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
      e->flags &= ~EDGE_EXECUTABLE;

      /* At the moment not all abnormal edges match the RTL representation.
         It is safe to remove them here as find_many_sub_basic_blocks will
         rediscover them.  In the future we should get this fixed properly.  */
      if (e->flags & EDGE_ABNORMAL)
        remove_edge (e);
      else
        ei_next (&ei);
    }

  for (; !bsi_end_p (bsi); bsi_next (&bsi))
    {
      tree stmt = bsi_stmt (bsi);
      basic_block new_bb;

      if (!stmt)
        continue;

      /* Expand this statement, then evaluate the resulting RTL and
         fix up the CFG accordingly.  */
      if (TREE_CODE (stmt) == COND_EXPR)
        {
          new_bb = expand_gimple_cond_expr (bb, stmt);
          if (new_bb)
            return new_bb;
        }
      else
        {
          tree call = get_call_expr_in (stmt);
          if (call && CALL_EXPR_TAILCALL (call))
            {
              bool can_fallthru;
              new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
              if (new_bb)
                {
                  if (can_fallthru)
                    bb = new_bb;
                  else
                    return new_bb;
                }
            }
          else
            {
              last = get_last_insn ();
              expand_expr_stmt (stmt);
              maybe_dump_rtl_for_tree_stmt (stmt, last);
            }
        }
    }

  do_pending_stack_adjust ();

  /* Find the block tail.  The last insn in the block is the insn
     before a barrier and/or table jump insn.  */
  last = get_last_insn ();
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  if (JUMP_TABLE_DATA_P (last))
    last = PREV_INSN (PREV_INSN (last));
  BB_END (bb) = last;

  update_bb_for_insn (bb);

  return bb;
}


/* Create a basic block for initialization code.  */

static basic_block
construct_init_block (void)
{
  basic_block init_block, first_block;
  edge e = NULL;
  int flags;

  /* Multiple entry points not supported yet.  */
  gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
  init_rtl_bb_info (ENTRY_BLOCK_PTR);
  init_rtl_bb_info (EXIT_BLOCK_PTR);
  ENTRY_BLOCK_PTR->flags |= BB_RTL;
  EXIT_BLOCK_PTR->flags |= BB_RTL;

  e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);

  /* When the entry edge points to the first basic block, we don't need a
     jump; otherwise we have to jump to the proper target.  */
  if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
    {
      tree label = tree_block_label (e->dest);

      emit_jump (label_rtx (label));
      flags = 0;
    }
  else
    flags = EDGE_FALLTHRU;

  init_block = create_basic_block (NEXT_INSN (get_insns ()),
                                   get_last_insn (),
                                   ENTRY_BLOCK_PTR);
  init_block->frequency = ENTRY_BLOCK_PTR->frequency;
  init_block->count = ENTRY_BLOCK_PTR->count;
  if (e)
    {
      first_block = e->dest;
      redirect_edge_succ (e, init_block);
      e = make_edge (init_block, first_block, flags);
    }
  else
    e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = ENTRY_BLOCK_PTR->count;

  update_bb_for_insn (init_block);
  return init_block;
}


/* Create a block containing landing pads and similar stuff.  */

static void
construct_exit_block (void)
{
  rtx head = get_last_insn ();
  rtx end;
  basic_block exit_block;
  edge e, e2;
  unsigned ix;
  edge_iterator ei;

  /* Make sure the locus is set to the end of the function, so that
     epilogue line numbers and warnings are set properly.  */
#ifdef USE_MAPPED_LOCATION
  if (cfun->function_end_locus != UNKNOWN_LOCATION)
#else
  if (cfun->function_end_locus.file)
#endif
    input_location = cfun->function_end_locus;

  /* The following insns belong to the top scope.  */
  record_block_change (DECL_INITIAL (current_function_decl));

  /* Generate rtl for function exit.  */
  expand_function_end ();

  end = get_last_insn ();
  if (head == end)
    return;
  while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
    head = NEXT_INSN (head);
  exit_block = create_basic_block (NEXT_INSN (head), end,
                                   EXIT_BLOCK_PTR->prev_bb);
  exit_block->frequency = EXIT_BLOCK_PTR->frequency;
  exit_block->count = EXIT_BLOCK_PTR->count;

  ix = 0;
  while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
    {
      e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
      if (!(e->flags & EDGE_ABNORMAL))
        redirect_edge_succ (e, exit_block);
      else
        ix++;
    }

  e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = EXIT_BLOCK_PTR->count;
  FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
    if (e2 != e)
      {
        e->count -= e2->count;
        exit_block->count -= e2->count;
        exit_block->frequency -= EDGE_FREQUENCY (e2);
      }
  if (e->count < 0)
    e->count = 0;
  if (exit_block->count < 0)
    exit_block->count = 0;
  if (exit_block->frequency < 0)
    exit_block->frequency = 0;
  update_bb_for_insn (exit_block);
}

/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do conversion per basic block and preserve/update the tree CFG.
   This implies we have to do some magic as the CFG can simultaneously
   consist of basic blocks containing RTL and GIMPLE trees.  This can
   confuse the CFG hooks, so be careful not to manipulate the CFG during
   the expansion.  */

static void
tree_expand_cfg (void)
{
  basic_block bb, init_block;
  sbitmap blocks;

  /* Some backends want to know that we are expanding to RTL.  */
  currently_expanding_to_rtl = 1;

  /* Prepare the rtl middle end to start recording block changes.  */
  reset_block_changes ();

  /* Expand the variables recorded during gimple lowering.  */
  expand_used_vars ();

  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
    {
      if (current_function_calls_alloca)
        warning (0, "not protecting local variables: variable length buffer");
      if (has_short_buffer && !cfun->stack_protect_guard)
        warning (0, "not protecting function: no buffer at least %d bytes long",
                 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
    }

  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();

  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (cfun->stack_protect_guard)
    stack_protect_prologue ();

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
    bb = expand_gimple_basic_block (bb, dump_file);

  construct_exit_block ();

  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  /* Convert tree EH labels to RTL EH labels, and clean out any unreachable
     EH regions.  */
  convert_from_eh_region_ranges ();

  rebuild_jump_labels (get_insns ());
  find_exception_handler_labels ();

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  purge_all_dead_edges ();
  sbitmap_free (blocks);

  compact_blocks ();
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  finalize_block_changes ();

  if (dump_file)
    {
      fprintf (dump_file,
               "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
         parent != NULL_TREE;
         parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
        TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     function before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;
}

struct tree_opt_pass pass_expand =
{
  "expand",                             /* name */
  NULL,                                 /* gate */
  tree_expand_cfg,                      /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_EXPAND,                            /* tv_id */
  /* ??? If TER is enabled, we actually receive GENERIC.  */
  PROP_gimple_leh | PROP_cfg,           /* properties_required */
  PROP_rtl,                             /* properties_provided */
  PROP_gimple_leh,                      /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
  'r'                                   /* letter */
};