/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "target.h"

/* Verify that there is exactly one jump instruction since LAST and attach
   a REG_BR_PROB note to it, specifying the probability.
   ??? We really ought to pass the probability down to the RTL expanders and
   let them re-distribute it when the conditional expands into multiple
   conditionals.  This is however difficult to do.  */
void
add_reg_br_prob_note (rtx last, int probability)
{
  if (profile_status == PROFILE_ABSENT)
    return;
  for (last = NEXT_INSN (last); last && NEXT_INSN (last); last = NEXT_INSN (last))
    if (JUMP_P (last))
      {
        /* It is common to emit a condjump-around-jump sequence when we don't
           know how to reverse the conditional.  Special case this.  */
        if (!any_condjump_p (last)
            || !JUMP_P (NEXT_INSN (last))
            || !simplejump_p (NEXT_INSN (last))
            || !NEXT_INSN (NEXT_INSN (last))
            || !BARRIER_P (NEXT_INSN (NEXT_INSN (last)))
            || !NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))
            || !LABEL_P (NEXT_INSN (NEXT_INSN (NEXT_INSN (last))))
            || NEXT_INSN (NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))))
          goto failed;
        gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
        add_reg_note (last, REG_BR_PROB,
                      GEN_INT (REG_BR_PROB_BASE - probability));
        return;
      }
  if (!last || !JUMP_P (last) || !any_condjump_p (last))
    goto failed;
  gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
  add_reg_note (last, REG_BR_PROB, GEN_INT (probability));
  return;
 failed:
  if (dump_file)
    fprintf (dump_file, "Failed to add probability note\n");
}

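/* For illustration (a sketch of the common case, not text from the original
   source): the condjump-around-jump special case above matches an insn
   sequence typically of the shape

	(conditional jump to L)
	(unconditional jump)
	(barrier)
	L: (code label)

   with nothing after the label, and attaches the reversed probability
   REG_BR_PROB_BASE - PROBABILITY to the conditional jump, since the
   condition is inverted relative to the original branch.  */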

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif


/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment of this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* We have an interference graph between such objects.  This graph
   is lower triangular.  */
static bool *stack_vars_conflict;
static size_t stack_vars_conflict_alloc;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset + frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Discover the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
get_decl_align_unit (tree decl)
{
  unsigned int align;

  align = DECL_ALIGN (decl);
  align = LOCAL_ALIGNMENT (TREE_TYPE (decl), align);
  if (align > PREFERRED_STACK_BOUNDARY)
    align = PREFERRED_STACK_BOUNDARY;
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;

  return align / BITS_PER_UNIT;
}

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
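
/* A worked example, for illustration (not text from the original source):
   with FRAME_GROWS_DOWNWARD, frame_offset == 0, frame_phase == 0,
   size == 12 and align == 8, the code above computes
   new_frame_offset = (0 - 12) & -8 == -16, so the 12 bytes are placed at
   frame offset -16 and the 4 bytes above them are alignment padding.  */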

/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  stack_vars[stack_vars_num].decl = decl;
  stack_vars[stack_vars_num].offset = 0;
  stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
  stack_vars[stack_vars_num].alignb = get_decl_align_unit (decl);

  /* All variables are initially in their own partition.  */
  stack_vars[stack_vars_num].representative = stack_vars_num;
  stack_vars[stack_vars_num].next = EOC;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  SET_DECL_RTL (decl, pc_rtx);

  stack_vars_num++;
}

/* Compute the linear index of a lower-triangular coordinate (I, J).  */

static size_t
triangular_index (size_t i, size_t j)
{
  if (i < j)
    {
      size_t t;
      t = i, i = j, j = t;
    }
  return (i * (i + 1)) / 2 + j;
}
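
/* For illustration (not text from the original source): the lower triangle
   is stored row-major, so row I begins at linear index I*(I+1)/2.
   E.g. triangular_index (3, 1) == 3*4/2 + 1 == 7, and triangular_index (1, 3)
   swaps its arguments to reach the same slot, which is what keeps the
   conflict relation symmetric.  */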

/* Ensure that STACK_VARS_CONFLICT is large enough for N objects.  */

static void
resize_stack_vars_conflict (size_t n)
{
  size_t size = triangular_index (n-1, n-1) + 1;

  if (size <= stack_vars_conflict_alloc)
    return;

  stack_vars_conflict = XRESIZEVEC (bool, stack_vars_conflict, size);
  memset (stack_vars_conflict + stack_vars_conflict_alloc, 0,
          (size - stack_vars_conflict_alloc) * sizeof (bool));
  stack_vars_conflict_alloc = size;
}

/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  size_t index = triangular_index (x, y);
  gcc_assert (index < stack_vars_conflict_alloc);
  stack_vars_conflict[index] = true;
}

/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  size_t index = triangular_index (x, y);
  gcc_assert (index < stack_vars_conflict_alloc);
  return stack_vars_conflict[index];
}

/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
        return true;

  return false;
}

/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then do add a conflict for these variables
   in the interference graph.  We also need to make sure to add conflicts
   for union-containing structures.  Otherwise RTL alias analysis comes
   along and, due to type-based aliasing rules, decides that accesses to
   the same memory through two overlapping union temporaries
   { short s; int i; } may not alias, and happily reorders stores across
   the life-time boundaries of the temporaries (see PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
        {
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
          if (aggr_i != aggr_j
              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case.  */
              || contains_union)
            add_stack_var_conflict (i, j);
        }
    }
}

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the size of the object.  */

static int
stack_var_size_cmp (const void *a, const void *b)
{
  HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
  HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
  unsigned int uida = DECL_UID (stack_vars[*(const size_t *)a].decl);
  unsigned int uidb = DECL_UID (stack_vars[*(const size_t *)b].decl);

  if (sa < sb)
    return -1;
  if (sa > sb)
    return 1;
  /* For stack variables of the same size use the uid of the decl
     to make the sort stable.  */
  if (uida < uidb)
    return -1;
  if (uida > uidb)
    return 1;
  return 0;
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the end
   of the partitioning process we'll have a nice block that is easy to lay
   out within the stack frame.  */

static void
union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
{
  size_t i, last;

  /* Update each element of partition B with the given offset,
     and merge them into partition A.  */
  for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
    {
      stack_vars[i].offset += offset;
      stack_vars[i].representative = a;
    }
  stack_vars[last].next = stack_vars[a].next;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  for (last = stack_vars_num, i = 0; i < last; ++i)
    if (stack_var_conflict_p (b, i))
      add_stack_var_conflict (a, i);
}

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

	Sort the objects by size.
	For each object A {
	  S = size(A)
	  O = 0
	  loop {
	    Look for the largest non-conflicting object B with size <= S.
	    UNION (A, B)
	    offset(B) = O
	    O += size(B)
	    S -= size(B)
	  }
	}
*/
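
/* A worked example, for illustration (not text from the original source):
   given non-conflicting objects of sizes 32, 16 and 8, the algorithm above
   folds the smaller two into the 32-byte object's partition, so all three
   variables share a single 32-byte slot instead of occupying 56 bytes.
   Objects that do conflict (their lifetimes or alias sets overlap) are
   never merged and keep separate stack space.  */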

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);

  /* Special case: detect when all variables conflict, and thus we can't
     do anything during the partitioning loop.  It isn't uncommon (with
     C code at least) to declare all variables at the top of the function,
     and if we're not inlining, then all variables will be in the same scope.
     Take advantage of very fast libc routines for this scan.  */
  gcc_assert (sizeof (bool) == sizeof (char));
  if (memchr (stack_vars_conflict, false, stack_vars_conflict_alloc) == NULL)
    return;

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      HOST_WIDE_INT isize = stack_vars[i].size;
      HOST_WIDE_INT offset = 0;

      for (sj = si; sj-- > 0; )
        {
          size_t j = stack_vars_sorted[sj];
          HOST_WIDE_INT jsize = stack_vars[j].size;
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore objects too large for the remaining space.  */
          if (isize < jsize)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Refine the remaining space check to include alignment.  */
          if (offset & (jalign - 1))
            {
              HOST_WIDE_INT toff = offset;
              toff += jalign - 1;
              toff &= -(HOST_WIDE_INT)jalign;
              if (isize - (toff - offset) < jsize)
                continue;

              isize -= toff - offset;
              offset = toff;
            }

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j, offset);

          isize -= jsize;
          if (isize == 0)
            break;
        }
    }
}

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
          fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
                   stack_vars[j].offset);
        }
    }
}

/* Assign rtl to DECL at frame offset OFFSET.  */

static void
expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
{
  HOST_WIDE_INT align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (virtual_stack_vars_rtx, offset);
  x = gen_rtx_MEM (DECL_MODE (decl), x);

  /* Set alignment we actually gave this decl.  */
  offset -= frame_phase;
  align = offset & -offset;
  align *= BITS_PER_UNIT;
  if (align > STACK_BOUNDARY || align == 0)
    align = STACK_BOUNDARY;
  DECL_ALIGN (decl) = align;
  DECL_USER_ALIGN (decl) = 0;

  set_mem_attributes (x, decl, true);
  SET_DECL_RTL (decl, x);
}
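
/* For illustration (not text from the original source): OFFSET & -OFFSET
   isolates the lowest set bit of the phase-adjusted offset, i.e. the largest
   power-of-two byte alignment the placement is guaranteed to provide.
   E.g. an offset of 24 (binary 11000) yields 8 bytes, i.e. 64-bit alignment.
   An offset of 0 has no set bit and yields 0, which is why the align == 0
   case above falls back to STACK_BOUNDARY.  */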

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      if (DECL_RTL (stack_vars[i].decl) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (stack_vars[i].decl))
        continue;

      offset = alloc_stack_frame_space (stack_vars[i].size,
                                        stack_vars[i].alignb);

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
          expand_one_stack_var_at (stack_vars[j].decl,
                                   stack_vars[j].offset + offset);
        }
    }
}

/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        SET_DECL_RTL (stack_vars[j].decl, NULL);
    }
  return size;
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset, align;

  size = tree_low_cst (DECL_SIZE_UNIT (var), 1);
  align = get_decl_align_unit (var);
  offset = alloc_stack_frame_space (size, align);

  expand_one_stack_var_at (var, offset);
}

/* A subroutine of expand_one_var.  Called to assign rtl
   to a TREE_STATIC VAR_DECL.  */

static void
expand_one_static_var (tree var)
{
  /* In unit-at-a-time mode all the static variables are expanded at the
     end of the compilation process.  */
  if (flag_unit_at_a_time)
    return;
  /* If this is an inlined copy of a static local variable,
     look up the original.  */
  var = DECL_ORIGIN (var);

  /* If we've already processed this variable because of that, do nothing.  */
  if (TREE_ASM_WRITTEN (var))
    return;

  /* Otherwise, just emit the variable.  */
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree type = TREE_TYPE (var);
  int unsignedp = TYPE_UNSIGNED (type);
  enum machine_mode reg_mode
    = promote_mode (type, DECL_MODE (var), &unsignedp, 0);
  rtx x = gen_reg_rtx (reg_mode);

  SET_DECL_RTL (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (var))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (var))));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}

/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return the stack usage this variable is supposed to take.  */

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  if (TREE_CODE (var) != VAR_DECL)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    {
      if (really_expand)
        expand_one_static_var (var);
    }
  else if (DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (var);
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (var);
  else
    {
      if (really_expand)
        expand_one_stack_var (var);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t)
        /* Force local static variables to be output when marked by
           used attribute.  For unit-at-a-time, cgraph code already takes
           care of this.  */
        || (!flag_unit_at_a_time && TREE_STATIC (t)
            && DECL_PRESERVE_P (t)))
      expand_one_var (t, toplevel, true);

  this_sv_num = stack_vars_num;

  /* Expand all variables at contained levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  Do make certain that a
     variable conflicts with itself.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;
      resize_stack_vars_conflict (new_sv_num);

      for (i = old_sv_num; i < new_sv_num; ++i)
        for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
          add_stack_var_conflict (i, j);
    }
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY  1
#define SPCT_HAS_SMALL_CHAR_ARRAY  2
#define SPCT_HAS_ARRAY             4
#define SPCT_HAS_AGGREGATE         8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}

/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}
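
/* For illustration (not text from the original source): with
   -fstack-protector-all (flag_stack_protect == 2), a plain char buf[64]
   classifies as phase 1, while an array of structs classifies as phase 2;
   with plain -fstack-protector, only character arrays of at least
   PARAM_SSP_BUFFER_SIZE bytes are segregated.  Phase-1 decls are expanded
   first, immediately after the guard (see expand_used_vars below).  */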

/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static HOST_WIDE_INT
account_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;
  HOST_WIDE_INT size = 0;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t))
      size += expand_one_var (t, toplevel, false);

  this_sv_num = stack_vars_num;

  /* Expand all variables at contained levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    size += account_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  Do make certain that a
     variable conflicts with itself.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;
      resize_stack_vars_conflict (new_sv_num);

      for (i = old_sv_num; i < new_sv_num; ++i)
        for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
          add_stack_var_conflict (i, j);
    }
  return size;
}

/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  tree t;
  /* Set TREE_USED on all variables in the local_decls.  */
  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    TREE_USED (TREE_VALUE (t)) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);
  XDELETEVEC (stack_vars_conflict);
  stack_vars = NULL;
  stack_vars_alloc = stack_vars_num = 0;
  stack_vars_conflict = NULL;
  stack_vars_conflict_alloc = 0;
}

HOST_WIDE_INT
estimated_stack_frame_size (void)
{
  HOST_WIDE_INT size = 0;
  tree t, outer_block = DECL_INITIAL (current_function_decl);

  init_vars_expansion ();

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */
  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    {
      tree var = TREE_VALUE (t);

      if (TREE_USED (var))
        size += expand_one_var (var, true, false);
      TREE_USED (var) = 1;
    }
  size += account_used_vars_for_block (outer_block, true);
  if (stack_vars_num > 0)
    {
      /* Due to the way alias sets work, no variables with non-conflicting
         alias sets may be assigned the same address.  Add conflicts to
         reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();

      size += account_stack_vars ();
      fini_vars_expansion ();
    }
  return size;
}

/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree t, outer_block = DECL_INITIAL (current_function_decl);

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }

  init_vars_expansion ();

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */
  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    {
      tree var = TREE_VALUE (t);
      bool expand_now = false;

      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* Any variable that could have been hoisted into an SSA_NAME
         will have been propagated anywhere the optimizers chose,
         i.e. not confined to their original block.  Allocate them
         as if they were defined in the outermost scope.  */
      else if (is_gimple_reg (var))
        expand_now = true;

      /* If the variable is not associated with any block, then it
         was created by the optimizers, and could be live anywhere
         in the function.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        expand_one_var (var, true, true);
    }
  cfun->local_decls = NULL_TREE;

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Due to the way alias sets work, no variables with non-conflicting
         alias sets may be assigned the same address.  Add conflicts to
         reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
          && (cfun->calls_alloca || has_protected_decls)))
    create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == 2)
            expand_stack_vars (stack_protect_decl_phase_2);
        }

      expand_stack_vars (NULL);

      fini_vars_expansion ();
    }

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
    }
}


/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_tree_stmt (tree stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_generic_expr (dump_file, stmt, TDF_SLIM);
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}

/* Maps the blocks that do not contain tree labels to rtx labels.  */

static struct pointer_map_t *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx
label_rtx_for_bb (basic_block bb)
{
  tree_stmt_iterator tsi;
  tree lab, lab_stmt;
  void **elt;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  elt = pointer_map_contains (lab_rtx_for_bb, bb);
  if (elt)
    return (rtx) *elt;

  /* Find the tree label if it is present.  */

  for (tsi = tsi_start (bb_stmt_list (bb)); !tsi_end_p (tsi); tsi_next (&tsi))
    {
      lab_stmt = tsi_stmt (tsi);
      if (TREE_CODE (lab_stmt) != LABEL_EXPR)
        break;

      lab = LABEL_EXPR_LABEL (lab_stmt);
      if (DECL_NONLOCAL (lab))
        break;

      return label_rtx (lab);
    }

  elt = pointer_map_insert (lab_rtx_for_bb, bb);
  *elt = gen_label_rtx ();
  return (rtx) *elt;
}

/* A subroutine of expand_gimple_basic_block.  Expand one COND_EXPR.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */

static basic_block
expand_gimple_cond_expr (basic_block bb, tree stmt)
{
  basic_block new_bb, dest;
  edge new_edge;
  edge true_edge;
  edge false_edge;
  tree pred = COND_EXPR_COND (stmt);
  rtx last2, last;

  gcc_assert (COND_EXPR_THEN (stmt) == NULL_TREE);
  gcc_assert (COND_EXPR_ELSE (stmt) == NULL_TREE);
  last2 = last = get_last_insn ();

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  if (EXPR_LOCUS (stmt))
    {
      set_curr_insn_source_location (*(EXPR_LOCUS (stmt)));
      set_curr_insn_block (TREE_BLOCK (stmt));
    }

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     a two-way jump that needs to be decomposed into two basic blocks.  */
  if (false_edge->dest == bb->next_bb)
    {
      jumpif (pred, label_rtx_for_bb (true_edge->dest));
      add_reg_br_prob_note (last, true_edge->probability);
      maybe_dump_rtl_for_tree_stmt (stmt, last);
      if (true_edge->goto_locus)
        set_curr_insn_source_location (true_edge->goto_locus);
      false_edge->flags |= EDGE_FALLTHRU;
      return NULL;
    }
  if (true_edge->dest == bb->next_bb)
    {
      jumpifnot (pred, label_rtx_for_bb (false_edge->dest));
      add_reg_br_prob_note (last, false_edge->probability);
      maybe_dump_rtl_for_tree_stmt (stmt, last);
      if (false_edge->goto_locus)
        set_curr_insn_source_location (false_edge->goto_locus);
      true_edge->flags |= EDGE_FALLTHRU;
      return NULL;
    }

  jumpif (pred, label_rtx_for_bb (true_edge->dest));
  add_reg_br_prob_note (last, true_edge->probability);
  last = get_last_insn ();
  emit_jump (label_rtx_for_bb (false_edge->dest));

  BB_END (bb) = last;
  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count;
  new_bb->frequency = EDGE_FREQUENCY (false_edge);
  new_edge = make_edge (new_bb, dest, 0);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = new_bb->count;
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_tree_stmt (stmt, last2);

  if (false_edge->goto_locus)
    set_curr_insn_source_location (false_edge->goto_locus);

  return new_bb;
}

/* A subroutine of expand_gimple_basic_block.  Expand one CALL_EXPR
   that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
   generated a tail call (something that might be denied by the ABI
   rules governing the call; see calls.c).

   Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
   can still reach the rest of BB.  The case here is __builtin_sqrt,
   where the NaN result goes through the external function (with a
   tailcall) and the normal result happens via a sqrt instruction.  */

static basic_block
expand_gimple_tailcall (basic_block bb, tree stmt, bool *can_fallthru)
{
  rtx last2, last;
  edge e;
  edge_iterator ei;
  int probability;
  gcov_type count;

  last2 = last = get_last_insn ();

  expand_expr_stmt (stmt);

  for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
    if (CALL_P (last) && SIBLING_CALL_P (last))
      goto found;

  maybe_dump_rtl_for_tree_stmt (stmt, last2);

  *can_fallthru = true;
  return NULL;

 found:
  /* ??? Wouldn't it be better to just reset any pending stack adjust?
     Any instructions emitted here are about to be deleted.  */
  do_pending_stack_adjust ();

  /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
  /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
     EH or abnormal edges, we shouldn't have created a tail call in
     the first place.  So it seems to me we should just be removing
     all edges here, or redirecting the existing fallthru edge to
     the exit block.  */

  probability = 0;
  count = 0;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
        {
          if (e->dest != EXIT_BLOCK_PTR)
            {
              e->dest->count -= e->count;
              e->dest->frequency -= EDGE_FREQUENCY (e);
              if (e->dest->count < 0)
                e->dest->count = 0;
              if (e->dest->frequency < 0)
                e->dest->frequency = 0;
            }
          count += e->count;
          probability += e->probability;
          remove_edge (e);
        }
      else
        ei_next (&ei);
    }

  /* This is somewhat ugly: the call_expr expander often emits instructions
     after the sibcall (to perform the function return).  These confuse the
     find_many_sub_basic_blocks code, so we need to get rid of these.  */
  last = NEXT_INSN (last);
  gcc_assert (BARRIER_P (last));

  *can_fallthru = false;
  while (NEXT_INSN (last))
    {
      /* For instance, the sqrt builtin expander expands an if with a
         sibcall in the then-arm and a label for the `else' arm.  */
      if (LABEL_P (NEXT_INSN (last)))
        {
          *can_fallthru = true;
          break;
        }
      delete_insn (NEXT_INSN (last));
    }

  e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
  e->probability += probability;
  e->count += count;
  BB_END (bb) = last;
  update_bb_for_insn (bb);

  if (NEXT_INSN (last))
    {
      bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);

      last = BB_END (bb);
      if (BARRIER_P (last))
        BB_END (bb) = PREV_INSN (last);
    }

  maybe_dump_rtl_for_tree_stmt (stmt, last2);

  return bb;
}

/* Expand basic block BB from GIMPLE trees to RTL.  */

static basic_block
expand_gimple_basic_block (basic_block bb)
{
  tree_stmt_iterator tsi;
  tree stmts = bb_stmt_list (bb);
  tree stmt = NULL;
  rtx note, last;
  edge e;
  edge_iterator ei;
  void **elt;

  if (dump_file)
    {
      fprintf (dump_file,
               "\n;; Generating RTL for tree basic block %d\n",
               bb->index);
    }

  bb->il.tree = NULL;
  init_rtl_bb_info (bb);
  bb->flags |= BB_RTL;

  /* Remove the RETURN_EXPR if we may fall through to the exit
     instead.  */
  tsi = tsi_last (stmts);
  if (!tsi_end_p (tsi)
      && TREE_CODE (tsi_stmt (tsi)) == RETURN_EXPR)
    {
      tree ret_stmt = tsi_stmt (tsi);

      gcc_assert (single_succ_p (bb));
      gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);

      if (bb->next_bb == EXIT_BLOCK_PTR
          && !TREE_OPERAND (ret_stmt, 0))
        {
          tsi_delink (&tsi);
          single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
        }
    }

  tsi = tsi_start (stmts);
  if (!tsi_end_p (tsi))
    {
      stmt = tsi_stmt (tsi);
      if (TREE_CODE (stmt) != LABEL_EXPR)
        stmt = NULL_TREE;
    }

  elt = pointer_map_contains (lab_rtx_for_bb, bb);

  if (stmt || elt)
    {
      last = get_last_insn ();

      if (stmt)
        {
          expand_expr_stmt (stmt);
          tsi_next (&tsi);
        }

      if (elt)
        emit_label ((rtx) *elt);

      /* Java emits line number notes at the top of labels.
         ??? Make this go away once line number notes are obsoleted.  */
      BB_HEAD (bb) = NEXT_INSN (last);
      if (NOTE_P (BB_HEAD (bb)))
        BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
      note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));

      maybe_dump_rtl_for_tree_stmt (stmt, last);
    }
  else
    note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);

  NOTE_BASIC_BLOCK (note) = bb;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
      e->flags &= ~EDGE_EXECUTABLE;

      /* At the moment not all abnormal edges match the RTL representation.
         It is safe to remove them here as find_many_sub_basic_blocks will
         rediscover them.  In the future we should get this fixed properly.  */
      if (e->flags & EDGE_ABNORMAL)
        remove_edge (e);
      else
        ei_next (&ei);
    }

  for (; !tsi_end_p (tsi); tsi_next (&tsi))
    {
      tree stmt = tsi_stmt (tsi);
      basic_block new_bb;

      if (!stmt)
        continue;

      /* Expand this statement, then evaluate the resulting RTL and
         fixup the CFG accordingly.  */
      if (TREE_CODE (stmt) == COND_EXPR)
        {
          new_bb = expand_gimple_cond_expr (bb, stmt);
          if (new_bb)
            return new_bb;
        }
      else
        {
          tree call = get_call_expr_in (stmt);
          int region;
          /* For the benefit of calls.c, converting all this to rtl,
             we need to record the call expression, not just the outer
             modify statement.  */
          if (call && call != stmt)
            {
              if ((region = lookup_stmt_eh_region (stmt)) > 0)
                add_stmt_to_eh_region (call, region);
              gimple_duplicate_stmt_histograms (cfun, call, cfun, stmt);
            }
          if (call && CALL_EXPR_TAILCALL (call))
            {
              bool can_fallthru;
              new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
              if (new_bb)
                {
                  if (can_fallthru)
                    bb = new_bb;
                  else
                    return new_bb;
                }
            }
          else
            {
              last = get_last_insn ();
              expand_expr_stmt (stmt);
              maybe_dump_rtl_for_tree_stmt (stmt, last);
            }
        }
    }

  /* Expand implicit goto.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if (e->flags & EDGE_FALLTHRU)
        break;
    }

  if (e && e->dest != bb->next_bb)
    {
      emit_jump (label_rtx_for_bb (e->dest));
      if (e->goto_locus)
        set_curr_insn_source_location (e->goto_locus);
      e->flags &= ~EDGE_FALLTHRU;
    }

  do_pending_stack_adjust ();

  /* Find the block tail.  The last insn in the block is the insn
     before a barrier and/or table jump insn.  */
  last = get_last_insn ();
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  if (JUMP_TABLE_DATA_P (last))
    last = PREV_INSN (PREV_INSN (last));
  BB_END (bb) = last;

  update_bb_for_insn (bb);

  return bb;
}


/* Create a basic block for initialization code.  */

static basic_block
construct_init_block (void)
{
  basic_block init_block, first_block;
  edge e = NULL;
  int flags;

  /* Multiple entry points not supported yet.  */
  gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
  init_rtl_bb_info (ENTRY_BLOCK_PTR);
  init_rtl_bb_info (EXIT_BLOCK_PTR);
  ENTRY_BLOCK_PTR->flags |= BB_RTL;
  EXIT_BLOCK_PTR->flags |= BB_RTL;

  e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);

  /* When the entry edge points to the first basic block, we don't need
     a jump; otherwise we have to jump to the proper target.  */
  if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
    {
      tree label = tree_block_label (e->dest);

      emit_jump (label_rtx (label));
      flags = 0;
    }
  else
    flags = EDGE_FALLTHRU;

  init_block = create_basic_block (NEXT_INSN (get_insns ()),
                                   get_last_insn (),
                                   ENTRY_BLOCK_PTR);
  init_block->frequency = ENTRY_BLOCK_PTR->frequency;
  init_block->count = ENTRY_BLOCK_PTR->count;
  if (e)
    {
      first_block = e->dest;
      redirect_edge_succ (e, init_block);
      e = make_edge (init_block, first_block, flags);
    }
  else
    e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = ENTRY_BLOCK_PTR->count;

  update_bb_for_insn (init_block);
  return init_block;
}

/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

static void
set_block_levels (tree block, int level)
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}

/* Create a block containing landing pads and similar stuff.  */

static void
construct_exit_block (void)
{
  rtx head = get_last_insn ();
  rtx end;
  basic_block exit_block;
  edge e, e2;
  unsigned ix;
  edge_iterator ei;
  rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);

  /* Make sure the locus is set to the end of the function, so that
     epilogue line numbers and warnings are set properly.  */
  if (cfun->function_end_locus != UNKNOWN_LOCATION)
    input_location = cfun->function_end_locus;

  /* The following insns belong to the top scope.  */
  set_curr_insn_block (DECL_INITIAL (current_function_decl));

  /* Generate rtl for function exit.  */
  expand_function_end ();

  end = get_last_insn ();
  if (head == end)
    return;
  /* While emitting the function end, we could have moved the end of the
     last basic block.  */
  BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
  while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
    head = NEXT_INSN (head);
  exit_block = create_basic_block (NEXT_INSN (head), end,
                                   EXIT_BLOCK_PTR->prev_bb);
  exit_block->frequency = EXIT_BLOCK_PTR->frequency;
  exit_block->count = EXIT_BLOCK_PTR->count;

  ix = 0;
  while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
    {
      e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
      if (!(e->flags & EDGE_ABNORMAL))
        redirect_edge_succ (e, exit_block);
      else
        ix++;
    }

  e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = EXIT_BLOCK_PTR->count;
  FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
    if (e2 != e)
      {
        e->count -= e2->count;
        exit_block->count -= e2->count;
        exit_block->frequency -= EDGE_FREQUENCY (e2);
      }
  if (e->count < 0)
    e->count = 0;
  if (exit_block->count < 0)
    exit_block->count = 0;
  if (exit_block->frequency < 0)
    exit_block->frequency = 0;
  update_bb_for_insn (exit_block);
}
1761
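/* Note the shape of the redirection loop above: redirecting an edge
   removes it from EXIT_BLOCK_PTR->preds, shifting later edges into its
   slot, so IX advances only when an edge is *kept*.  The same
   "filter a vector in place" pattern on a plain array, as a
   self-contained sketch (illustration only):  */
#if 0
static size_t
toy_filter (int *v, size_t n, int keep_odd)
{
  size_t ix = 0;
  while (ix < n)
    {
      if ((v[ix] & 1) != keep_odd)
        v[ix] = v[--n];         /* Drop: replace with the last element.  */
      else
        ix++;                   /* Keep: advance to the next slot.  */
    }
  return n;                     /* New length.  */
}
#endif
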
/* Helper function for discover_nonconstant_array_refs.
   Look for ARRAY_REF nodes with non-constant indexes and mark their
   base decls addressable.  */

static tree
discover_nonconstant_array_refs_r (tree *tp, int *walk_subtrees,
                                   void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
    {
      while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
              && is_gimple_min_invariant (TREE_OPERAND (t, 1))
              && (!TREE_OPERAND (t, 2)
                  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
             || (TREE_CODE (t) == COMPONENT_REF
                 && (!TREE_OPERAND (t, 2)
                     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
             || TREE_CODE (t) == BIT_FIELD_REF
             || TREE_CODE (t) == REALPART_EXPR
             || TREE_CODE (t) == IMAGPART_EXPR
             || TREE_CODE (t) == VIEW_CONVERT_EXPR
             || CONVERT_EXPR_P (t))
        t = TREE_OPERAND (t, 0);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
        {
          t = get_base_address (t);
          if (t && DECL_P (t))
            TREE_ADDRESSABLE (t) = 1;
        }

      *walk_subtrees = 0;
    }

  return NULL_TREE;
}

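/* For reference, the walk_tree callback contract used above: the
   callback is invoked on every sub-tree; returning NULL_TREE continues
   the walk, returning non-NULL stops it, and clearing *WALK_SUBTREES
   prunes the children of the current node.  A hypothetical callback
   that counts ARRAY_REFs while pruning types and decls, as a usage
   sketch (not part of this file):  */
#if 0
static tree
count_array_refs_r (tree *tp, int *walk_subtrees, void *data)
{
  if (IS_TYPE_OR_DECL_P (*tp))
    *walk_subtrees = 0;         /* Prune: don't walk into these.  */
  else if (TREE_CODE (*tp) == ARRAY_REF)
    ++*(int *) data;            /* DATA points at a counter.  */
  return NULL_TREE;             /* Non-NULL would stop the walk.  */
}
#endif
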
/* RTL expansion cannot handle array references with variable offsets
   when the array is stored in a single register.  Discover such
   expressions and mark the base variables as addressable to avoid
   this scenario.  */

static void
discover_nonconstant_array_refs (void)
{
  basic_block bb;
  block_stmt_iterator bsi;

  FOR_EACH_BB (bb)
    {
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
        walk_tree (bsi_stmt_ptr (bsi), discover_nonconstant_array_refs_r,
                   NULL, NULL);
    }
}

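/* For illustration, the kind of source construct this walk is hunting
   (hypothetical user code, not part of this file): with a constant
   index the aggregate may live entirely in registers; a variable index
   forces it into memory via TREE_ADDRESSABLE.  */
#if 0
struct vec4 { int e[4]; };

static int
get_elem (struct vec4 v, int i)
{
  return v.e[i];        /* Non-constant index: V becomes addressable.  */
}
#endif
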
/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do the conversion per basic block and preserve/update the tree
   CFG.  This implies we have to do some magic as the CFG can
   simultaneously consist of basic blocks containing RTL and GIMPLE
   trees.  This can confuse the CFG hooks, so be careful not to
   manipulate the CFG during the expansion.  */

static unsigned int
tree_expand_cfg (void)
{
  basic_block bb, init_block;
  sbitmap blocks;
  edge_iterator ei;
  edge e;

  /* Some backends want to know that we are expanding to RTL.  */
  currently_expanding_to_rtl = 1;

  insn_locators_alloc ();
  if (!DECL_BUILT_IN (current_function_decl))
    set_curr_insn_source_location (DECL_SOURCE_LOCATION (current_function_decl));
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  prologue_locator = curr_insn_locator ();

  /* Make sure the first insn is a note even if we don't want line
     numbers.  This guarantees that the first insn will never be
     deleted.  Also, final expects a note to appear there.  */
  emit_note (NOTE_INSN_DELETED);

  /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
  discover_nonconstant_array_refs ();

  targetm.expand_to_rtl_hook ();
  crtl->stack_alignment_needed = STACK_BOUNDARY;
  crtl->preferred_stack_boundary = STACK_BOUNDARY;
  cfun->cfg->max_jumptable_ents = 0;

  /* Expand the variables recorded during gimple lowering.  */
  expand_used_vars ();

  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
    {
      if (cfun->calls_alloca)
        warning (OPT_Wstack_protector,
                 "not protecting local variables: variable length buffer");
      if (has_short_buffer && !crtl->stack_protect_guard)
        warning (OPT_Wstack_protector,
                 "not protecting function: no buffer at least %d bytes long",
                 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
    }

  /* Set up parameters and prepare the function for return.  */
  expand_function_start (current_function_decl);

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();

  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (crtl->stack_protect_guard)
    stack_protect_prologue ();

  /* Register the RTL-specific hooks for the CFG.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleared from the
     remaining edges in expand_gimple_basic_block.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    e->flags &= ~EDGE_EXECUTABLE;

  lab_rtx_for_bb = pointer_map_create ();
  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
    bb = expand_gimple_basic_block (bb);
  pointer_map_destroy (lab_rtx_for_bb);
  free_histograms ();

  construct_exit_block ();
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  insn_locators_finalize ();

  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  /* Convert tree EH labels to RTL EH labels and zap the tree EH table.  */
  convert_from_eh_region_ranges ();
  set_eh_throw_stmt_table (cfun, NULL);

  rebuild_jump_labels (get_insns ());
  find_exception_handler_labels ();

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  purge_all_dead_edges ();
  sbitmap_free (blocks);

  compact_blocks ();
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  if (dump_file)
    {
      fprintf (dump_file,
               "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
         parent != NULL_TREE;
         parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
        TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     function, before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);
  return 0;
}

struct rtl_opt_pass pass_expand =
{
 {
  RTL_PASS,                             /* type */
  "expand",                             /* name */
  NULL,                                 /* gate */
  tree_expand_cfg,                      /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_EXPAND,                            /* tv_id */
  /* ??? If TER is enabled, we actually receive GENERIC.  */
  PROP_gimple_leh | PROP_cfg,           /* properties_required */
  PROP_rtl,                             /* properties_provided */
  PROP_trees,                           /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
 }
};
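
/* The same pass descriptor written with C99 designated initializers,
   purely as an illustrative sketch: this codebase sticks to the
   positional C89 form above, and the nested member name `pass' is
   assumed from the struct layout in tree-pass.h.  Fields left out are
   zero-initialized, which matches the NULL/0 entries above.  */
#if 0
struct rtl_opt_pass pass_expand_sketch =
{
 .pass = {
   .type = RTL_PASS,
   .name = "expand",
   .execute = tree_expand_cfg,
   .tv_id = TV_EXPAND,
   .properties_required = PROP_gimple_leh | PROP_cfg,
   .properties_provided = PROP_rtl,
   .properties_destroyed = PROP_trees,
   .todo_flags_finish = TODO_dump_func,
 }
};
#endif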