/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "target.h"

/* Verify that there is exactly one jump instruction emitted since LAST and
   attach a REG_BR_PROB note specifying PROBABILITY to it.
   ??? We really ought to pass the probability down to RTL expanders and let it
   re-distribute it when the conditional expands into multiple conditionals.
   This is however difficult to do.  */
void
add_reg_br_prob_note (rtx last, int probability)
{
  if (profile_status == PROFILE_ABSENT)
    return;
  for (last = NEXT_INSN (last); last && NEXT_INSN (last); last = NEXT_INSN (last))
    if (JUMP_P (last))
      {
	/* It is common to emit condjump-around-jump sequence when we don't know
	   how to reverse the conditional.  Special case this.  Here the
	   conditional jump branches around the unconditional jump, so it
	   carries the reversed probability REG_BR_PROB_BASE - PROBABILITY.  */
	if (!any_condjump_p (last)
	    || !JUMP_P (NEXT_INSN (last))
	    || !simplejump_p (NEXT_INSN (last))
	    || !NEXT_INSN (NEXT_INSN (last))
	    || !BARRIER_P (NEXT_INSN (NEXT_INSN (last)))
	    || !NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))
	    || !LABEL_P (NEXT_INSN (NEXT_INSN (NEXT_INSN (last))))
	    || NEXT_INSN (NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))))
	  goto failed;
	gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
	REG_NOTES (last)
	  = gen_rtx_EXPR_LIST (REG_BR_PROB,
			       GEN_INT (REG_BR_PROB_BASE - probability),
			       REG_NOTES (last));
	return;
      }
  if (!last || !JUMP_P (last) || !any_condjump_p (last))
    goto failed;
  gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
  REG_NOTES (last)
    = gen_rtx_EXPR_LIST (REG_BR_PROB,
			 GEN_INT (probability), REG_NOTES (last));
  return;
failed:
  if (dump_file)
    fprintf (dump_file, "Failed to add probability note\n");
}

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* We have an interference graph between such objects.  This graph
   is lower triangular.  */
static bool *stack_vars_conflict;
static size_t stack_vars_conflict_alloc;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Discover the byte alignment to use for DECL.  Ignore any alignment
   we cannot honor given the expected alignment of the stack boundary.  */

static unsigned int
get_decl_align_unit (tree decl)
{
  unsigned int align;

  align = DECL_ALIGN (decl);
  align = LOCAL_ALIGNMENT (TREE_TYPE (decl), align);
  if (align > PREFERRED_STACK_BOUNDARY)
    align = PREFERRED_STACK_BOUNDARY;
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;

  return align / BITS_PER_UNIT;
}

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
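
/* Illustrative sketch, not part of the original source: the arithmetic
   above relies on ALIGN being a power of two so that "&= -align" rounds
   in the frame growth direction.  E.g. with FRAME_GROWS_DOWNWARD,
   frame_phase == 0 and frame_offset == -13, a request for SIZE == 8 at
   ALIGN == 8 computes (-13 - 8) & -8 == -24, allocating bytes [-24, -16).
   In the upward-growing case the same request at frame_offset == 13
   computes (13 + 7) & -8 == 16, allocating bytes [16, 24).  */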

/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
	stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
	stack_vars_alloc = 32;
      stack_vars
	= XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  stack_vars[stack_vars_num].decl = decl;
  stack_vars[stack_vars_num].offset = 0;
  stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
  stack_vars[stack_vars_num].alignb = get_decl_align_unit (decl);

  /* All variables are initially in their own partition.  */
  stack_vars[stack_vars_num].representative = stack_vars_num;
  stack_vars[stack_vars_num].next = EOC;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  SET_DECL_RTL (decl, pc_rtx);

  stack_vars_num++;
}

/* Compute the linear index of a lower-triangular coordinate (I, J).  */

static size_t
triangular_index (size_t i, size_t j)
{
  if (i < j)
    {
      size_t t;
      t = i, i = j, j = t;
    }
  return (i * (i + 1)) / 2 + j;
}
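
/* Illustrative note, not part of the original source: the mapping packs
   the lower triangle row by row, so (0,0) -> 0, (1,0) -> 1, (1,1) -> 2,
   (2,0) -> 3, and the argument order is irrelevant:
   triangular_index (3, 1) == triangular_index (1, 3) == 3*4/2 + 1 == 7.
   Consequently an interference graph over N variables needs
   triangular_index (N-1, N-1) + 1 == N*(N+1)/2 entries, which is exactly
   what resize_stack_vars_conflict below allocates.  */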

/* Ensure that STACK_VARS_CONFLICT is large enough for N objects.  */

static void
resize_stack_vars_conflict (size_t n)
{
  size_t size = triangular_index (n-1, n-1) + 1;

  if (size <= stack_vars_conflict_alloc)
    return;

  stack_vars_conflict = XRESIZEVEC (bool, stack_vars_conflict, size);
  memset (stack_vars_conflict + stack_vars_conflict_alloc, 0,
	  (size - stack_vars_conflict_alloc) * sizeof (bool));
  stack_vars_conflict_alloc = size;
}

/* Make the decls associated with LUIDs X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  size_t index = triangular_index (x, y);
  gcc_assert (index < stack_vars_conflict_alloc);
  stack_vars_conflict[index] = true;
}

/* Check whether the decls associated with LUIDs X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  size_t index = triangular_index (x, y);
  gcc_assert (index < stack_vars_conflict_alloc);
  return stack_vars_conflict[index];
}

/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
	return true;

  return false;
}
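
/* Illustrative examples, not part of the original source: the recursion
   walks array element types and record fields, so

     union u { short s; int i; };
     struct a { int x; union u u; };     -> true (direct union field)
     struct b { struct a a[4]; };        -> true (reached via array of structs)
     struct c { int x; float y; };       -> false

   Only types that can reach a union through fields or array elements
   return true.  */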

/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, we must add a conflict for these variables
   in the interference graph.  We also need to make sure to add conflicts
   for structures containing unions.  Otherwise RTL alias analysis comes
   along and, due to type-based aliasing rules, decides that for two
   overlapping union temporaries { short s; int i; } accesses to the same
   mem through different types may not alias and happily reorders stores
   across life-time boundaries of the temporaries (see PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
	{
	  tree type_j = TREE_TYPE (stack_vars[j].decl);
	  bool aggr_j = AGGREGATE_TYPE_P (type_j);
	  if (aggr_i != aggr_j
	      /* Either the objects conflict by means of type based
		 aliasing rules, or we need to add a conflict.  */
	      || !objects_must_conflict_p (type_i, type_j)
	      /* In case the types do not conflict ensure that access
		 to elements will conflict.  In case of unions we have
		 to be careful as type based aliasing rules may say
		 access to the same memory does not conflict.  So play
		 safe and add a conflict in this case.  */
	      || contains_union)
	    add_stack_var_conflict (i, j);
	}
    }
}

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the size of the object.  */

static int
stack_var_size_cmp (const void *a, const void *b)
{
  HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
  HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
  unsigned int uida = DECL_UID (stack_vars[*(const size_t *)a].decl);
  unsigned int uidb = DECL_UID (stack_vars[*(const size_t *)b].decl);

  if (sa < sb)
    return -1;
  if (sa > sb)
    return 1;
  /* For stack variables of the same size, break the tie on the uid of
     the decl; qsort itself is not stable, so this gives the sort a
     deterministic total order.  */
  if (uida < uidb)
    return -1;
  if (uida > uidb)
    return 1;
  return 0;
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the end
   of the partitioning process we'll have a single contiguous block that is
   easy to lay out within the stack frame.  */

static void
union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
{
  size_t i, last;

  /* Update each element of partition B with the given offset,
     and merge them into partition A.  */
  for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
    {
      stack_vars[i].offset += offset;
      stack_vars[i].representative = a;
    }
  stack_vars[last].next = stack_vars[a].next;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  for (last = stack_vars_num, i = 0; i < last; ++i)
    if (stack_var_conflict_p (b, i))
      add_stack_var_conflict (a, i);
}

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

	Sort the objects by size.
	For each object A {
	  S = size(A)
	  O = 0
	  loop {
	    Look for the largest non-conflicting object B with size <= S.
	    UNION (A, B)
	    offset(B) = O
	    O += size(B)
	    S -= size(B)
	  }
	}
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);

  /* Special case: detect when all variables conflict, and thus we can't
     do anything during the partitioning loop.  It isn't uncommon (with
     C code at least) to declare all variables at the top of the function,
     and if we're not inlining, then all variables will be in the same scope.
     Take advantage of very fast libc routines for this scan.  */
  gcc_assert (sizeof (bool) == sizeof (char));
  if (memchr (stack_vars_conflict, false, stack_vars_conflict_alloc) == NULL)
    return;

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      HOST_WIDE_INT isize = stack_vars[i].size;
      HOST_WIDE_INT offset = 0;

      for (sj = si; sj-- > 0; )
	{
	  size_t j = stack_vars_sorted[sj];
	  HOST_WIDE_INT jsize = stack_vars[j].size;
	  unsigned int jalign = stack_vars[j].alignb;

	  /* Ignore objects that aren't partition representatives.  */
	  if (stack_vars[j].representative != j)
	    continue;

	  /* Ignore objects too large for the remaining space.  */
	  if (isize < jsize)
	    continue;

	  /* Ignore conflicting objects.  */
	  if (stack_var_conflict_p (i, j))
	    continue;

	  /* Refine the remaining space check to include alignment.  */
	  if (offset & (jalign - 1))
	    {
	      HOST_WIDE_INT toff = offset;
	      toff += jalign - 1;
	      toff &= -(HOST_WIDE_INT)jalign;
	      if (isize - (toff - offset) < jsize)
		continue;

	      isize -= toff - offset;
	      offset = toff;
	    }

	  /* UNION the objects, placing J at OFFSET.  */
	  union_stack_vars (i, j, offset);

	  isize -= jsize;
	  if (isize == 0)
	    break;
	}
    }
}
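
/* Illustrative trace, not part of the original source: after the sort,
   the outer loop visits objects from smallest to largest, and each
   representative tries to absorb earlier (no larger) representatives.
   With non-conflicting objects of sizes 1, 4 and 8: the size-4 object
   absorbs the size-1 object at offset 0; the size-8 object then absorbs
   that whole partition at offset 0 (the size-1 object is skipped later
   because it is no longer a representative), leaving one 8-byte
   partition that holds all three variables.  */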

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
	       " align %u\n", (unsigned long) i, stack_vars[i].size,
	       stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
	{
	  fputc ('\t', dump_file);
	  print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
	  fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
		   stack_vars[j].offset);
	}
    }
}

/* Assign rtl to DECL at frame offset OFFSET.  */

static void
expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
{
  HOST_WIDE_INT align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (virtual_stack_vars_rtx, offset);
  x = gen_rtx_MEM (DECL_MODE (decl), x);

  /* Set alignment we actually gave this decl.  */
  offset -= frame_phase;
  align = offset & -offset;
  align *= BITS_PER_UNIT;
  if (align > STACK_BOUNDARY || align == 0)
    align = STACK_BOUNDARY;
  DECL_ALIGN (decl) = align;
  DECL_USER_ALIGN (decl) = 0;

  set_mem_attributes (x, decl, true);
  SET_DECL_RTL (decl, x);
}
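
/* Illustrative note, not part of the original source: OFFSET & -OFFSET
   isolates the lowest set bit, i.e. the largest power of two dividing
   OFFSET, which is the alignment the slot is guaranteed to have relative
   to the phase-corrected frame.  E.g. offset == 24 gives 24 & -24 == 8,
   so the decl is known to be 8-byte aligned.  An offset of 0 has no set
   bit at all, which is why the align == 0 case above falls back to
   STACK_BOUNDARY.  */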

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      /* Skip variables that have already had rtl assigned.  See also
	 add_stack_var where we perpetrate this pc_rtx hack.  */
      if (DECL_RTL (stack_vars[i].decl) != pc_rtx)
	continue;

      /* Check the predicate to see whether this variable should be
	 allocated in this pass.  */
      if (pred && !pred (stack_vars[i].decl))
	continue;

      offset = alloc_stack_frame_space (stack_vars[i].size,
					stack_vars[i].alignb);

      /* Create rtl for each variable based on their location within the
	 partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
	{
	  gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
	  expand_one_stack_var_at (stack_vars[j].decl,
				   stack_vars[j].offset + offset);
	}
    }
}

/* Sum the sizes of all partitions and reset their members' DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
	SET_DECL_RTL (stack_vars[j].decl, NULL);
    }
  return size;
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset, align;

  size = tree_low_cst (DECL_SIZE_UNIT (var), 1);
  align = get_decl_align_unit (var);
  offset = alloc_stack_frame_space (size, align);

  expand_one_stack_var_at (var, offset);
}

/* A subroutine of expand_one_var.  Called to assign rtl
   to a TREE_STATIC VAR_DECL.  */

static void
expand_one_static_var (tree var)
{
  /* In unit-at-a-time all the static variables are expanded at the end
     of the compilation process.  */
  if (flag_unit_at_a_time)
    return;
  /* If this is an inlined copy of a static local variable,
     look up the original.  */
  var = DECL_ORIGIN (var);

  /* If we've already processed this variable because of that, do nothing.  */
  if (TREE_ASM_WRITTEN (var))
    return;

  /* Otherwise, just emit the variable.  */
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree type = TREE_TYPE (var);
  int unsignedp = TYPE_UNSIGNED (type);
  enum machine_mode reg_mode
    = promote_mode (type, DECL_MODE (var), &unsignedp, 0);
  rtx x = gen_reg_rtx (reg_mode);

  SET_DECL_RTL (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (var))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (var))));
}
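
/* Illustrative note, not part of the original source: promote_mode lets
   the target widen small scalars to its preferred register width, so on
   a typical 32-bit RISC target a `short' variable may end up in an
   SImode pseudo rather than an HImode one, with UNSIGNEDP recording
   which extension the promotion implies.  */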

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}
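
/* Illustrative note, not part of the original source: at -O0 a 4-byte
   `int' or a 16-byte struct falls under the 32-byte cutoff above and is
   allocated immediately via expand_one_stack_var, while a `char buf[64]'
   is still deferred so that large buffers can share frame space.  With
   -fstack-protector everything is deferred regardless, since the
   protector phases below must be free to reorder the whole frame.  */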

/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return the stack usage this variable is supposed to take.  */

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  if (TREE_CODE (var) != VAR_DECL)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    {
      if (really_expand)
	expand_one_static_var (var);
    }
  else if (DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
	expand_one_error_var (var);
    }
  else if (DECL_HARD_REGISTER (var))
    {
      if (really_expand)
	expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
	expand_one_register_var (var);
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (var);
  else
    {
      if (really_expand)
	expand_one_stack_var (var);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t)
	/* Force local static variables to be output when marked by
	   used attribute.  For unit-at-a-time, cgraph code already takes
	   care of this.  */
	|| (!flag_unit_at_a_time && TREE_STATIC (t)
	    && DECL_PRESERVE_P (t)))
      expand_one_var (t, toplevel, true);

  this_sv_num = stack_vars_num;

  /* Expand all variables at contained levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  Do make certain that a
     variable conflicts with itself.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;
      resize_stack_vars_conflict (new_sv_num);

      for (i = old_sv_num; i < new_sv_num; ++i)
	for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
	  add_stack_var_conflict (i, j);
    }
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
	  || t == signed_char_type_node
	  || t == unsigned_char_type_node)
	{
	  unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
	  unsigned HOST_WIDE_INT len;

	  if (!TYPE_SIZE_UNIT (type)
	      || !host_integerp (TYPE_SIZE_UNIT (type), 1))
	    len = max;
	  else
	    len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

	  if (len < max)
	    ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
	  else
	    ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
	}
      else
	ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
	if (TREE_CODE (t) == FIELD_DECL)
	  ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}

/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
	  && !(bits & SPCT_HAS_AGGREGATE))
	ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
	ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}
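
/* Illustrative examples, not part of the original source: assuming the
   default --param ssp-buffer-size=8, with -fstack-protector-all
   (flag_stack_protect == 2) a plain `char buf[16]' classifies as a large
   char array and gets phase 1; `int v[4]' is a non-char array and gets
   phase 2; a bare `int x' has no array bits and stays in phase 0.  With
   plain -fstack-protector only the large-char-array case is
   segregated.  */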

/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
	if (ph_i != phase[j])
	  add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}

/* A subroutine of estimated_stack_frame_size.  Walk down through the
   BLOCK tree and, without actually expanding anything, account for the
   stack space the variables would take, mirroring
   expand_used_vars_for_block.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static HOST_WIDE_INT
account_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;
  HOST_WIDE_INT size = 0;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Account for all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t))
      size += expand_one_var (t, toplevel, false);

  this_sv_num = stack_vars_num;

  /* Account for all variables at contained levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    size += account_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  Do make certain that a
     variable conflicts with itself.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;
      resize_stack_vars_conflict (new_sv_num);

      for (i = old_sv_num; i < new_sv_num; ++i)
	for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
	  add_stack_var_conflict (i, j);
    }
  return size;
}

/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  tree t;
  /* Set TREE_USED on all variables in the local_decls.  */
  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    TREE_USED (TREE_VALUE (t)) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);
  XDELETEVEC (stack_vars_conflict);
  stack_vars = NULL;
  stack_vars_alloc = stack_vars_num = 0;
  stack_vars_conflict = NULL;
  stack_vars_conflict_alloc = 0;
}

/* Estimate the stack frame size needed by the current function, without
   actually performing the expansion.  */

HOST_WIDE_INT
estimated_stack_frame_size (void)
{
  HOST_WIDE_INT size = 0;
  tree t, outer_block = DECL_INITIAL (current_function_decl);

  init_vars_expansion ();

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */
  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    {
      tree var = TREE_VALUE (t);

      if (TREE_USED (var))
	size += expand_one_var (var, true, false);
      TREE_USED (var) = 1;
    }
  size += account_used_vars_for_block (outer_block, true);
  if (stack_vars_num > 0)
    {
      /* Due to the way alias sets work, no variables with non-conflicting
	 alias sets may be assigned the same address.  Add conflicts to
	 reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
	 vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
	add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
	 minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
	dump_stack_var_partition ();

      size += account_stack_vars ();
      fini_vars_expansion ();
    }
  return size;
}

/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree t, outer_block = DECL_INITIAL (current_function_decl);

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }

  init_vars_expansion ();

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */
  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    {
      tree var = TREE_VALUE (t);
      bool expand_now = false;

      /* We didn't set a block for static or extern because it's hard
	 to tell the difference between a global variable (re)declared
	 in a local scope, and one that's really declared there to
	 begin with.  And it doesn't really matter much, since we're
	 not giving them stack space.  Expand them now.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
	expand_now = true;

      /* Any variable that could have been hoisted into an SSA_NAME
	 will have been propagated anywhere the optimizers chose,
	 i.e. not confined to their original block.  Allocate them
	 as if they were defined in the outermost scope.  */
      else if (is_gimple_reg (var))
	expand_now = true;

      /* If the variable is not associated with any block, then it
	 was created by the optimizers, and could be live anywhere
	 in the function.  */
      else if (TREE_USED (var))
	expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
	 this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
	expand_one_var (var, true, true);
    }
  cfun->local_decls = NULL_TREE;

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Due to the way alias sets work, no variables with non-conflicting
	 alias sets may be assigned the same address.  Add conflicts to
	 reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
	 vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
	add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
	 minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
	dump_stack_var_partition ();
    }

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
	  && (cfun->calls_alloca || has_protected_decls)))
    create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
	 array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
	 earlier, such that we naturally see these variables first,
	 and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
	{
	  /* Phase 1 contains only character arrays.  */
	  expand_stack_vars (stack_protect_decl_phase_1);

	  /* Phase 2 contains other kinds of arrays.  */
	  if (flag_stack_protect == 2)
	    expand_stack_vars (stack_protect_decl_phase_2);
	}

      expand_stack_vars (NULL);

      fini_vars_expansion ();
    }

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
	frame_offset += align - 1;
      frame_offset &= -align;
    }
}

/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_tree_stmt (tree stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_generic_expr (dump_file, stmt, TDF_SLIM);
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}

/* Maps the blocks that do not contain tree labels to rtx labels.  */

static struct pointer_map_t *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx
label_rtx_for_bb (basic_block bb)
{
  tree_stmt_iterator tsi;
  tree lab, lab_stmt;
  void **elt;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  elt = pointer_map_contains (lab_rtx_for_bb, bb);
  if (elt)
    return (rtx) *elt;

  /* Find the tree label if it is present.  */

  for (tsi = tsi_start (bb_stmt_list (bb)); !tsi_end_p (tsi); tsi_next (&tsi))
    {
      lab_stmt = tsi_stmt (tsi);
      if (TREE_CODE (lab_stmt) != LABEL_EXPR)
	break;

      lab = LABEL_EXPR_LABEL (lab_stmt);
      if (DECL_NONLOCAL (lab))
	break;

      return label_rtx (lab);
    }

  elt = pointer_map_insert (lab_rtx_for_bb, bb);
  *elt = gen_label_rtx ();
  return (rtx) *elt;
}

/* A subroutine of expand_gimple_basic_block.  Expand one COND_EXPR.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */

static basic_block
expand_gimple_cond_expr (basic_block bb, tree stmt)
{
  basic_block new_bb, dest;
  edge new_edge;
  edge true_edge;
  edge false_edge;
  tree pred = COND_EXPR_COND (stmt);
  rtx last2, last;

  gcc_assert (COND_EXPR_THEN (stmt) == NULL_TREE);
  gcc_assert (COND_EXPR_ELSE (stmt) == NULL_TREE);
  last2 = last = get_last_insn ();

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  if (EXPR_LOCUS (stmt))
    {
      set_curr_insn_source_location (*(EXPR_LOCUS (stmt)));
      set_curr_insn_block (TREE_BLOCK (stmt));
    }

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     a two-way jump that needs to be decomposed into two basic blocks.  */
  if (false_edge->dest == bb->next_bb)
    {
      jumpif (pred, label_rtx_for_bb (true_edge->dest));
      add_reg_br_prob_note (last, true_edge->probability);
      maybe_dump_rtl_for_tree_stmt (stmt, last);
      if (true_edge->goto_locus)
	set_curr_insn_source_location (true_edge->goto_locus);
      false_edge->flags |= EDGE_FALLTHRU;
      return NULL;
    }
  if (true_edge->dest == bb->next_bb)
    {
      jumpifnot (pred, label_rtx_for_bb (false_edge->dest));
      add_reg_br_prob_note (last, false_edge->probability);
      maybe_dump_rtl_for_tree_stmt (stmt, last);
      if (false_edge->goto_locus)
	set_curr_insn_source_location (false_edge->goto_locus);
      true_edge->flags |= EDGE_FALLTHRU;
      return NULL;
    }

  jumpif (pred, label_rtx_for_bb (true_edge->dest));
  add_reg_br_prob_note (last, true_edge->probability);
  last = get_last_insn ();
  emit_jump (label_rtx_for_bb (false_edge->dest));

  BB_END (bb) = last;
  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count;
  new_bb->frequency = EDGE_FREQUENCY (false_edge);
  new_edge = make_edge (new_bb, dest, 0);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = new_bb->count;
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_tree_stmt (stmt, last2);

  if (false_edge->goto_locus)
    set_curr_insn_source_location (false_edge->goto_locus);

  return new_bb;
}

/* A subroutine of expand_gimple_basic_block.  Expand one CALL_EXPR
   that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
   generated a tail call (something that might be denied by the ABI
   rules governing the call; see calls.c).

   Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
   can still reach the rest of BB.  The case here is __builtin_sqrt,
   where the NaN result goes through the external function (with a
   tailcall) and the normal result happens via a sqrt instruction.  */

static basic_block
expand_gimple_tailcall (basic_block bb, tree stmt, bool *can_fallthru)
{
  rtx last2, last;
  edge e;
  edge_iterator ei;
  int probability;
  gcov_type count;

  last2 = last = get_last_insn ();

  expand_expr_stmt (stmt);

  for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
    if (CALL_P (last) && SIBLING_CALL_P (last))
      goto found;

  maybe_dump_rtl_for_tree_stmt (stmt, last2);

  *can_fallthru = true;
  return NULL;

 found:
  /* ??? Wouldn't it be better to just reset any pending stack adjust?
     Any instructions emitted here are about to be deleted.  */
  do_pending_stack_adjust ();

  /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
  /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
     EH or abnormal edges, we shouldn't have created a tail call in
     the first place.  So it seems to me we should just be removing
     all edges here, or redirecting the existing fallthru edge to
     the exit block.  */

  probability = 0;
  count = 0;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
	{
	  if (e->dest != EXIT_BLOCK_PTR)
	    {
	      e->dest->count -= e->count;
	      e->dest->frequency -= EDGE_FREQUENCY (e);
	      if (e->dest->count < 0)
		e->dest->count = 0;
	      if (e->dest->frequency < 0)
		e->dest->frequency = 0;
	    }
	  count += e->count;
	  probability += e->probability;
	  remove_edge (e);
	}
      else
	ei_next (&ei);
    }

  /* This is somewhat ugly: the call_expr expander often emits instructions
     after the sibcall (to perform the function return).  These confuse the
     find_many_sub_basic_blocks code, so we need to get rid of these.  */
  last = NEXT_INSN (last);
  gcc_assert (BARRIER_P (last));

  *can_fallthru = false;
  while (NEXT_INSN (last))
    {
      /* For instance an sqrt builtin expander expands an if with a
	 sibcall in the then-arm and a label for the else-arm.  */
      if (LABEL_P (NEXT_INSN (last)))
	{
	  *can_fallthru = true;
	  break;
	}
      delete_insn (NEXT_INSN (last));
    }

  e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
  e->probability += probability;
  e->count += count;
  BB_END (bb) = last;
  update_bb_for_insn (bb);

  if (NEXT_INSN (last))
    {
      bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);

      last = BB_END (bb);
      if (BARRIER_P (last))
	BB_END (bb) = PREV_INSN (last);
    }

  maybe_dump_rtl_for_tree_stmt (stmt, last2);

  return bb;
}

/* Expand basic block BB from GIMPLE trees to RTL.  */

static basic_block
expand_gimple_basic_block (basic_block bb)
{
  tree_stmt_iterator tsi;
  tree stmts = bb_stmt_list (bb);
  tree stmt = NULL;
  rtx note, last;
  edge e;
  edge_iterator ei;
  void **elt;

  if (dump_file)
    {
      fprintf (dump_file,
	       "\n;; Generating RTL for tree basic block %d\n",
	       bb->index);
    }

  bb->il.tree = NULL;
  init_rtl_bb_info (bb);
  bb->flags |= BB_RTL;

  /* Remove the RETURN_EXPR if we may fall through to the exit
     instead.  */
  tsi = tsi_last (stmts);
  if (!tsi_end_p (tsi)
      && TREE_CODE (tsi_stmt (tsi)) == RETURN_EXPR)
    {
      tree ret_stmt = tsi_stmt (tsi);

      gcc_assert (single_succ_p (bb));
      gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);

      if (bb->next_bb == EXIT_BLOCK_PTR
	  && !TREE_OPERAND (ret_stmt, 0))
	{
	  tsi_delink (&tsi);
	  single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
	}
    }

  tsi = tsi_start (stmts);
  if (!tsi_end_p (tsi))
    {
      stmt = tsi_stmt (tsi);
      if (TREE_CODE (stmt) != LABEL_EXPR)
	stmt = NULL_TREE;
    }

  elt = pointer_map_contains (lab_rtx_for_bb, bb);

  if (stmt || elt)
    {
      last = get_last_insn ();

      if (stmt)
	{
	  expand_expr_stmt (stmt);
	  tsi_next (&tsi);
	}

      if (elt)
	emit_label ((rtx) *elt);

      /* Java emits line number notes in the top of labels.
	 ??? Make this go away once line number notes are obsoleted.  */
      BB_HEAD (bb) = NEXT_INSN (last);
      if (NOTE_P (BB_HEAD (bb)))
	BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
      note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));

      maybe_dump_rtl_for_tree_stmt (stmt, last);
    }
  else
    note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);

  NOTE_BASIC_BLOCK (note) = bb;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
      e->flags &= ~EDGE_EXECUTABLE;

      /* At the moment not all abnormal edges match the RTL representation.
	 It is safe to remove them here as find_many_sub_basic_blocks will
	 rediscover them.  In the future we should get this fixed properly.  */
      if (e->flags & EDGE_ABNORMAL)
	remove_edge (e);
      else
	ei_next (&ei);
    }

  for (; !tsi_end_p (tsi); tsi_next (&tsi))
    {
      tree stmt = tsi_stmt (tsi);
      basic_block new_bb;

      if (!stmt)
	continue;

      /* Expand this statement, then evaluate the resulting RTL and
	 fix up the CFG accordingly.  */
      if (TREE_CODE (stmt) == COND_EXPR)
	{
	  new_bb = expand_gimple_cond_expr (bb, stmt);
	  if (new_bb)
	    return new_bb;
	}
      else
	{
	  tree call = get_call_expr_in (stmt);
	  int region;
	  /* For the benefit of calls.c, converting all this to rtl,
	     we need to record the call expression, not just the outer
	     modify statement.  */
	  if (call && call != stmt)
	    {
	      if ((region = lookup_stmt_eh_region (stmt)) > 0)
		add_stmt_to_eh_region (call, region);
	      gimple_duplicate_stmt_histograms (cfun, call, cfun, stmt);
	    }
	  if (call && CALL_EXPR_TAILCALL (call))
	    {
	      bool can_fallthru;
	      new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
	      if (new_bb)
		{
		  if (can_fallthru)
		    bb = new_bb;
		  else
		    return new_bb;
		}
	    }
	  else
	    {
	      last = get_last_insn ();
	      expand_expr_stmt (stmt);
	      maybe_dump_rtl_for_tree_stmt (stmt, last);
	    }
	}
    }

  /* Expand implicit goto.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if (e->flags & EDGE_FALLTHRU)
	break;
    }

  if (e && e->dest != bb->next_bb)
    {
      emit_jump (label_rtx_for_bb (e->dest));
      if (e->goto_locus)
	set_curr_insn_source_location (e->goto_locus);
      e->flags &= ~EDGE_FALLTHRU;
    }

  do_pending_stack_adjust ();

  /* Find the block tail.  The last insn in the block is the insn
     before a barrier and/or table jump insn.  */
  last = get_last_insn ();
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  if (JUMP_TABLE_DATA_P (last))
    last = PREV_INSN (PREV_INSN (last));
  BB_END (bb) = last;

  update_bb_for_insn (bb);

  return bb;
}

/* Create a basic block for initialization code.  */

static basic_block
construct_init_block (void)
{
  basic_block init_block, first_block;
  edge e = NULL;
  int flags;

  /* Multiple entry points not supported yet.  */
  gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
  init_rtl_bb_info (ENTRY_BLOCK_PTR);
  init_rtl_bb_info (EXIT_BLOCK_PTR);
  ENTRY_BLOCK_PTR->flags |= BB_RTL;
  EXIT_BLOCK_PTR->flags |= BB_RTL;

  e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);

  /* When the entry edge points to the first basic block, we don't need a
     jump; otherwise we have to jump to the proper target.  */
  if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
    {
      tree label = tree_block_label (e->dest);

      emit_jump (label_rtx (label));
      flags = 0;
    }
  else
    flags = EDGE_FALLTHRU;

  init_block = create_basic_block (NEXT_INSN (get_insns ()),
				   get_last_insn (),
				   ENTRY_BLOCK_PTR);
  init_block->frequency = ENTRY_BLOCK_PTR->frequency;
  init_block->count = ENTRY_BLOCK_PTR->count;
  if (e)
    {
      first_block = e->dest;
      redirect_edge_succ (e, init_block);
      e = make_edge (init_block, first_block, flags);
    }
  else
    e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = ENTRY_BLOCK_PTR->count;

  update_bb_for_insn (init_block);
  return init_block;
}

/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

static void
set_block_levels (tree block, int level)
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}

/* Create a block containing landing pads and similar stuff.  */

static void
construct_exit_block (void)
{
  rtx head = get_last_insn ();
  rtx end;
  basic_block exit_block;
  edge e, e2;
  unsigned ix;
  edge_iterator ei;
  rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);

  /* Make sure the locus is set to the end of the function, so that
     epilogue line numbers and warnings are set properly.  */
  if (cfun->function_end_locus != UNKNOWN_LOCATION)
    input_location = cfun->function_end_locus;

  /* The following insns belong to the top scope.  */
  set_curr_insn_block (DECL_INITIAL (current_function_decl));

  /* Generate rtl for function exit.  */
  expand_function_end ();

  end = get_last_insn ();
  if (head == end)
    return;
  /* While emitting the function end we could move end of the last basic
     block.  */
  BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
  while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
    head = NEXT_INSN (head);
  exit_block = create_basic_block (NEXT_INSN (head), end,
				   EXIT_BLOCK_PTR->prev_bb);
  exit_block->frequency = EXIT_BLOCK_PTR->frequency;
  exit_block->count = EXIT_BLOCK_PTR->count;

  ix = 0;
  while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
    {
      e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
      if (!(e->flags & EDGE_ABNORMAL))
	redirect_edge_succ (e, exit_block);
      else
	ix++;
    }

  e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = EXIT_BLOCK_PTR->count;
  FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
    if (e2 != e)
      {
	e->count -= e2->count;
	exit_block->count -= e2->count;
	exit_block->frequency -= EDGE_FREQUENCY (e2);
      }
  if (e->count < 0)
    e->count = 0;
  if (exit_block->count < 0)
    exit_block->count = 0;
  if (exit_block->frequency < 0)
    exit_block->frequency = 0;
  update_bb_for_insn (exit_block);
}
/* Helper function for discover_nonconstant_array_refs.
   Look for ARRAY_REF nodes with non-constant indexes and mark them
   addressable.  */

static tree
discover_nonconstant_array_refs_r (tree *tp, int *walk_subtrees,
                                   void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
    {
      while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
              && is_gimple_min_invariant (TREE_OPERAND (t, 1))
              && (!TREE_OPERAND (t, 2)
                  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
             || (TREE_CODE (t) == COMPONENT_REF
                 && (!TREE_OPERAND (t, 2)
                     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
             || TREE_CODE (t) == BIT_FIELD_REF
             || TREE_CODE (t) == REALPART_EXPR
             || TREE_CODE (t) == IMAGPART_EXPR
             || TREE_CODE (t) == VIEW_CONVERT_EXPR
             || CONVERT_EXPR_P (t))
        t = TREE_OPERAND (t, 0);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
        {
          t = get_base_address (t);
          if (t && DECL_P (t))
            TREE_ADDRESSABLE (t) = 1;
        }

      *walk_subtrees = 0;
    }

  return NULL_TREE;
}

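/* As an illustration: for a[i][0] with a non-invariant index I, the
   walker strips the invariant outer ARRAY_REF, stops at the inner a[i],
   and marks A addressable via get_base_address.  */
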
/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in a single register.  Discover such
   expressions and mark variables as addressable to avoid this
   scenario.  */

static void
discover_nonconstant_array_refs (void)
{
  basic_block bb;
  block_stmt_iterator bsi;

  FOR_EACH_BB (bb)
    {
      for (bsi = bsi_start (bb); !bsi_end_p (bsi); bsi_next (&bsi))
        walk_tree (bsi_stmt_ptr (bsi), discover_nonconstant_array_refs_r,
                   NULL, NULL);
    }
}

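/* A minimal illustration (hypothetical source, assuming a target where
   the whole array fits in one register):

	short a[2];
	...
	... = a[i];

   If A stayed in a register, the variable offset I could not be applied;
   marking A addressable forces it into memory, where indexed addressing
   works.  */
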
/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do conversion per basic block and preserve/update the tree CFG.
   This implies we have to do some magic as the CFG can simultaneously
   consist of basic blocks containing RTL and GIMPLE trees.  This can
   confuse the CFG hooks, so be careful not to manipulate the CFG during
   the expansion.  */

static unsigned int
tree_expand_cfg (void)
{
  basic_block bb, init_block;
  sbitmap blocks;
  edge_iterator ei;
  edge e;

  /* Some backends want to know that we are expanding to RTL.  */
  currently_expanding_to_rtl = 1;

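  /* Set up the insn locator machinery: the prologue is tagged with the
     location of the function declaration itself, and the insns emitted
     next belong to the outermost lexical block.  */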
  insn_locators_alloc ();
  if (!DECL_BUILT_IN (current_function_decl))
    set_curr_insn_source_location (DECL_SOURCE_LOCATION (current_function_decl));
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  prologue_locator = curr_insn_locator ();

  /* Make sure the first insn is a note, even if we don't want line
     numbers.  This guarantees that the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NOTE_INSN_DELETED);

  /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
  discover_nonconstant_array_refs ();

  targetm.expand_to_rtl_hook ();

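  /* Start from conservative defaults; the expanders raise the required
     stack alignment and record the largest jump table as code is
     emitted.  */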
  crtl->stack_alignment_needed = STACK_BOUNDARY;
  crtl->preferred_stack_boundary = STACK_BOUNDARY;
  cfun->cfg->max_jumptable_ents = 0;

  /* Expand the variables recorded during gimple lowering.  */
  expand_used_vars ();

  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
    {
      if (cfun->calls_alloca)
        warning (OPT_Wstack_protector,
                 "not protecting local variables: variable length buffer");
      if (has_short_buffer && !crtl->stack_protect_guard)
        warning (OPT_Wstack_protector,
                 "not protecting function: no buffer at least %d bytes long",
                 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
    }

  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();

  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (crtl->stack_protect_guard)
    stack_protect_prologue ();

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
     remaining edges in expand_gimple_basic_block.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    e->flags &= ~EDGE_EXECUTABLE;

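  /* Expand each basic block in turn.  LAB_RTX_FOR_BB maps each
     not-yet-expanded block to the RTL label a jump to it should use;
     expand_gimple_basic_block may split the block and returns the last
     block it produced.  */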
  lab_rtx_for_bb = pointer_map_create ();
  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
    bb = expand_gimple_basic_block (bb);
  pointer_map_destroy (lab_rtx_for_bb);
  free_histograms ();

  construct_exit_block ();
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  insn_locators_finalize ();

  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  /* Convert tree EH labels to RTL EH labels and zap the tree EH table.  */
  convert_from_eh_region_ranges ();
  set_eh_throw_stmt_table (cfun, NULL);

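  /* Expansion may have introduced jumps, labels and barriers behind the
     CFG's back.  Recompute label uses, split blocks at the new control
     flow, and remove edges that did not materialize.  */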
  rebuild_jump_labels (get_insns ());
  find_exception_handler_labels ();

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  purge_all_dead_edges ();
  sbitmap_free (blocks);

  compact_blocks ();
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  if (dump_file)
    {
      fprintf (dump_file,
               "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
         parent != NULL_TREE;
         parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
        TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     function, before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);
  return 0;
}

struct rtl_opt_pass pass_expand =
{
 {
  RTL_PASS,
  "expand",                             /* name */
  NULL,                                 /* gate */
  tree_expand_cfg,                      /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_EXPAND,                            /* tv_id */
  /* ??? If TER is enabled, we actually receive GENERIC.  */
  PROP_gimple_leh | PROP_cfg,           /* properties_required */
  PROP_rtl,                             /* properties_provided */
  PROP_trees,                           /* properties_destroyed */
  0,                                    /* todo_flags_start */
  TODO_dump_func,                       /* todo_flags_finish */
 }
};