/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "target.h"

46/* Return an expression tree corresponding to the RHS of GIMPLE
47 statement STMT. */
48
49tree
50gimple_assign_rhs_to_tree (gimple stmt)
51{
52 tree t;
82d6e6fc 53 enum gimple_rhs_class grhs_class;
726a989a 54
82d6e6fc 55 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
726a989a 56
82d6e6fc 57 if (grhs_class == GIMPLE_BINARY_RHS)
58 t = build2 (gimple_assign_rhs_code (stmt),
59 TREE_TYPE (gimple_assign_lhs (stmt)),
60 gimple_assign_rhs1 (stmt),
61 gimple_assign_rhs2 (stmt));
82d6e6fc 62 else if (grhs_class == GIMPLE_UNARY_RHS)
63 t = build1 (gimple_assign_rhs_code (stmt),
64 TREE_TYPE (gimple_assign_lhs (stmt)),
65 gimple_assign_rhs1 (stmt));
82d6e6fc 66 else if (grhs_class == GIMPLE_SINGLE_RHS)
67 t = gimple_assign_rhs1 (stmt);
68 else
69 gcc_unreachable ();
70
71 return t;
72}
73
74/* Return an expression tree corresponding to the PREDICATE of GIMPLE_COND
75 statement STMT. */
76
77static tree
78gimple_cond_pred_to_tree (gimple stmt)
79{
80 return build2 (gimple_cond_code (stmt), boolean_type_node,
81 gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
82}
83
84/* Helper for gimple_to_tree. Set EXPR_LOCATION for every expression
85 inside *TP. DATA is the location to set. */
86
87static tree
88set_expr_location_r (tree *tp, int *ws ATTRIBUTE_UNUSED, void *data)
89{
90 location_t *loc = (location_t *) data;
91 if (EXPR_P (*tp))
92 SET_EXPR_LOCATION (*tp, *loc);
93
94 return NULL_TREE;
95}
96
97
98/* RTL expansion has traditionally been done on trees, so the
99 transition to doing it on GIMPLE tuples is very invasive to the RTL
100 expander. To facilitate the transition, this function takes a
101 GIMPLE tuple STMT and returns the same statement in the form of a
102 tree. */
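/* For example, a GIMPLE_ASSIGN tuple for "a = b + c" is rebuilt below as
   the tree MODIFY_EXPR <a, PLUS_EXPR <b, c>> before being handed to the
   existing tree expanders.  */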
103
104static tree
105gimple_to_tree (gimple stmt)
106{
107 tree t;
108 int rn;
109 tree_ann_common_t ann;
110 location_t loc;
111
112 switch (gimple_code (stmt))
113 {
114 case GIMPLE_ASSIGN:
115 {
116 tree lhs = gimple_assign_lhs (stmt);
117
118 t = gimple_assign_rhs_to_tree (stmt);
119 t = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, t);
120 if (gimple_assign_nontemporal_move_p (stmt))
121 MOVE_NONTEMPORAL (t) = true;
122 }
123 break;
124
125 case GIMPLE_COND:
126 t = gimple_cond_pred_to_tree (stmt);
127 t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, NULL_TREE);
128 break;
129
130 case GIMPLE_GOTO:
131 t = build1 (GOTO_EXPR, void_type_node, gimple_goto_dest (stmt));
132 break;
133
134 case GIMPLE_LABEL:
135 t = build1 (LABEL_EXPR, void_type_node, gimple_label_label (stmt));
136 break;
137
138 case GIMPLE_RETURN:
139 {
140 tree retval = gimple_return_retval (stmt);
141
142 if (retval && retval != error_mark_node)
143 {
144 tree result = DECL_RESULT (current_function_decl);
145
146 /* If we are not returning the current function's RESULT_DECL,
147 build an assignment to it. */
148 if (retval != result)
149 {
150 /* I believe that a function's RESULT_DECL is unique. */
151 gcc_assert (TREE_CODE (retval) != RESULT_DECL);
152
153 retval = build2 (MODIFY_EXPR, TREE_TYPE (result),
154 result, retval);
155 }
156 }
157 t = build1 (RETURN_EXPR, void_type_node, retval);
158 }
159 break;
160
161 case GIMPLE_ASM:
162 {
163 size_t i, n;
164 tree out, in, cl;
165 const char *s;
166
167 out = NULL_TREE;
168 n = gimple_asm_noutputs (stmt);
169 if (n > 0)
170 {
171 t = out = gimple_asm_output_op (stmt, 0);
172 for (i = 1; i < n; i++)
173 {
174 TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
175 t = gimple_asm_output_op (stmt, i);
176 }
177 }
178
179 in = NULL_TREE;
180 n = gimple_asm_ninputs (stmt);
181 if (n > 0)
182 {
183 t = in = gimple_asm_input_op (stmt, 0);
184 for (i = 1; i < n; i++)
185 {
186 TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
187 t = gimple_asm_input_op (stmt, i);
188 }
189 }
190
191 cl = NULL_TREE;
192 n = gimple_asm_nclobbers (stmt);
193 if (n > 0)
194 {
195 t = cl = gimple_asm_clobber_op (stmt, 0);
196 for (i = 1; i < n; i++)
197 {
198 TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
199 t = gimple_asm_clobber_op (stmt, i);
200 }
201 }
202
203 s = gimple_asm_string (stmt);
204 t = build4 (ASM_EXPR, void_type_node, build_string (strlen (s), s),
205 out, in, cl);
206 ASM_VOLATILE_P (t) = gimple_asm_volatile_p (stmt);
207 ASM_INPUT_P (t) = gimple_asm_input_p (stmt);
208 }
209 break;
210
211 case GIMPLE_CALL:
212 {
213 size_t i;
214 tree fn;
215 tree_ann_common_t ann;
216
217 t = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
218
7c9577be 219 CALL_EXPR_FN (t) = gimple_call_fn (stmt);
726a989a 220 TREE_TYPE (t) = gimple_call_return_type (stmt);
221 CALL_EXPR_STATIC_CHAIN (t) = gimple_call_chain (stmt);
222
223 for (i = 0; i < gimple_call_num_args (stmt); i++)
224 CALL_EXPR_ARG (t, i) = gimple_call_arg (stmt, i);
225
226 if (!(gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE)))
227 TREE_SIDE_EFFECTS (t) = 1;
228
229 if (gimple_call_flags (stmt) & ECF_NOTHROW)
230 TREE_NOTHROW (t) = 1;
231
232 CALL_EXPR_TAILCALL (t) = gimple_call_tail_p (stmt);
233 CALL_EXPR_RETURN_SLOT_OPT (t) = gimple_call_return_slot_opt_p (stmt);
234 CALL_FROM_THUNK_P (t) = gimple_call_from_thunk_p (stmt);
235 CALL_CANNOT_INLINE_P (t) = gimple_call_cannot_inline_p (stmt);
236 CALL_EXPR_VA_ARG_PACK (t) = gimple_call_va_arg_pack_p (stmt);
237
238 /* If the call has a LHS then create a MODIFY_EXPR to hold it. */
239 {
240 tree lhs = gimple_call_lhs (stmt);
241
242 if (lhs)
243 t = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, t);
244 }
245
246 /* Record the original call statement, as it may be used
247 to retrieve profile information during expansion. */
248
249 if ((fn = gimple_call_fndecl (stmt)) != NULL_TREE
250 && DECL_BUILT_IN (fn))
251 {
252 ann = get_tree_common_ann (t);
253 ann->stmt = stmt;
254 }
255 }
256 break;
257
258 case GIMPLE_SWITCH:
259 {
260 tree label_vec;
261 size_t i;
262 tree elt = gimple_switch_label (stmt, 0);
263
264 label_vec = make_tree_vec (gimple_switch_num_labels (stmt));
265
266 if (!CASE_LOW (elt) && !CASE_HIGH (elt))
267 {
268 for (i = 1; i < gimple_switch_num_labels (stmt); i++)
269 TREE_VEC_ELT (label_vec, i - 1) = gimple_switch_label (stmt, i);
270
271 /* The default case in a SWITCH_EXPR must be at the end of
272 the label vector. */
273 TREE_VEC_ELT (label_vec, i - 1) = gimple_switch_label (stmt, 0);
274 }
275 else
276 {
277 for (i = 0; i < gimple_switch_num_labels (stmt); i++)
278 TREE_VEC_ELT (label_vec, i) = gimple_switch_label (stmt, i);
279 }
280
281 t = build3 (SWITCH_EXPR, void_type_node, gimple_switch_index (stmt),
282 NULL, label_vec);
283 }
284 break;
285
286 case GIMPLE_NOP:
287 case GIMPLE_PREDICT:
288 t = build1 (NOP_EXPR, void_type_node, size_zero_node);
289 break;
290
291 case GIMPLE_RESX:
292 t = build_resx (gimple_resx_region (stmt));
293 break;
294
295 default:
296 if (errorcount == 0)
297 {
298 error ("Unrecognized GIMPLE statement during RTL expansion");
299 print_gimple_stmt (stderr, stmt, 4, 0);
300 gcc_unreachable ();
301 }
302 else
303 {
304 /* Ignore any bad gimple codes if we're going to die anyhow,
305 so we can at least set TREE_ASM_WRITTEN and have the rest
306 of compilation advance without sudden ICE death. */
307 t = build1 (NOP_EXPR, void_type_node, size_zero_node);
308 break;
309 }
310 }
311
312 /* If STMT is inside an exception region, record it in the generated
313 expression. */
314 rn = lookup_stmt_eh_region (stmt);
315 if (rn >= 0)
316 {
317 tree call = get_call_expr_in (t);
318
319 ann = get_tree_common_ann (t);
320 ann->rn = rn;
321
322 /* For a CALL_EXPR on the RHS of an assignment, calls.c looks up
323 the CALL_EXPR not the assignment statement for EH region number. */
324 if (call && call != t)
325 {
326 ann = get_tree_common_ann (call);
327 ann->rn = rn;
328 }
329 }
330
331 /* Set EXPR_LOCATION in all the embedded expressions. */
332 loc = gimple_location (stmt);
333 walk_tree (&t, set_expr_location_r, (void *) &loc, NULL);
334
335 TREE_BLOCK (t) = gimple_block (stmt);
336
337 return t;
338}
339
340
341/* Release back to GC memory allocated by gimple_to_tree. */
342
343static void
344release_stmt_tree (gimple stmt, tree stmt_tree)
345{
346 tree_ann_common_t ann;
347
348 switch (gimple_code (stmt))
349 {
350 case GIMPLE_ASSIGN:
351 if (get_gimple_rhs_class (gimple_expr_code (stmt)) != GIMPLE_SINGLE_RHS)
352 ggc_free (TREE_OPERAND (stmt_tree, 1));
353 break;
354 case GIMPLE_COND:
355 ggc_free (COND_EXPR_COND (stmt_tree));
356 break;
357 case GIMPLE_RETURN:
358 if (TREE_OPERAND (stmt_tree, 0)
359 && TREE_CODE (TREE_OPERAND (stmt_tree, 0)) == MODIFY_EXPR)
360 ggc_free (TREE_OPERAND (stmt_tree, 0));
361 break;
362 case GIMPLE_CALL:
363 if (gimple_call_lhs (stmt))
364 {
365 ann = tree_common_ann (TREE_OPERAND (stmt_tree, 1));
366 if (ann)
367 ggc_free (ann);
368 ggc_free (TREE_OPERAND (stmt_tree, 1));
369 }
370 break;
371 default:
372 break;
373 }
374 ann = tree_common_ann (stmt_tree);
375 if (ann)
376 ggc_free (ann);
377 ggc_free (stmt_tree);
378}
379
380
381/* Verify that there is exactly one jump instruction emitted since LAST and attach a
382 REG_BR_PROB note specifying probability.
383 ??? We really ought to pass the probability down to RTL expanders and let it
384 re-distribute it when the conditional expands into multiple conditionals.
385 This is however difficult to do. */
ef950eba 386void
10d22567 387add_reg_br_prob_note (rtx last, int probability)
388{
389 if (profile_status == PROFILE_ABSENT)
390 return;
391 for (last = NEXT_INSN (last); last && NEXT_INSN (last); last = NEXT_INSN (last))
2ca202e7 392 if (JUMP_P (last))
393 {
394 /* It is common to emit condjump-around-jump sequence when we don't know
395 how to reverse the conditional. Special case this. */
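	/* The sequence recognized below is: a conditional jump (branching
	   around the real jump), an unconditional simple jump, a barrier,
	   and the target label, with nothing following.  Because the
	   conditional jump tests the inverted condition, the note added
	   here uses REG_BR_PROB_BASE - probability.  */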
396 if (!any_condjump_p (last)
2ca202e7 397 || !JUMP_P (NEXT_INSN (last))
e53de54d 398 || !simplejump_p (NEXT_INSN (last))
fa1ff4eb 399 || !NEXT_INSN (NEXT_INSN (last))
2ca202e7 400 || !BARRIER_P (NEXT_INSN (NEXT_INSN (last)))
fa1ff4eb 401 || !NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))
2ca202e7 402 || !LABEL_P (NEXT_INSN (NEXT_INSN (NEXT_INSN (last))))
403 || NEXT_INSN (NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))))
404 goto failed;
41806d92 405 gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
406 add_reg_note (last, REG_BR_PROB,
407 GEN_INT (REG_BR_PROB_BASE - probability));
408 return;
409 }
2ca202e7 410 if (!last || !JUMP_P (last) || !any_condjump_p (last))
411 goto failed;
412 gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
65c5f2a6 413 add_reg_note (last, REG_BR_PROB, GEN_INT (probability));
414 return;
415failed:
416 if (dump_file)
417 fprintf (dump_file, "Failed to add probability note\n");
418}
419
80c7a9eb 420
421#ifndef STACK_ALIGNMENT_NEEDED
422#define STACK_ALIGNMENT_NEEDED 1
423#endif
424
425
426/* This structure holds data relevant to one variable that will be
427 placed in a stack slot. */
428struct stack_var
429{
430 /* The Variable. */
431 tree decl;
432
433 /* The offset of the variable. During partitioning, this is the
434 offset relative to the partition. After partitioning, this
435 is relative to the stack frame. */
436 HOST_WIDE_INT offset;
437
438 /* Initially, the size of the variable. Later, the size of the partition,
439 if this variable becomes its partition's representative. */
440 HOST_WIDE_INT size;
441
442 /* The *byte* alignment required for this variable. Or, as with the
443 size, the alignment for this partition. */
444 unsigned int alignb;
445
446 /* The partition representative. */
447 size_t representative;
448
449 /* The next stack variable in the partition, or EOC. */
450 size_t next;
451};
452
453#define EOC ((size_t)-1)
454
455/* We have an array of such objects while deciding allocation. */
456static struct stack_var *stack_vars;
457static size_t stack_vars_alloc;
458static size_t stack_vars_num;
459
fa10beec 460/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
461 is non-decreasing. */
462static size_t *stack_vars_sorted;
463
464/* We have an interference graph between such objects. This graph
465 is lower triangular. */
466static bool *stack_vars_conflict;
467static size_t stack_vars_conflict_alloc;
468
469/* The phase of the stack frame. This is the known misalignment of
470 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
471 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
472static int frame_phase;
473
474/* Used during expand_used_vars to remember if we saw any decls for
475 which we'd like to enable stack smashing protection. */
476static bool has_protected_decls;
477
478/* Used during expand_used_vars. Remember if we saw a character buffer
479 smaller than our cutoff threshold. Used for -Wstack-protector. */
480static bool has_short_buffer;
481
482/* Discover the byte alignment to use for DECL. Ignore alignment
483 we can't do with expected alignment of the stack boundary. */
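/* For example, a decl whose DECL_ALIGN is 64 bits yields 8 here, unless
   LOCAL_ALIGNMENT bumps it or MAX_SUPPORTED_STACK_ALIGNMENT caps it.  */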
484
485static unsigned int
486get_decl_align_unit (tree decl)
487{
488 unsigned int align;
489
490 align = DECL_ALIGN (decl);
491 align = LOCAL_ALIGNMENT (TREE_TYPE (decl), align);
492
493 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
494 align = MAX_SUPPORTED_STACK_ALIGNMENT;
495
496 if (SUPPORTS_STACK_ALIGNMENT)
497 {
498 if (crtl->stack_alignment_estimated < align)
499 {
500 gcc_assert(!crtl->stack_realign_processed);
501 crtl->stack_alignment_estimated = align;
502 }
503 }
504
505 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
506 So here we only make sure stack_alignment_needed >= align. */
507 if (crtl->stack_alignment_needed < align)
508 crtl->stack_alignment_needed = align;
509 if (crtl->max_used_stack_slot_alignment < crtl->stack_alignment_needed)
510 crtl->max_used_stack_slot_alignment = crtl->stack_alignment_needed;
511
512 return align / BITS_PER_UNIT;
513}
514
515/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
516 Return the frame offset. */
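/* For example, with a downward-growing frame, frame_phase == 0, a current
   frame_offset of -16, SIZE == 12 and ALIGN == 8, frame_offset becomes
   -28 rounded down to -32, and -32 is returned as the new slot's offset.  */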
517
518static HOST_WIDE_INT
519alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
520{
521 HOST_WIDE_INT offset, new_frame_offset;
522
523 new_frame_offset = frame_offset;
524 if (FRAME_GROWS_DOWNWARD)
525 {
526 new_frame_offset -= size + frame_phase;
527 new_frame_offset &= -align;
528 new_frame_offset += frame_phase;
529 offset = new_frame_offset;
530 }
531 else
532 {
533 new_frame_offset -= frame_phase;
534 new_frame_offset += align - 1;
535 new_frame_offset &= -align;
536 new_frame_offset += frame_phase;
537 offset = new_frame_offset;
538 new_frame_offset += size;
539 }
540 frame_offset = new_frame_offset;
541
542 if (frame_offset_overflow (frame_offset, cfun->decl))
543 frame_offset = offset = 0;
544
545 return offset;
546}
547
548/* Accumulate DECL into STACK_VARS. */
549
550static void
551add_stack_var (tree decl)
552{
553 if (stack_vars_num >= stack_vars_alloc)
554 {
555 if (stack_vars_alloc)
556 stack_vars_alloc = stack_vars_alloc * 3 / 2;
557 else
558 stack_vars_alloc = 32;
559 stack_vars
560 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
561 }
562 stack_vars[stack_vars_num].decl = decl;
563 stack_vars[stack_vars_num].offset = 0;
564 stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
565 stack_vars[stack_vars_num].alignb = get_decl_align_unit (decl);
566
567 /* All variables are initially in their own partition. */
568 stack_vars[stack_vars_num].representative = stack_vars_num;
569 stack_vars[stack_vars_num].next = EOC;
570
571 /* Ensure that this decl doesn't get put onto the list twice. */
572 SET_DECL_RTL (decl, pc_rtx);
573
574 stack_vars_num++;
575}
576
577/* Compute the linear index of a lower-triangular coordinate (I, J). */
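/* For example, (I, J) == (3, 1) and (1, 3) both map to 3*4/2 + 1 == 7,
   so each unordered pair of variables gets a single conflict slot.  */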
578
579static size_t
580triangular_index (size_t i, size_t j)
581{
582 if (i < j)
583 {
584 size_t t;
585 t = i, i = j, j = t;
586 }
587 return (i * (i + 1)) / 2 + j;
588}
589
590/* Ensure that STACK_VARS_CONFLICT is large enough for N objects. */
591
592static void
593resize_stack_vars_conflict (size_t n)
594{
595 size_t size = triangular_index (n-1, n-1) + 1;
596
597 if (size <= stack_vars_conflict_alloc)
598 return;
599
600 stack_vars_conflict = XRESIZEVEC (bool, stack_vars_conflict, size);
601 memset (stack_vars_conflict + stack_vars_conflict_alloc, 0,
602 (size - stack_vars_conflict_alloc) * sizeof (bool));
603 stack_vars_conflict_alloc = size;
604}
605
606/* Make the decls associated with luid's X and Y conflict. */
607
608static void
609add_stack_var_conflict (size_t x, size_t y)
610{
611 size_t index = triangular_index (x, y);
612 gcc_assert (index < stack_vars_conflict_alloc);
613 stack_vars_conflict[index] = true;
614}
615
616/* Check whether the decls associated with luid's X and Y conflict. */
617
618static bool
619stack_var_conflict_p (size_t x, size_t y)
620{
621 size_t index = triangular_index (x, y);
622 gcc_assert (index < stack_vars_conflict_alloc);
623 return stack_vars_conflict[index];
624}
625
626/* Returns true if TYPE is or contains a union type. */
627
628static bool
629aggregate_contains_union_type (tree type)
630{
631 tree field;
632
633 if (TREE_CODE (type) == UNION_TYPE
634 || TREE_CODE (type) == QUAL_UNION_TYPE)
635 return true;
636 if (TREE_CODE (type) == ARRAY_TYPE)
637 return aggregate_contains_union_type (TREE_TYPE (type));
638 if (TREE_CODE (type) != RECORD_TYPE)
639 return false;
640
641 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
642 if (TREE_CODE (field) == FIELD_DECL)
643 if (aggregate_contains_union_type (TREE_TYPE (field)))
644 return true;
645
646 return false;
647}
648
649/* A subroutine of expand_used_vars. If two variables X and Y have alias
650 sets that do not conflict, then do add a conflict for these variables
651 in the interference graph. We also need to make sure to add conflicts
652 for union containing structures. Else RTL alias analysis comes along
653 and due to type based aliasing rules decides that for two overlapping
654 union temporaries { short s; int i; } accesses to the same mem through
655 different types may not alias and happily reorders stores across
656 life-time boundaries of the temporaries (See PR25654).
657 We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P. */
658
659static void
660add_alias_set_conflicts (void)
661{
662 size_t i, j, n = stack_vars_num;
663
664 for (i = 0; i < n; ++i)
665 {
666 tree type_i = TREE_TYPE (stack_vars[i].decl);
667 bool aggr_i = AGGREGATE_TYPE_P (type_i);
d239ed56 668 bool contains_union;
1f6d3a08 669
d239ed56 670 contains_union = aggregate_contains_union_type (type_i);
671 for (j = 0; j < i; ++j)
672 {
673 tree type_j = TREE_TYPE (stack_vars[j].decl);
674 bool aggr_j = AGGREGATE_TYPE_P (type_j);
675 if (aggr_i != aggr_j
676 /* Either the objects conflict by means of type based
677 aliasing rules, or we need to add a conflict. */
678 || !objects_must_conflict_p (type_i, type_j)
679 /* In case the types do not conflict ensure that access
680 to elements will conflict. In case of unions we have
681 to be careful as type based aliasing rules may say
682 access to the same memory does not conflict. So play
683 safe and add a conflict in this case. */
684 || contains_union)
685 add_stack_var_conflict (i, j);
686 }
687 }
688}
689
690/* A subroutine of partition_stack_vars. A comparison function for qsort,
fa10beec 691 sorting an array of indices by the size of the object. */
692
693static int
694stack_var_size_cmp (const void *a, const void *b)
695{
696 HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
697 HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
698 unsigned int uida = DECL_UID (stack_vars[*(const size_t *)a].decl);
699 unsigned int uidb = DECL_UID (stack_vars[*(const size_t *)b].decl);
700
701 if (sa < sb)
702 return -1;
703 if (sa > sb)
704 return 1;
705 /* For stack variables of the same size use the uid of the decl
706 to make the sort stable. */
707 if (uida < uidb)
708 return -1;
709 if (uida > uidb)
710 return 1;
711 return 0;
712}
713
714/* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
715 partitioning algorithm. Partitions A and B are known to be non-conflicting.
716 Merge them into a single partition A.
717
718 At the same time, add OFFSET to all variables in partition B. At the end
719 of the partitioning process we'll have a nice block easy to lay out within
720 the stack frame. */
721
722static void
723union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
724{
725 size_t i, last;
726
727 /* Update each element of partition B with the given offset,
728 and merge them into partition A. */
729 for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
730 {
731 stack_vars[i].offset += offset;
732 stack_vars[i].representative = a;
733 }
734 stack_vars[last].next = stack_vars[a].next;
735 stack_vars[a].next = b;
736
737 /* Update the required alignment of partition A to account for B. */
738 if (stack_vars[a].alignb < stack_vars[b].alignb)
739 stack_vars[a].alignb = stack_vars[b].alignb;
740
741 /* Update the interference graph and merge the conflicts. */
742 for (last = stack_vars_num, i = 0; i < last; ++i)
743 if (stack_var_conflict_p (b, i))
744 add_stack_var_conflict (a, i);
745}
746
747/* A subroutine of expand_used_vars. Binpack the variables into
748 partitions constrained by the interference graph. The overall
749 algorithm used is as follows:
750
751 Sort the objects by size.
752 For each object A {
753 S = size(A)
754 O = 0
755 loop {
756 Look for the largest non-conflicting object B with size <= S.
757 UNION (A, B)
758 offset(B) = O
759 O += size(B)
760 S -= size(B)
761 }
762 }
763*/
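/* A rough illustration: with non-conflicting objects of sizes 32, 16 and 8,
   the size-32 object becomes the representative and the two smaller objects
   are packed into its extent, so all three share one block of frame space.  */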
764
765static void
766partition_stack_vars (void)
767{
768 size_t si, sj, n = stack_vars_num;
769
770 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
771 for (si = 0; si < n; ++si)
772 stack_vars_sorted[si] = si;
773
774 if (n == 1)
775 return;
776
777 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);
778
779 /* Special case: detect when all variables conflict, and thus we can't
780 do anything during the partitioning loop. It isn't uncommon (with
781 C code at least) to declare all variables at the top of the function,
782 and if we're not inlining, then all variables will be in the same scope.
783 Take advantage of very fast libc routines for this scan. */
784 gcc_assert (sizeof(bool) == sizeof(char));
785 if (memchr (stack_vars_conflict, false, stack_vars_conflict_alloc) == NULL)
786 return;
787
788 for (si = 0; si < n; ++si)
789 {
790 size_t i = stack_vars_sorted[si];
791 HOST_WIDE_INT isize = stack_vars[i].size;
792 HOST_WIDE_INT offset = 0;
793
794 for (sj = si; sj-- > 0; )
795 {
796 size_t j = stack_vars_sorted[sj];
797 HOST_WIDE_INT jsize = stack_vars[j].size;
798 unsigned int jalign = stack_vars[j].alignb;
799
800 /* Ignore objects that aren't partition representatives. */
801 if (stack_vars[j].representative != j)
802 continue;
803
804 /* Ignore objects too large for the remaining space. */
805 if (isize < jsize)
806 continue;
807
808 /* Ignore conflicting objects. */
809 if (stack_var_conflict_p (i, j))
810 continue;
811
812 /* Refine the remaining space check to include alignment. */
813 if (offset & (jalign - 1))
814 {
815 HOST_WIDE_INT toff = offset;
816 toff += jalign - 1;
817 toff &= -(HOST_WIDE_INT)jalign;
818 if (isize - (toff - offset) < jsize)
819 continue;
820
821 isize -= toff - offset;
822 offset = toff;
823 }
824
825 /* UNION the objects, placing J at OFFSET. */
826 union_stack_vars (i, j, offset);
827
828 isize -= jsize;
829 if (isize == 0)
830 break;
831 }
832 }
833}
834
835/* A debugging aid for expand_used_vars. Dump the generated partitions. */
836
837static void
838dump_stack_var_partition (void)
839{
840 size_t si, i, j, n = stack_vars_num;
841
842 for (si = 0; si < n; ++si)
843 {
844 i = stack_vars_sorted[si];
845
846 /* Skip variables that aren't partition representatives, for now. */
847 if (stack_vars[i].representative != i)
848 continue;
849
850 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
851 " align %u\n", (unsigned long) i, stack_vars[i].size,
852 stack_vars[i].alignb);
853
854 for (j = i; j != EOC; j = stack_vars[j].next)
855 {
856 fputc ('\t', dump_file);
857 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
858 fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
1c50a20a 859 stack_vars[j].offset);
860 }
861 }
862}
863
864/* Assign rtl to DECL at frame offset OFFSET. */
865
866static void
867expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
868{
869 HOST_WIDE_INT align;
870 rtx x;
c22cacf3 871
872 /* If this fails, we've overflowed the stack frame. Error nicely? */
873 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
874
875 x = plus_constant (virtual_stack_vars_rtx, offset);
876 x = gen_rtx_MEM (DECL_MODE (decl), x);
877
878 /* Set alignment we actually gave this decl. */
879 offset -= frame_phase;
880 align = offset & -offset;
881 align *= BITS_PER_UNIT;
882 if (align > STACK_BOUNDARY || align == 0)
883 align = STACK_BOUNDARY;
884 DECL_ALIGN (decl) = align;
885 DECL_USER_ALIGN (decl) = 0;
886
887 set_mem_attributes (x, decl, true);
888 SET_DECL_RTL (decl, x);
889}
890
891/* A subroutine of expand_used_vars. Give each partition representative
892 a unique location within the stack frame. Update each partition member
893 with that location. */
894
895static void
7d69de61 896expand_stack_vars (bool (*pred) (tree))
897{
898 size_t si, i, j, n = stack_vars_num;
899
900 for (si = 0; si < n; ++si)
901 {
902 HOST_WIDE_INT offset;
903
904 i = stack_vars_sorted[si];
905
906 /* Skip variables that aren't partition representatives, for now. */
907 if (stack_vars[i].representative != i)
908 continue;
909
910 /* Skip variables that have already had rtl assigned. See also
911 add_stack_var where we perpetrate this pc_rtx hack. */
912 if (DECL_RTL (stack_vars[i].decl) != pc_rtx)
913 continue;
914
c22cacf3 915 /* Check the predicate to see whether this variable should be
916 allocated in this pass. */
917 if (pred && !pred (stack_vars[i].decl))
918 continue;
919
920 offset = alloc_stack_frame_space (stack_vars[i].size,
921 stack_vars[i].alignb);
922
923 /* Create rtl for each variable based on their location within the
924 partition. */
925 for (j = i; j != EOC; j = stack_vars[j].next)
926 {
927 gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
928 expand_one_stack_var_at (stack_vars[j].decl,
929 stack_vars[j].offset + offset);
930 }
931 }
932}
933
934/* Take into account all sizes of partitions and reset DECL_RTLs. */
935static HOST_WIDE_INT
936account_stack_vars (void)
937{
938 size_t si, j, i, n = stack_vars_num;
939 HOST_WIDE_INT size = 0;
940
941 for (si = 0; si < n; ++si)
942 {
943 i = stack_vars_sorted[si];
944
945 /* Skip variables that aren't partition representatives, for now. */
946 if (stack_vars[i].representative != i)
947 continue;
948
949 size += stack_vars[i].size;
950 for (j = i; j != EOC; j = stack_vars[j].next)
951 SET_DECL_RTL (stack_vars[j].decl, NULL);
952 }
953 return size;
954}
955
956/* A subroutine of expand_one_var. Called to immediately assign rtl
957 to a variable to be allocated in the stack frame. */
958
959static void
960expand_one_stack_var (tree var)
961{
962 HOST_WIDE_INT size, offset, align;
963
964 size = tree_low_cst (DECL_SIZE_UNIT (var), 1);
965 align = get_decl_align_unit (var);
966 offset = alloc_stack_frame_space (size, align);
967
968 expand_one_stack_var_at (var, offset);
969}
970
971/* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
972 that will reside in a hard register. */
973
974static void
975expand_one_hard_reg_var (tree var)
976{
977 rest_of_decl_compilation (var, 0, 0);
978}
979
980/* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
981 that will reside in a pseudo register. */
982
983static void
984expand_one_register_var (tree var)
985{
986 tree type = TREE_TYPE (var);
987 int unsignedp = TYPE_UNSIGNED (type);
988 enum machine_mode reg_mode
989 = promote_mode (type, DECL_MODE (var), &unsignedp, 0);
990 rtx x = gen_reg_rtx (reg_mode);
991
992 SET_DECL_RTL (var, x);
993
994 /* Note if the object is a user variable. */
995 if (!DECL_ARTIFICIAL (var))
996 mark_user_reg (x);
997
998 if (POINTER_TYPE_P (type))
999 mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (var))));
1000}
1001
1002/* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
128a79fb 1003 has some associated error, e.g. its type is error-mark. We just need
1004 to pick something that won't crash the rest of the compiler. */
1005
1006static void
1007expand_one_error_var (tree var)
1008{
1009 enum machine_mode mode = DECL_MODE (var);
1010 rtx x;
1011
1012 if (mode == BLKmode)
1013 x = gen_rtx_MEM (BLKmode, const0_rtx);
1014 else if (mode == VOIDmode)
1015 x = const0_rtx;
1016 else
1017 x = gen_reg_rtx (mode);
1018
1019 SET_DECL_RTL (var, x);
1020}
1021
c22cacf3 1022/* A subroutine of expand_one_var. VAR is a variable that will be
1023 allocated to the local stack frame. Return true if we wish to
1024 add VAR to STACK_VARS so that it will be coalesced with other
1025 variables. Return false to allocate VAR immediately.
1026
1027 This function is used to reduce the number of variables considered
1028 for coalescing, which reduces the size of the quadratic problem. */
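/* For example, at -O0 and without -fstack-protector, a block-scope 4-byte
   scalar is expanded immediately, while a block-scope 128-byte array is
   still deferred so that it can be packed with other variables.  */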
1029
1030static bool
1031defer_stack_allocation (tree var, bool toplevel)
1032{
1033 /* If stack protection is enabled, *all* stack variables must be deferred,
1034 so that we can re-order the strings to the top of the frame. */
1035 if (flag_stack_protect)
1036 return true;
1037
1038 /* Variables in the outermost scope automatically conflict with
1039 every other variable. The only reason to want to defer them
1040 at all is that, after sorting, we can more efficiently pack
1041 small variables in the stack frame. Continue to defer at -O2. */
1042 if (toplevel && optimize < 2)
1043 return false;
1044
1045 /* Without optimization, *most* variables are allocated from the
1046 stack, which makes the quadratic problem large exactly when we
c22cacf3 1047 want compilation to proceed as quickly as possible. On the
1048 other hand, we don't want the function's stack frame size to
1049 get completely out of hand. So we avoid adding scalars and
1050 "small" aggregates to the list at all. */
1051 if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
1052 return false;
1053
1054 return true;
1055}
1056
1057/* A subroutine of expand_used_vars. Expand one variable according to
2a7e31df 1058 its flavor. Variables to be placed on the stack are not actually
1059 expanded yet, merely recorded.
1060 When REALLY_EXPAND is false, only add stack values to be allocated.
1061 Return stack usage this variable is supposed to take.
1062*/
1f6d3a08 1063
1064static HOST_WIDE_INT
1065expand_one_var (tree var, bool toplevel, bool really_expand)
1f6d3a08 1066{
1067 if (SUPPORTS_STACK_ALIGNMENT
1068 && TREE_TYPE (var) != error_mark_node
1069 && TREE_CODE (var) == VAR_DECL)
1070 {
1071 unsigned int align;
1072
1073 /* Because we don't know if VAR will be in register or on stack,
1074 we conservatively assume it will be on stack even if VAR is
1075 eventually put into register after RA pass. For non-automatic
1076 variables, which won't be on stack, we collect alignment of
1077 type and ignore user specified alignment. */
1078 if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1079 align = TYPE_ALIGN (TREE_TYPE (var));
1080 else
1081 align = DECL_ALIGN (var);
1082
1083 if (crtl->stack_alignment_estimated < align)
1084 {
1085 /* stack_alignment_estimated shouldn't change after stack
1086 realign decision made */
1087 gcc_assert(!crtl->stack_realign_processed);
1088 crtl->stack_alignment_estimated = align;
1089 }
1090 }
1091
1f6d3a08 1092 if (TREE_CODE (var) != VAR_DECL)
4846b435 1093 ;
1094 else if (DECL_EXTERNAL (var))
1095 ;
833b3afe 1096 else if (DECL_HAS_VALUE_EXPR_P (var))
1097 ;
1098 else if (TREE_STATIC (var))
7e8b322a 1099 ;
1100 else if (DECL_RTL_SET_P (var))
1101 ;
1102 else if (TREE_TYPE (var) == error_mark_node)
1103 {
1104 if (really_expand)
1105 expand_one_error_var (var);
1106 }
1f6d3a08 1107 else if (DECL_HARD_REGISTER (var))
1108 {
1109 if (really_expand)
1110 expand_one_hard_reg_var (var);
1111 }
1f6d3a08 1112 else if (use_register_for_decl (var))
1113 {
1114 if (really_expand)
1115 expand_one_register_var (var);
1116 }
1117 else if (defer_stack_allocation (var, toplevel))
1118 add_stack_var (var);
1119 else
ff28a94d 1120 {
1121 if (really_expand)
1122 expand_one_stack_var (var);
1123 return tree_low_cst (DECL_SIZE_UNIT (var), 1);
1124 }
1125 return 0;
1126}
1127
1128/* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1129 expanding variables. Those variables that can be put into registers
1130 are allocated pseudos; those that can't are put on the stack.
1131
1132 TOPLEVEL is true if this is the outermost BLOCK. */
1133
1134static void
1135expand_used_vars_for_block (tree block, bool toplevel)
1136{
1137 size_t i, j, old_sv_num, this_sv_num, new_sv_num;
1138 tree t;
1139
1140 old_sv_num = toplevel ? 0 : stack_vars_num;
1141
1142 /* Expand all variables at this level. */
1143 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
7e8b322a 1144 if (TREE_USED (t))
ff28a94d 1145 expand_one_var (t, toplevel, true);
1146
1147 this_sv_num = stack_vars_num;
1148
1149 /* Expand all variables at containing levels. */
1150 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1151 expand_used_vars_for_block (t, false);
1152
1153 /* Since we do not track exact variable lifetimes (which is not even
6fc0bb99 1154 possible for variables whose address escapes), we mirror the block
1155 tree in the interference graph. Here we cause all variables at this
1156 level, and all sublevels, to conflict. Do make certain that a
1157 variable conflicts with itself. */
1158 if (old_sv_num < this_sv_num)
1159 {
1160 new_sv_num = stack_vars_num;
1161 resize_stack_vars_conflict (new_sv_num);
1162
1163 for (i = old_sv_num; i < new_sv_num; ++i)
1164 for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
1165 add_stack_var_conflict (i, j);
1166 }
1167}
1168
1169/* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1170 and clear TREE_USED on all local variables. */
1171
1172static void
1173clear_tree_used (tree block)
1174{
1175 tree t;
1176
1177 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
1178 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1179 TREE_USED (t) = 0;
1180
1181 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1182 clear_tree_used (t);
1183}
1184
1185/* Examine TYPE and determine a bit mask of the following features. */
1186
1187#define SPCT_HAS_LARGE_CHAR_ARRAY 1
1188#define SPCT_HAS_SMALL_CHAR_ARRAY 2
1189#define SPCT_HAS_ARRAY 4
1190#define SPCT_HAS_AGGREGATE 8
1191
1192static unsigned int
1193stack_protect_classify_type (tree type)
1194{
1195 unsigned int ret = 0;
1196 tree t;
1197
1198 switch (TREE_CODE (type))
1199 {
1200 case ARRAY_TYPE:
1201 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1202 if (t == char_type_node
1203 || t == signed_char_type_node
1204 || t == unsigned_char_type_node)
1205 {
1206 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1207 unsigned HOST_WIDE_INT len;
7d69de61 1208
1209 if (!TYPE_SIZE_UNIT (type)
1210 || !host_integerp (TYPE_SIZE_UNIT (type), 1))
1211 len = max;
7d69de61 1212 else
15362b89 1213 len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
1214
1215 if (len < max)
1216 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1217 else
1218 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1219 }
1220 else
1221 ret = SPCT_HAS_ARRAY;
1222 break;
1223
1224 case UNION_TYPE:
1225 case QUAL_UNION_TYPE:
1226 case RECORD_TYPE:
1227 ret = SPCT_HAS_AGGREGATE;
1228 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1229 if (TREE_CODE (t) == FIELD_DECL)
1230 ret |= stack_protect_classify_type (TREE_TYPE (t));
1231 break;
1232
1233 default:
1234 break;
1235 }
1236
1237 return ret;
1238}
1239
1240/* Return nonzero if DECL should be segregated into the "vulnerable" upper
1241 part of the local stack frame. Remember if we ever return nonzero for
1242 any variable in this function. The return value is the phase number in
1243 which the variable should be allocated. */
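/* For instance, assuming the default --param ssp-buffer-size value of 8,
   "char buf[64]" classifies as a large character array and is returned as
   phase 1, while a struct containing only such an array gets phase 2 under
   -fstack-protector-all.  */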
1244
1245static int
1246stack_protect_decl_phase (tree decl)
1247{
1248 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1249 int ret = 0;
1250
1251 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1252 has_short_buffer = true;
1253
1254 if (flag_stack_protect == 2)
1255 {
1256 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1257 && !(bits & SPCT_HAS_AGGREGATE))
1258 ret = 1;
1259 else if (bits & SPCT_HAS_ARRAY)
1260 ret = 2;
1261 }
1262 else
1263 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1264
1265 if (ret)
1266 has_protected_decls = true;
1267
1268 return ret;
1269}
1270
1271/* Two helper routines that check for phase 1 and phase 2. These are used
1272 as callbacks for expand_stack_vars. */
1273
1274static bool
1275stack_protect_decl_phase_1 (tree decl)
1276{
1277 return stack_protect_decl_phase (decl) == 1;
1278}
1279
1280static bool
1281stack_protect_decl_phase_2 (tree decl)
1282{
1283 return stack_protect_decl_phase (decl) == 2;
1284}
1285
1286/* Ensure that variables in different stack protection phases conflict
1287 so that they are not merged and share the same stack slot. */
1288
1289static void
1290add_stack_protection_conflicts (void)
1291{
1292 size_t i, j, n = stack_vars_num;
1293 unsigned char *phase;
1294
1295 phase = XNEWVEC (unsigned char, n);
1296 for (i = 0; i < n; ++i)
1297 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1298
1299 for (i = 0; i < n; ++i)
1300 {
1301 unsigned char ph_i = phase[i];
1302 for (j = 0; j < i; ++j)
1303 if (ph_i != phase[j])
1304 add_stack_var_conflict (i, j);
1305 }
1306
1307 XDELETEVEC (phase);
1308}
1309
1310/* Create a decl for the guard at the top of the stack frame. */
1311
1312static void
1313create_stack_guard (void)
1314{
1315 tree guard = build_decl (VAR_DECL, NULL, ptr_type_node);
1316 TREE_THIS_VOLATILE (guard) = 1;
1317 TREE_USED (guard) = 1;
1318 expand_one_stack_var (guard);
cb91fab0 1319 crtl->stack_protect_guard = guard;
1320}
1321
1322/* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1323 expanding variables. Those variables that can be put into registers
1324 are allocated pseudos; those that can't are put on the stack.
1325
1326 TOPLEVEL is true if this is the outermost BLOCK. */
1327
1328static HOST_WIDE_INT
1329account_used_vars_for_block (tree block, bool toplevel)
1330{
1331 size_t i, j, old_sv_num, this_sv_num, new_sv_num;
1332 tree t;
1333 HOST_WIDE_INT size = 0;
1334
1335 old_sv_num = toplevel ? 0 : stack_vars_num;
1336
1337 /* Expand all variables at this level. */
1338 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
1339 if (TREE_USED (t))
1340 size += expand_one_var (t, toplevel, false);
1341
1342 this_sv_num = stack_vars_num;
1343
1344 /* Expand all variables at containing levels. */
1345 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1346 size += account_used_vars_for_block (t, false);
1347
1348 /* Since we do not track exact variable lifetimes (which is not even
1349 possible for variables whose address escapes), we mirror the block
1350 tree in the interference graph. Here we cause all variables at this
1351 level, and all sublevels, to conflict. Do make certain that a
1352 variable conflicts with itself. */
1353 if (old_sv_num < this_sv_num)
1354 {
1355 new_sv_num = stack_vars_num;
1356 resize_stack_vars_conflict (new_sv_num);
1357
1358 for (i = old_sv_num; i < new_sv_num; ++i)
1359 for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
1360 add_stack_var_conflict (i, j);
1361 }
1362 return size;
1363}
1364
1365/* Prepare for expanding variables. */
1366static void
1367init_vars_expansion (void)
1368{
1369 tree t;
1370 /* Set TREE_USED on all variables in the local_decls. */
1371 for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
1372 TREE_USED (TREE_VALUE (t)) = 1;
1373
1374 /* Clear TREE_USED on all variables associated with a block scope. */
1375 clear_tree_used (DECL_INITIAL (current_function_decl));
1376
1377 /* Initialize local stack smashing state. */
1378 has_protected_decls = false;
1379 has_short_buffer = false;
1380}
1381
1382/* Free up stack variable graph data. */
1383static void
1384fini_vars_expansion (void)
1385{
1386 XDELETEVEC (stack_vars);
1387 XDELETEVEC (stack_vars_sorted);
1388 XDELETEVEC (stack_vars_conflict);
1389 stack_vars = NULL;
1390 stack_vars_alloc = stack_vars_num = 0;
1391 stack_vars_conflict = NULL;
1392 stack_vars_conflict_alloc = 0;
1393}
1394
1395HOST_WIDE_INT
1396estimated_stack_frame_size (void)
1397{
1398 HOST_WIDE_INT size = 0;
1399 tree t, outer_block = DECL_INITIAL (current_function_decl);
1400
1401 init_vars_expansion ();
1402
cb91fab0 1403 /* At this point all variables on the local_decls with TREE_USED
ff28a94d 1404 set are not associated with any block scope. Lay them out. */
cb91fab0 1405 for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
1406 {
1407 tree var = TREE_VALUE (t);
1408
1409 if (TREE_USED (var))
1410 size += expand_one_var (var, true, false);
1411 TREE_USED (var) = 1;
1412 }
1413 size += account_used_vars_for_block (outer_block, true);
1414 if (stack_vars_num > 0)
1415 {
1416 /* Due to the way alias sets work, no variables with non-conflicting
1417 alias sets may be assigned the same address. Add conflicts to
1418 reflect this. */
1419 add_alias_set_conflicts ();
1420
1421 /* If stack protection is enabled, we don't share space between
1422 vulnerable data and non-vulnerable data. */
1423 if (flag_stack_protect)
1424 add_stack_protection_conflicts ();
1425
1426 /* Now that we have collected all stack variables, and have computed a
1427 minimal interference graph, attempt to save some stack space. */
1428 partition_stack_vars ();
1429 if (dump_file)
1430 dump_stack_var_partition ();
1431
1432 size += account_stack_vars ();
1433 fini_vars_expansion ();
1434 }
1435 return size;
1436}
1437
1f6d3a08 1438/* Expand all variables used in the function. */
1439
1440static void
1441expand_used_vars (void)
1442{
802e9f8e 1443 tree t, next, outer_block = DECL_INITIAL (current_function_decl);
727a31fa 1444
1445 /* Compute the phase of the stack frame for this function. */
1446 {
1447 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1448 int off = STARTING_FRAME_OFFSET % align;
1449 frame_phase = off ? align - off : 0;
1450 }
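  /* For instance, if STARTING_FRAME_OFFSET were 8 with a 16-byte preferred
     boundary, off would be 8 and frame_phase would be set to 8; later slot
     allocations fold this phase into their rounding.  */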
727a31fa 1451
ff28a94d 1452 init_vars_expansion ();
7d69de61 1453
cb91fab0 1454 /* At this point all variables on the local_decls with TREE_USED
1f6d3a08 1455 set are not associated with any block scope. Lay them out. */
1456 t = cfun->local_decls;
1457 cfun->local_decls = NULL_TREE;
1458 for (; t; t = next)
1459 {
1460 tree var = TREE_VALUE (t);
1461 bool expand_now = false;
1462
1463 next = TREE_CHAIN (t);
1464
1465 /* We didn't set a block for static or extern because it's hard
1466 to tell the difference between a global variable (re)declared
1467 in a local scope, and one that's really declared there to
1468 begin with. And it doesn't really matter much, since we're
1469 not giving them stack space. Expand them now. */
1470 if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1471 expand_now = true;
1472
1473 /* Any variable that could have been hoisted into an SSA_NAME
1474 will have been propagated anywhere the optimizers chose,
1475 i.e. not confined to their original block. Allocate them
1476 as if they were defined in the outermost scope. */
1477 else if (is_gimple_reg (var))
1478 expand_now = true;
1479
1480 /* If the variable is not associated with any block, then it
1481 was created by the optimizers, and could be live anywhere
1482 in the function. */
1483 else if (TREE_USED (var))
1484 expand_now = true;
1485
1486 /* Finally, mark all variables on the list as used. We'll use
1487 this in a moment when we expand those associated with scopes. */
1488 TREE_USED (var) = 1;
1489
1490 if (expand_now)
1491 {
1492 expand_one_var (var, true, true);
1493 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1494 {
1495 rtx rtl = DECL_RTL_IF_SET (var);
1496
1497 /* Keep artificial non-ignored vars in cfun->local_decls
1498 chain until instantiate_decls. */
1499 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1500 {
1501 TREE_CHAIN (t) = cfun->local_decls;
1502 cfun->local_decls = t;
1503 continue;
1504 }
1505 }
1506 }
1507
1508 ggc_free (t);
1f6d3a08 1509 }
1510
1511 /* At this point, all variables within the block tree with TREE_USED
1512 set are actually used by the optimized function. Lay them out. */
1513 expand_used_vars_for_block (outer_block, true);
1514
1515 if (stack_vars_num > 0)
1516 {
1517 /* Due to the way alias sets work, no variables with non-conflicting
c22cacf3 1518 alias sets may be assigned the same address. Add conflicts to
1519 reflect this. */
1520 add_alias_set_conflicts ();
1521
c22cacf3 1522 /* If stack protection is enabled, we don't share space between
1523 vulnerable data and non-vulnerable data. */
1524 if (flag_stack_protect)
1525 add_stack_protection_conflicts ();
1526
c22cacf3 1527 /* Now that we have collected all stack variables, and have computed a
1528 minimal interference graph, attempt to save some stack space. */
1529 partition_stack_vars ();
1530 if (dump_file)
1531 dump_stack_var_partition ();
1532 }
1533
1534 /* There are several conditions under which we should create a
1535 stack guard: protect-all, alloca used, protected decls present. */
1536 if (flag_stack_protect == 2
1537 || (flag_stack_protect
e3b5732b 1538 && (cfun->calls_alloca || has_protected_decls)))
7d69de61 1539 create_stack_guard ();
1f6d3a08 1540
1541 /* Assign rtl to each variable based on these partitions. */
1542 if (stack_vars_num > 0)
1543 {
1544 /* Reorder decls to be protected by iterating over the variables
1545 array multiple times, and allocating out of each phase in turn. */
c22cacf3 1546 /* ??? We could probably integrate this into the qsort we did
1547 earlier, such that we naturally see these variables first,
1548 and thus naturally allocate things in the right order. */
1549 if (has_protected_decls)
1550 {
1551 /* Phase 1 contains only character arrays. */
1552 expand_stack_vars (stack_protect_decl_phase_1);
1553
1554 /* Phase 2 contains other kinds of arrays. */
1555 if (flag_stack_protect == 2)
1556 expand_stack_vars (stack_protect_decl_phase_2);
1557 }
1558
1559 expand_stack_vars (NULL);
1f6d3a08 1560
ff28a94d 1561 fini_vars_expansion ();
1562 }
1563
1564 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1565 if (STACK_ALIGNMENT_NEEDED)
1566 {
1567 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1568 if (!FRAME_GROWS_DOWNWARD)
1569 frame_offset += align - 1;
1570 frame_offset &= -align;
1571 }
1572}
1573
1574
1575/* If we need to produce a detailed dump, print the tree representation
1576 for STMT to the dump file. SINCE is the last RTX after which the RTL
1577 generated for STMT should have been appended. */
1578
1579static void
726a989a 1580maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
1581{
1582 if (dump_file && (dump_flags & TDF_DETAILS))
1583 {
1584 fprintf (dump_file, "\n;; ");
726a989a 1585 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
1586 fprintf (dump_file, "\n");
1587
1588 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1589 }
1590}
1591
1592/* Maps the blocks that do not contain tree labels to rtx labels. */
1593
1594static struct pointer_map_t *lab_rtx_for_bb;
1595
1596/* Returns the label_rtx expression for a label starting basic block BB. */
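/* If BB is still in GIMPLE form and starts with an ordinary GIMPLE_LABEL,
   the rtx for that label is returned; otherwise (or for nonlocal labels)
   a fresh label rtx is created and remembered in lab_rtx_for_bb.  */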
1597
1598static rtx
726a989a 1599label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
a9b77cd1 1600{
1601 gimple_stmt_iterator gsi;
1602 tree lab;
1603 gimple lab_stmt;
8b11009b 1604 void **elt;
1605
1606 if (bb->flags & BB_RTL)
1607 return block_label (bb);
1608
1609 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1610 if (elt)
ae50c0cb 1611 return (rtx) *elt;
1612
1613 /* Find the tree label if it is present. */
1614
726a989a 1615 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
a9b77cd1 1616 {
1617 lab_stmt = gsi_stmt (gsi);
1618 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
1619 break;
1620
726a989a 1621 lab = gimple_label_label (lab_stmt);
1622 if (DECL_NONLOCAL (lab))
1623 break;
1624
1625 return label_rtx (lab);
1626 }
1627
1628 elt = pointer_map_insert (lab_rtx_for_bb, bb);
1629 *elt = gen_label_rtx ();
ae50c0cb 1630 return (rtx) *elt;
1631}
1632
1633
1634/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
1635 Returns a new basic block if we've terminated the current basic
1636 block and created a new one. */
1637
1638static basic_block
726a989a 1639expand_gimple_cond (basic_block bb, gimple stmt)
1640{
1641 basic_block new_bb, dest;
1642 edge new_edge;
1643 edge true_edge;
1644 edge false_edge;
726a989a 1645 tree pred = gimple_cond_pred_to_tree (stmt);
1646 rtx last2, last;
1647
1648 last2 = last = get_last_insn ();
1649
1650 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
726a989a 1651 if (gimple_has_location (stmt))
80c7a9eb 1652 {
1653 set_curr_insn_source_location (gimple_location (stmt));
1654 set_curr_insn_block (gimple_block (stmt));
1655 }
1656
1657 /* These flags have no purpose in RTL land. */
1658 true_edge->flags &= ~EDGE_TRUE_VALUE;
1659 false_edge->flags &= ~EDGE_FALSE_VALUE;
1660
1661 /* We can either have a pure conditional jump with one fallthru edge or
1662 two-way jump that needs to be decomposed into two basic blocks. */
a9b77cd1 1663 if (false_edge->dest == bb->next_bb)
80c7a9eb 1664 {
a9b77cd1 1665 jumpif (pred, label_rtx_for_bb (true_edge->dest));
10d22567 1666 add_reg_br_prob_note (last, true_edge->probability);
726a989a 1667 maybe_dump_rtl_for_gimple_stmt (stmt, last);
a9b77cd1 1668 if (true_edge->goto_locus)
1669 {
1670 set_curr_insn_source_location (true_edge->goto_locus);
1671 set_curr_insn_block (true_edge->goto_block);
1672 true_edge->goto_locus = curr_insn_locator ();
1673 }
1674 true_edge->goto_block = NULL;
a9b77cd1 1675 false_edge->flags |= EDGE_FALLTHRU;
726a989a 1676 ggc_free (pred);
1677 return NULL;
1678 }
a9b77cd1 1679 if (true_edge->dest == bb->next_bb)
80c7a9eb 1680 {
a9b77cd1 1681 jumpifnot (pred, label_rtx_for_bb (false_edge->dest));
10d22567 1682 add_reg_br_prob_note (last, false_edge->probability);
726a989a 1683 maybe_dump_rtl_for_gimple_stmt (stmt, last);
a9b77cd1 1684 if (false_edge->goto_locus)
1685 {
1686 set_curr_insn_source_location (false_edge->goto_locus);
1687 set_curr_insn_block (false_edge->goto_block);
1688 false_edge->goto_locus = curr_insn_locator ();
1689 }
1690 false_edge->goto_block = NULL;
a9b77cd1 1691 true_edge->flags |= EDGE_FALLTHRU;
726a989a 1692 ggc_free (pred);
1693 return NULL;
1694 }
80c7a9eb 1695
a9b77cd1 1696 jumpif (pred, label_rtx_for_bb (true_edge->dest));
10d22567 1697 add_reg_br_prob_note (last, true_edge->probability);
80c7a9eb 1698 last = get_last_insn ();
1699 if (false_edge->goto_locus)
1700 {
1701 set_curr_insn_source_location (false_edge->goto_locus);
1702 set_curr_insn_block (false_edge->goto_block);
1703 false_edge->goto_locus = curr_insn_locator ();
1704 }
1705 false_edge->goto_block = NULL;
a9b77cd1 1706 emit_jump (label_rtx_for_bb (false_edge->dest));
1707
1708 BB_END (bb) = last;
1709 if (BARRIER_P (BB_END (bb)))
1710 BB_END (bb) = PREV_INSN (BB_END (bb));
1711 update_bb_for_insn (bb);
1712
1713 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1714 dest = false_edge->dest;
1715 redirect_edge_succ (false_edge, new_bb);
1716 false_edge->flags |= EDGE_FALLTHRU;
1717 new_bb->count = false_edge->count;
1718 new_bb->frequency = EDGE_FREQUENCY (false_edge);
1719 new_edge = make_edge (new_bb, dest, 0);
1720 new_edge->probability = REG_BR_PROB_BASE;
1721 new_edge->count = new_bb->count;
1722 if (BARRIER_P (BB_END (new_bb)))
1723 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1724 update_bb_for_insn (new_bb);
1725
726a989a 1726 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
c22cacf3 1727
726a989a 1728 ggc_free (pred);
1729 return new_bb;
1730}
1731
726a989a 1732/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
1733 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
1734 generated a tail call (something that might be denied by the ABI
1735 rules governing the call; see calls.c).
1736
1737 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
1738 can still reach the rest of BB. The case here is __builtin_sqrt,
1739 where the NaN result goes through the external function (with a
1740 tailcall) and the normal result happens via a sqrt instruction. */
1741
1742static basic_block
726a989a 1743expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
80c7a9eb 1744{
b7211528 1745 rtx last2, last;
224e770b 1746 edge e;
628f6a4e 1747 edge_iterator ei;
1748 int probability;
1749 gcov_type count;
726a989a 1750 tree stmt_tree = gimple_to_tree (stmt);
80c7a9eb 1751
1752 last2 = last = get_last_insn ();
1753
1754 expand_expr_stmt (stmt_tree);
1755
1756 release_stmt_tree (stmt, stmt_tree);
1757
1758 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
1759 if (CALL_P (last) && SIBLING_CALL_P (last))
1760 goto found;
80c7a9eb 1761
726a989a 1762 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 1763
cea49550 1764 *can_fallthru = true;
224e770b 1765 return NULL;
80c7a9eb 1766
1767 found:
1768 /* ??? Wouldn't it be better to just reset any pending stack adjust?
1769 Any instructions emitted here are about to be deleted. */
1770 do_pending_stack_adjust ();
1771
1772 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
1773 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
1774 EH or abnormal edges, we shouldn't have created a tail call in
1775 the first place. So it seems to me we should just be removing
1776 all edges here, or redirecting the existing fallthru edge to
1777 the exit block. */
1778
224e770b
RH
1779 probability = 0;
1780 count = 0;
224e770b 1781
628f6a4e
BE
1782 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
1783 {
224e770b
RH
1784 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
1785 {
1786 if (e->dest != EXIT_BLOCK_PTR)
80c7a9eb 1787 {
224e770b
RH
1788 e->dest->count -= e->count;
1789 e->dest->frequency -= EDGE_FREQUENCY (e);
1790 if (e->dest->count < 0)
c22cacf3 1791 e->dest->count = 0;
224e770b 1792 if (e->dest->frequency < 0)
c22cacf3 1793 e->dest->frequency = 0;
80c7a9eb 1794 }
224e770b
RH
1795 count += e->count;
1796 probability += e->probability;
1797 remove_edge (e);
80c7a9eb 1798 }
628f6a4e
BE
1799 else
1800 ei_next (&ei);
80c7a9eb
RH
1801 }
1802
224e770b
RH
1803 /* This is somewhat ugly: the call_expr expander often emits instructions
1804 after the sibcall (to perform the function return). These confuse the
12eff7b7 1805 find_many_sub_basic_blocks code, so we need to get rid of these. */
224e770b 1806 last = NEXT_INSN (last);
341c100f 1807 gcc_assert (BARRIER_P (last));
cea49550
RH
1808
1809 *can_fallthru = false;
224e770b
RH
1810 while (NEXT_INSN (last))
1811 {
 1812	  /* For instance, the sqrt builtin expander expands an if with a
 1813	     sibcall in the `then' arm and a label for the `else' arm.  */
1814 if (LABEL_P (NEXT_INSN (last)))
cea49550
RH
1815 {
1816 *can_fallthru = true;
1817 break;
1818 }
224e770b
RH
1819 delete_insn (NEXT_INSN (last));
1820 }
1821
1822 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
1823 e->probability += probability;
1824 e->count += count;
1825 BB_END (bb) = last;
1826 update_bb_for_insn (bb);
1827
1828 if (NEXT_INSN (last))
1829 {
1830 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1831
1832 last = BB_END (bb);
1833 if (BARRIER_P (last))
1834 BB_END (bb) = PREV_INSN (last);
1835 }
1836
726a989a 1837 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 1838
224e770b 1839 return bb;
80c7a9eb
RH
1840}
1841
242229bb
JH
1842/* Expand basic block BB from GIMPLE trees to RTL. */
1843
1844static basic_block
10d22567 1845expand_gimple_basic_block (basic_block bb)
242229bb 1846{
726a989a
RB
1847 gimple_stmt_iterator gsi;
1848 gimple_seq stmts;
1849 gimple stmt = NULL;
242229bb
JH
1850 rtx note, last;
1851 edge e;
628f6a4e 1852 edge_iterator ei;
8b11009b 1853 void **elt;
242229bb
JH
1854
1855 if (dump_file)
726a989a
RB
1856 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
1857 bb->index);
1858
1859 /* Note that since we are now transitioning from GIMPLE to RTL, we
1860 cannot use the gsi_*_bb() routines because they expect the basic
1861 block to be in GIMPLE, instead of RTL. Therefore, we need to
1862 access the BB sequence directly. */
1863 stmts = bb_seq (bb);
1864 bb->il.gimple = NULL;
bf08ebeb 1865 rtl_profile_for_bb (bb);
5e2d947c
JH
1866 init_rtl_bb_info (bb);
1867 bb->flags |= BB_RTL;
1868
a9b77cd1
ZD
 1869   /* Remove the RETURN_EXPR if we may fall through to the exit
1870 instead. */
726a989a
RB
1871 gsi = gsi_last (stmts);
1872 if (!gsi_end_p (gsi)
1873 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
a9b77cd1 1874 {
726a989a 1875 gimple ret_stmt = gsi_stmt (gsi);
a9b77cd1
ZD
1876
1877 gcc_assert (single_succ_p (bb));
1878 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
1879
1880 if (bb->next_bb == EXIT_BLOCK_PTR
726a989a 1881 && !gimple_return_retval (ret_stmt))
a9b77cd1 1882 {
726a989a 1883 gsi_remove (&gsi, false);
a9b77cd1
ZD
1884 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
1885 }
1886 }
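  /* For example, a value-less "return;" ending the block that immediately
     precedes the exit block is simply dropped here and its edge becomes a
     fallthru, instead of being expanded into an explicit jump to the
     epilogue.  */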
1887
726a989a
RB
1888 gsi = gsi_start (stmts);
1889 if (!gsi_end_p (gsi))
8b11009b 1890 {
726a989a
RB
1891 stmt = gsi_stmt (gsi);
1892 if (gimple_code (stmt) != GIMPLE_LABEL)
1893 stmt = NULL;
8b11009b 1894 }
242229bb 1895
8b11009b
ZD
1896 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1897
1898 if (stmt || elt)
242229bb
JH
1899 {
1900 last = get_last_insn ();
1901
8b11009b
ZD
1902 if (stmt)
1903 {
726a989a
RB
1904 tree stmt_tree = gimple_to_tree (stmt);
1905 expand_expr_stmt (stmt_tree);
1906 release_stmt_tree (stmt, stmt_tree);
1907 gsi_next (&gsi);
8b11009b
ZD
1908 }
1909
1910 if (elt)
ae50c0cb 1911 emit_label ((rtx) *elt);
242229bb 1912
caf93cb0 1913       /* Java emits line number notes at the top of labels.
c22cacf3 1914 ??? Make this go away once line number notes are obsoleted. */
242229bb 1915 BB_HEAD (bb) = NEXT_INSN (last);
4b4bf941 1916 if (NOTE_P (BB_HEAD (bb)))
242229bb 1917 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
242229bb 1918 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
b7211528 1919
726a989a 1920 maybe_dump_rtl_for_gimple_stmt (stmt, last);
242229bb
JH
1921 }
1922 else
1923 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
1924
1925 NOTE_BASIC_BLOCK (note) = bb;
1926
628f6a4e 1927 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
242229bb 1928 {
242229bb
JH
1929 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
1930 e->flags &= ~EDGE_EXECUTABLE;
1931
1932 /* At the moment not all abnormal edges match the RTL representation.
c22cacf3
MS
1933 It is safe to remove them here as find_many_sub_basic_blocks will
1934 rediscover them. In the future we should get this fixed properly. */
242229bb
JH
1935 if (e->flags & EDGE_ABNORMAL)
1936 remove_edge (e);
628f6a4e
BE
1937 else
1938 ei_next (&ei);
242229bb
JH
1939 }
1940
726a989a 1941 for (; !gsi_end_p (gsi); gsi_next (&gsi))
242229bb 1942 {
726a989a 1943 gimple stmt = gsi_stmt (gsi);
cea49550 1944 basic_block new_bb;
242229bb 1945
242229bb
JH
1946 /* Expand this statement, then evaluate the resulting RTL and
1947 fixup the CFG accordingly. */
726a989a 1948 if (gimple_code (stmt) == GIMPLE_COND)
cea49550 1949 {
726a989a 1950 new_bb = expand_gimple_cond (bb, stmt);
cea49550
RH
1951 if (new_bb)
1952 return new_bb;
1953 }
80c7a9eb 1954 else
242229bb 1955 {
726a989a 1956 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
cea49550
RH
1957 {
1958 bool can_fallthru;
1959 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
1960 if (new_bb)
1961 {
1962 if (can_fallthru)
1963 bb = new_bb;
1964 else
1965 return new_bb;
1966 }
1967 }
80c7a9eb 1968 else
b7211528 1969 {
726a989a 1970 tree stmt_tree = gimple_to_tree (stmt);
b7211528 1971 last = get_last_insn ();
726a989a
RB
1972 expand_expr_stmt (stmt_tree);
1973 maybe_dump_rtl_for_gimple_stmt (stmt, last);
1974 release_stmt_tree (stmt, stmt_tree);
b7211528 1975 }
242229bb
JH
1976 }
1977 }
1978
7241571e 1979 /* Expand implicit goto and convert goto_locus. */
a9b77cd1
ZD
1980 FOR_EACH_EDGE (e, ei, bb->succs)
1981 {
7241571e
JJ
1982 if (e->goto_locus && e->goto_block)
1983 {
1984 set_curr_insn_source_location (e->goto_locus);
1985 set_curr_insn_block (e->goto_block);
1986 e->goto_locus = curr_insn_locator ();
1987 }
1988 e->goto_block = NULL;
1989 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
1990 {
1991 emit_jump (label_rtx_for_bb (e->dest));
1992 e->flags &= ~EDGE_FALLTHRU;
1993 }
a9b77cd1
ZD
1994 }
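  /* For example, a block whose fallthru successor is not the textually next
     block had an explicit jump emitted in the loop above and lost its
     EDGE_FALLTHRU flag; edges that already reach the next block need no
     code at all.  */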
1995
242229bb
JH
1996 do_pending_stack_adjust ();
1997
3f117656 1998 /* Find the block tail. The last insn in the block is the insn
242229bb
JH
1999 before a barrier and/or table jump insn. */
2000 last = get_last_insn ();
4b4bf941 2001 if (BARRIER_P (last))
242229bb
JH
2002 last = PREV_INSN (last);
2003 if (JUMP_TABLE_DATA_P (last))
2004 last = PREV_INSN (PREV_INSN (last));
2005 BB_END (bb) = last;
caf93cb0 2006
242229bb 2007 update_bb_for_insn (bb);
80c7a9eb 2008
242229bb
JH
2009 return bb;
2010}
2011
2012
2013/* Create a basic block for initialization code. */
2014
2015static basic_block
2016construct_init_block (void)
2017{
2018 basic_block init_block, first_block;
fd44f634
JH
2019 edge e = NULL;
2020 int flags;
275a4187 2021
fd44f634
JH
2022 /* Multiple entry points not supported yet. */
2023 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
5e2d947c
JH
2024 init_rtl_bb_info (ENTRY_BLOCK_PTR);
2025 init_rtl_bb_info (EXIT_BLOCK_PTR);
2026 ENTRY_BLOCK_PTR->flags |= BB_RTL;
2027 EXIT_BLOCK_PTR->flags |= BB_RTL;
242229bb 2028
fd44f634 2029 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
275a4187 2030
fd44f634
JH
 2031  /* When the entry edge points to the first basic block, we don't need a
 2032     jump; otherwise we have to jump to the proper target.  */
2033 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
2034 {
726a989a 2035 tree label = gimple_block_label (e->dest);
fd44f634
JH
2036
2037 emit_jump (label_rtx (label));
2038 flags = 0;
275a4187 2039 }
fd44f634
JH
2040 else
2041 flags = EDGE_FALLTHRU;
242229bb
JH
2042
2043 init_block = create_basic_block (NEXT_INSN (get_insns ()),
2044 get_last_insn (),
2045 ENTRY_BLOCK_PTR);
2046 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
2047 init_block->count = ENTRY_BLOCK_PTR->count;
2048 if (e)
2049 {
2050 first_block = e->dest;
2051 redirect_edge_succ (e, init_block);
fd44f634 2052 e = make_edge (init_block, first_block, flags);
242229bb
JH
2053 }
2054 else
2055 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
2056 e->probability = REG_BR_PROB_BASE;
2057 e->count = ENTRY_BLOCK_PTR->count;
2058
2059 update_bb_for_insn (init_block);
2060 return init_block;
2061}
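/* In the common case the entry edge already points at
   ENTRY_BLOCK_PTR->next_bb, so no jump is emitted above and the init block
   simply falls through into the first real block; only when the first block
   has been moved elsewhere do the jump and a non-fallthru edge become
   necessary.  */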
2062
55e092c4
JH
2063/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
2064 found in the block tree. */
2065
2066static void
2067set_block_levels (tree block, int level)
2068{
2069 while (block)
2070 {
2071 BLOCK_NUMBER (block) = level;
2072 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
2073 block = BLOCK_CHAIN (block);
2074 }
2075}
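/* For a hypothetical block tree

     B0                    BLOCK_NUMBER (B0) = 0
     +-- B1                BLOCK_NUMBER (B1) = 1
     |   +-- B3            BLOCK_NUMBER (B3) = 2
     +-- B2                BLOCK_NUMBER (B2) = 1

   set_block_levels (B0, 0) walks each BLOCK_CHAIN at its level and recurses
   into BLOCK_SUBBLOCKS with LEVEL + 1.  */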
242229bb
JH
2076
2077/* Create a block containing landing pads and similar stuff. */
2078
2079static void
2080construct_exit_block (void)
2081{
2082 rtx head = get_last_insn ();
2083 rtx end;
2084 basic_block exit_block;
628f6a4e
BE
2085 edge e, e2;
2086 unsigned ix;
2087 edge_iterator ei;
071a42f9 2088 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
242229bb 2089
bf08ebeb
JH
2090 rtl_profile_for_bb (EXIT_BLOCK_PTR);
2091
caf93cb0 2092 /* Make sure the locus is set to the end of the function, so that
242229bb 2093 epilogue line numbers and warnings are set properly. */
6773e15f 2094 if (cfun->function_end_locus != UNKNOWN_LOCATION)
242229bb
JH
2095 input_location = cfun->function_end_locus;
2096
2097 /* The following insns belong to the top scope. */
55e092c4 2098 set_curr_insn_block (DECL_INITIAL (current_function_decl));
242229bb 2099
242229bb
JH
2100 /* Generate rtl for function exit. */
2101 expand_function_end ();
2102
2103 end = get_last_insn ();
2104 if (head == end)
2105 return;
071a42f9
JH
 2106  /* While emitting the function end we may have moved the end of the last
 2107     basic block, so restore it.  */
2108 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
4b4bf941 2109 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
242229bb 2110 head = NEXT_INSN (head);
80c7a9eb
RH
2111 exit_block = create_basic_block (NEXT_INSN (head), end,
2112 EXIT_BLOCK_PTR->prev_bb);
242229bb
JH
2113 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
2114 exit_block->count = EXIT_BLOCK_PTR->count;
628f6a4e
BE
2115
2116 ix = 0;
2117 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
242229bb 2118 {
8fb790fd 2119 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
242229bb 2120 if (!(e->flags & EDGE_ABNORMAL))
628f6a4e
BE
2121 redirect_edge_succ (e, exit_block);
2122 else
2123 ix++;
242229bb 2124 }
628f6a4e 2125
242229bb
JH
2126 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
2127 e->probability = REG_BR_PROB_BASE;
2128 e->count = EXIT_BLOCK_PTR->count;
628f6a4e 2129 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
242229bb
JH
2130 if (e2 != e)
2131 {
c22cacf3 2132 e->count -= e2->count;
242229bb
JH
2133 exit_block->count -= e2->count;
2134 exit_block->frequency -= EDGE_FREQUENCY (e2);
2135 }
2136 if (e->count < 0)
2137 e->count = 0;
2138 if (exit_block->count < 0)
2139 exit_block->count = 0;
2140 if (exit_block->frequency < 0)
2141 exit_block->frequency = 0;
2142 update_bb_for_insn (exit_block);
2143}
2144
c22cacf3 2145/* Helper function for discover_nonconstant_array_refs.
a1b23b2f
UW
2146 Look for ARRAY_REF nodes with non-constant indexes and mark them
2147 addressable. */
2148
2149static tree
2150discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
2151 void *data ATTRIBUTE_UNUSED)
2152{
2153 tree t = *tp;
2154
2155 if (IS_TYPE_OR_DECL_P (t))
2156 *walk_subtrees = 0;
2157 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2158 {
2159 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2160 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
2161 && (!TREE_OPERAND (t, 2)
2162 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
2163 || (TREE_CODE (t) == COMPONENT_REF
2164 && (!TREE_OPERAND (t,2)
2165 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
2166 || TREE_CODE (t) == BIT_FIELD_REF
2167 || TREE_CODE (t) == REALPART_EXPR
2168 || TREE_CODE (t) == IMAGPART_EXPR
2169 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1043771b 2170 || CONVERT_EXPR_P (t))
a1b23b2f
UW
2171 t = TREE_OPERAND (t, 0);
2172
2173 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2174 {
2175 t = get_base_address (t);
2176 if (t && DECL_P (t))
2177 TREE_ADDRESSABLE (t) = 1;
2178 }
2179
2180 *walk_subtrees = 0;
2181 }
2182
2183 return NULL_TREE;
2184}
2185
2186/* RTL expansion is not able to compile array references with variable
 2187   offsets for arrays stored in a single register.  Discover such
2188 expressions and mark variables as addressable to avoid this
2189 scenario. */
2190
2191static void
2192discover_nonconstant_array_refs (void)
2193{
2194 basic_block bb;
726a989a 2195 gimple_stmt_iterator gsi;
a1b23b2f
UW
2196
2197 FOR_EACH_BB (bb)
726a989a
RB
2198 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2199 {
2200 gimple stmt = gsi_stmt (gsi);
2201 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
2202 }
a1b23b2f
UW
2203}
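/* For example (hypothetical source, not part of this file):

     short v[2];      // small enough to be kept in a single register
     ... = v[i];      // I is not a compile-time constant

   the walker above finds the ARRAY_REF with the variable index and marks V
   as TREE_ADDRESSABLE, forcing it into memory so the indexed access can be
   expanded.  */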
2204
2e3f842f
L
2205/* This function sets crtl->args.internal_arg_pointer to a virtual
 2206   register if DRAP is needed.  The local register allocator will replace
2207 virtual_incoming_args_rtx with the virtual register. */
2208
2209static void
2210expand_stack_alignment (void)
2211{
2212 rtx drap_rtx;
e94a448f 2213 unsigned int preferred_stack_boundary, incoming_stack_boundary;
2e3f842f
L
2214
2215 if (! SUPPORTS_STACK_ALIGNMENT)
2216 return;
2217
2218 if (cfun->calls_alloca
2219 || cfun->has_nonlocal_label
2220 || crtl->has_nonlocal_goto)
2221 crtl->need_drap = true;
2222
2223 gcc_assert (crtl->stack_alignment_needed
2224 <= crtl->stack_alignment_estimated);
2225
2226 /* Update stack boundary if needed. */
2227 if (targetm.calls.update_stack_boundary)
2228 targetm.calls.update_stack_boundary ();
2229
 2230   /* Update crtl->stack_alignment_estimated and use it later to align the
 2231      stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
2232 exceptions since callgraph doesn't collect incoming stack alignment
2233 in this case. */
2234 if (flag_non_call_exceptions
2235 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
2236 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2237 else
2238 preferred_stack_boundary = crtl->preferred_stack_boundary;
2239 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
2240 crtl->stack_alignment_estimated = preferred_stack_boundary;
2241 if (preferred_stack_boundary > crtl->stack_alignment_needed)
2242 crtl->stack_alignment_needed = preferred_stack_boundary;
2243
e94a448f
L
2244 /* The incoming stack frame has to be aligned at least at
2245 parm_stack_boundary. */
2246 if (crtl->parm_stack_boundary > INCOMING_STACK_BOUNDARY)
2247 incoming_stack_boundary = crtl->parm_stack_boundary;
2248 else
2249 incoming_stack_boundary = INCOMING_STACK_BOUNDARY;
2250
2e3f842f 2251 crtl->stack_realign_needed
e94a448f 2252 = incoming_stack_boundary < crtl->stack_alignment_estimated;
d2d93c32 2253 crtl->stack_realign_tried = crtl->stack_realign_needed;
2e3f842f
L
2254
2255 crtl->stack_realign_processed = true;
2256
2257 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
2258 alignment. */
2259 gcc_assert (targetm.calls.get_drap_rtx != NULL);
2260 drap_rtx = targetm.calls.get_drap_rtx ();
2261
d015f7cc
L
2262 /* stack_realign_drap and drap_rtx must match. */
2263 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
2264
2e3f842f
L
2265 /* Do nothing if NULL is returned, which means DRAP is not needed. */
2266 if (NULL != drap_rtx)
2267 {
2268 crtl->args.internal_arg_pointer = drap_rtx;
2269
2270 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
2271 needed. */
2272 fixup_tail_calls ();
2273 }
2274}
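/* Illustrative numbers (hypothetical target, not taken from this file): with
   an incoming stack boundary of 32 bits and a local object requiring 128-bit
   alignment, crtl->stack_alignment_estimated ends up as 128, so
   stack_realign_needed is set; if the target's get_drap_rtx hook then
   returns a register, incoming arguments are rebased onto that DRAP through
   crtl->args.internal_arg_pointer as done above.  */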
2275
242229bb
JH
2276/* Translate the intermediate representation contained in the CFG
2277 from GIMPLE trees to RTL.
2278
2279 We do conversion per basic block and preserve/update the tree CFG.
2280 This implies we have to do some magic as the CFG can simultaneously
2281 consist of basic blocks containing RTL and GIMPLE trees. This can
61ada8ae 2282    confuse the CFG hooks, so be careful not to manipulate the CFG during
242229bb
JH
2283 the expansion. */
2284
c2924966 2285static unsigned int
726a989a 2286gimple_expand_cfg (void)
242229bb
JH
2287{
2288 basic_block bb, init_block;
2289 sbitmap blocks;
0ef90296
ZD
2290 edge_iterator ei;
2291 edge e;
242229bb 2292
4586b4ca
SB
2293 /* Some backends want to know that we are expanding to RTL. */
2294 currently_expanding_to_rtl = 1;
2295
bf08ebeb
JH
2296 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
2297
55e092c4
JH
2298 insn_locators_alloc ();
2299 if (!DECL_BUILT_IN (current_function_decl))
1751ecd6
AH
2300 {
2301 /* Eventually, all FEs should explicitly set function_start_locus. */
2302 if (cfun->function_start_locus == UNKNOWN_LOCATION)
2303 set_curr_insn_source_location
2304 (DECL_SOURCE_LOCATION (current_function_decl));
2305 else
2306 set_curr_insn_source_location (cfun->function_start_locus);
2307 }
55e092c4
JH
2308 set_curr_insn_block (DECL_INITIAL (current_function_decl));
2309 prologue_locator = curr_insn_locator ();
2310
 2311   /* Make sure the first insn is a note even if we don't want line numbers.
2312 This makes sure the first insn will never be deleted.
2313 Also, final expects a note to appear there. */
2314 emit_note (NOTE_INSN_DELETED);
6429e3be 2315
a1b23b2f
UW
2316 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
2317 discover_nonconstant_array_refs ();
2318
e41b2a33 2319 targetm.expand_to_rtl_hook ();
cb91fab0 2320 crtl->stack_alignment_needed = STACK_BOUNDARY;
2e3f842f
L
2321 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
2322 crtl->stack_alignment_estimated = STACK_BOUNDARY;
cb91fab0
JH
2323 crtl->preferred_stack_boundary = STACK_BOUNDARY;
2324 cfun->cfg->max_jumptable_ents = 0;
2325
e41b2a33 2326
727a31fa 2327 /* Expand the variables recorded during gimple lowering. */
242229bb
JH
2328 expand_used_vars ();
2329
7d69de61
RH
2330 /* Honor stack protection warnings. */
2331 if (warn_stack_protect)
2332 {
e3b5732b 2333 if (cfun->calls_alloca)
c5409249
MLI
2334 warning (OPT_Wstack_protector,
2335 "not protecting local variables: variable length buffer");
cb91fab0 2336 if (has_short_buffer && !crtl->stack_protect_guard)
c5409249
MLI
2337 warning (OPT_Wstack_protector,
2338 "not protecting function: no buffer at least %d bytes long",
7d69de61
RH
2339 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
2340 }
2341
242229bb 2342 /* Set up parameters and prepare for return, for the function. */
b79c5284 2343 expand_function_start (current_function_decl);
242229bb
JH
2344
2345 /* If this function is `main', emit a call to `__main'
2346 to run global initializers, etc. */
2347 if (DECL_NAME (current_function_decl)
2348 && MAIN_NAME_P (DECL_NAME (current_function_decl))
2349 && DECL_FILE_SCOPE_P (current_function_decl))
2350 expand_main_function ();
2351
7d69de61
RH
2352 /* Initialize the stack_protect_guard field. This must happen after the
2353 call to __main (if any) so that the external decl is initialized. */
cb91fab0 2354 if (crtl->stack_protect_guard)
7d69de61
RH
2355 stack_protect_prologue ();
2356
3fbd86b1 2357 /* Register rtl specific functions for cfg. */
242229bb
JH
2358 rtl_register_cfg_hooks ();
2359
2360 init_block = construct_init_block ();
2361
0ef90296 2362 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
2a8a8292 2363 remaining edges in expand_gimple_basic_block. */
0ef90296
ZD
2364 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2365 e->flags &= ~EDGE_EXECUTABLE;
2366
8b11009b 2367 lab_rtx_for_bb = pointer_map_create ();
242229bb 2368 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
10d22567 2369 bb = expand_gimple_basic_block (bb);
bf08ebeb
JH
2370
 2371   /* Expansion is used by optimization passes too; set maybe_hot_insn_p
2372 conservatively to true until they are all profile aware. */
8b11009b 2373 pointer_map_destroy (lab_rtx_for_bb);
cb91fab0 2374 free_histograms ();
242229bb
JH
2375
2376 construct_exit_block ();
55e092c4
JH
2377 set_curr_insn_block (DECL_INITIAL (current_function_decl));
2378 insn_locators_finalize ();
242229bb 2379
4586b4ca
SB
2380 /* We're done expanding trees to RTL. */
2381 currently_expanding_to_rtl = 0;
2382
e8a2a782 2383 /* Convert tree EH labels to RTL EH labels and zap the tree EH table. */
242229bb 2384 convert_from_eh_region_ranges ();
e8a2a782 2385 set_eh_throw_stmt_table (cfun, NULL);
242229bb
JH
2386
2387 rebuild_jump_labels (get_insns ());
2388 find_exception_handler_labels ();
2389
2390 blocks = sbitmap_alloc (last_basic_block);
2391 sbitmap_ones (blocks);
2392 find_many_sub_basic_blocks (blocks);
25cd19de 2393 purge_all_dead_edges ();
242229bb
JH
2394 sbitmap_free (blocks);
2395
2396 compact_blocks ();
2e3f842f
L
2397
2398 expand_stack_alignment ();
2399
242229bb 2400#ifdef ENABLE_CHECKING
62e5bf5d 2401 verify_flow_info ();
242229bb 2402#endif
9f8628ba
PB
2403
2404 /* There's no need to defer outputting this function any more; we
2405 know we want to output it. */
2406 DECL_DEFER_OUTPUT (current_function_decl) = 0;
2407
2408 /* Now that we're done expanding trees to RTL, we shouldn't have any
2409 more CONCATs anywhere. */
2410 generating_concat_p = 0;
2411
b7211528
SB
2412 if (dump_file)
2413 {
2414 fprintf (dump_file,
2415 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
2416 /* And the pass manager will dump RTL for us. */
2417 }
ef330312
PB
2418
2419 /* If we're emitting a nested function, make sure its parent gets
2420 emitted as well. Doing otherwise confuses debug info. */
c22cacf3 2421 {
ef330312
PB
2422 tree parent;
2423 for (parent = DECL_CONTEXT (current_function_decl);
c22cacf3
MS
2424 parent != NULL_TREE;
2425 parent = get_containing_scope (parent))
ef330312 2426 if (TREE_CODE (parent) == FUNCTION_DECL)
c22cacf3 2427 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
ef330312 2428 }
c22cacf3 2429
ef330312
PB
2430 /* We are now committed to emitting code for this function. Do any
 2431      preparation, such as emitting abstract debug info for the inline function
2432 before it gets mangled by optimization. */
2433 if (cgraph_function_possibly_inlined_p (current_function_decl))
2434 (*debug_hooks->outlining_inline_function) (current_function_decl);
2435
2436 TREE_ASM_WRITTEN (current_function_decl) = 1;
4bb1e037
AP
2437
2438 /* After expanding, the return labels are no longer needed. */
2439 return_label = NULL;
2440 naked_return_label = NULL;
55e092c4
JH
2441 /* Tag the blocks with a depth number so that change_scope can find
2442 the common parent easily. */
2443 set_block_levels (DECL_INITIAL (cfun->decl), 0);
bf08ebeb 2444 default_rtl_profile ();
c2924966 2445 return 0;
242229bb
JH
2446}
2447
e3b5732b 2448struct rtl_opt_pass pass_expand =
242229bb 2449{
8ddbbcae 2450 {
e3b5732b 2451 RTL_PASS,
c22cacf3 2452 "expand", /* name */
242229bb 2453 NULL, /* gate */
726a989a 2454 gimple_expand_cfg, /* execute */
242229bb
JH
2455 NULL, /* sub */
2456 NULL, /* next */
2457 0, /* static_pass_number */
c22cacf3 2458 TV_EXPAND, /* tv_id */
242229bb
JH
2459 /* ??? If TER is enabled, we actually receive GENERIC. */
2460 PROP_gimple_leh | PROP_cfg, /* properties_required */
2461 PROP_rtl, /* properties_provided */
bbbe4e7b 2462 PROP_trees, /* properties_destroyed */
242229bb 2463 0, /* todo_flags_start */
ef330312 2464 TODO_dump_func, /* todo_flags_finish */
8ddbbcae 2465 }
242229bb 2466};