1/* A pass for lowering trees to RTL.
2 Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009
3 Free Software Foundation, Inc.
4
5This file is part of GCC.
6
7GCC is free software; you can redistribute it and/or modify
8it under the terms of the GNU General Public License as published by
9the Free Software Foundation; either version 3, or (at your option)
10any later version.
11
12GCC is distributed in the hope that it will be useful,
13but WITHOUT ANY WARRANTY; without even the implied warranty of
14MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
15GNU General Public License for more details.
16
17You should have received a copy of the GNU General Public License
18along with GCC; see the file COPYING3. If not see
19<http://www.gnu.org/licenses/>. */
20
21#include "config.h"
22#include "system.h"
23#include "coretypes.h"
24#include "tm.h"
25#include "tree.h"
26#include "rtl.h"
27#include "tm_p.h"
28#include "basic-block.h"
29#include "function.h"
30#include "expr.h"
31#include "langhooks.h"
32#include "tree-flow.h"
33#include "timevar.h"
34#include "tree-dump.h"
35#include "tree-pass.h"
36#include "except.h"
37#include "flags.h"
38#include "diagnostic.h"
39#include "toplev.h"
ef330312 40#include "debug.h"
7d69de61 41#include "params.h"
ff28a94d 42#include "tree-inline.h"
6946b3f7 43#include "value-prof.h"
e41b2a33 44#include "target.h"
4e3825db 45#include "ssaexpand.h"
7d69de61 46
726a989a 47
48/* This variable holds information helping the rewriting of SSA trees
49 into RTL. */
50struct ssaexpand SA;
51
726a989a
RB
52/* Return an expression tree corresponding to the RHS of GIMPLE
53 statement STMT. */
54
55tree
56gimple_assign_rhs_to_tree (gimple stmt)
57{
58 tree t;
82d6e6fc 59 enum gimple_rhs_class grhs_class;
726a989a 60
82d6e6fc 61 grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));
726a989a 62
82d6e6fc 63 if (grhs_class == GIMPLE_BINARY_RHS)
726a989a
RB
64 t = build2 (gimple_assign_rhs_code (stmt),
65 TREE_TYPE (gimple_assign_lhs (stmt)),
66 gimple_assign_rhs1 (stmt),
67 gimple_assign_rhs2 (stmt));
82d6e6fc 68 else if (grhs_class == GIMPLE_UNARY_RHS)
726a989a
RB
69 t = build1 (gimple_assign_rhs_code (stmt),
70 TREE_TYPE (gimple_assign_lhs (stmt)),
71 gimple_assign_rhs1 (stmt));
82d6e6fc 72 else if (grhs_class == GIMPLE_SINGLE_RHS)
726a989a
RB
73 t = gimple_assign_rhs1 (stmt);
74 else
75 gcc_unreachable ();
76
77 return t;
78}
79
80/* Return an expression tree corresponding to the PREDICATE of GIMPLE_COND
81 statement STMT. */
82
83static tree
84gimple_cond_pred_to_tree (gimple stmt)
85{
86 /* We're sometimes presented with such code:
87 D.123_1 = x < y;
88 if (D.123_1 != 0)
89 ...
90 This would expand to two comparisons which then later might
91 be cleaned up by combine. But some pattern matchers like if-conversion
92 work better when there's only one compare, so make up for this
93 here as a special exception if TER would have made the same change. */
94 tree lhs = gimple_cond_lhs (stmt);
95 if (SA.values
96 && TREE_CODE (lhs) == SSA_NAME
97 && bitmap_bit_p (SA.values, SSA_NAME_VERSION (lhs)))
98 lhs = gimple_assign_rhs_to_tree (SSA_NAME_DEF_STMT (lhs));
4e3825db 99
726a989a 100 return build2 (gimple_cond_code (stmt), boolean_type_node,
4e3825db 101 lhs, gimple_cond_rhs (stmt));
726a989a
RB
102}
103
104/* Helper for gimple_to_tree. Set EXPR_LOCATION for every expression
105 inside *TP. DATA is the location to set. */
106
107static tree
108set_expr_location_r (tree *tp, int *ws ATTRIBUTE_UNUSED, void *data)
109{
110 location_t *loc = (location_t *) data;
111 if (EXPR_P (*tp))
112 SET_EXPR_LOCATION (*tp, *loc);
113
114 return NULL_TREE;
115}
116
117
118/* RTL expansion has traditionally been done on trees, so the
119 transition to doing it on GIMPLE tuples is very invasive to the RTL
120 expander. To facilitate the transition, this function takes a
121 GIMPLE tuple STMT and returns the same statement in the form of a
122 tree. */
123
124static tree
125gimple_to_tree (gimple stmt)
126{
127 tree t;
128 int rn;
129 tree_ann_common_t ann;
130 location_t loc;
131
132 switch (gimple_code (stmt))
133 {
134 case GIMPLE_ASSIGN:
135 {
136 tree lhs = gimple_assign_lhs (stmt);
137
138 t = gimple_assign_rhs_to_tree (stmt);
139 t = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, t);
140 if (gimple_assign_nontemporal_move_p (stmt))
141 MOVE_NONTEMPORAL (t) = true;
142 }
143 break;
144
145 case GIMPLE_COND:
146 t = gimple_cond_pred_to_tree (stmt);
147 t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, NULL_TREE);
148 break;
149
150 case GIMPLE_GOTO:
151 t = build1 (GOTO_EXPR, void_type_node, gimple_goto_dest (stmt));
152 break;
153
154 case GIMPLE_LABEL:
155 t = build1 (LABEL_EXPR, void_type_node, gimple_label_label (stmt));
156 break;
157
158 case GIMPLE_RETURN:
159 {
160 tree retval = gimple_return_retval (stmt);
161
162 if (retval && retval != error_mark_node)
163 {
164 tree result = DECL_RESULT (current_function_decl);
165
166 /* If we are not returning the current function's RESULT_DECL,
167 build an assignment to it. */
168 if (retval != result)
169 {
170 /* I believe that a function's RESULT_DECL is unique. */
171 gcc_assert (TREE_CODE (retval) != RESULT_DECL);
172
173 retval = build2 (MODIFY_EXPR, TREE_TYPE (result),
174 result, retval);
175 }
176 }
177 t = build1 (RETURN_EXPR, void_type_node, retval);
178 }
179 break;
180
181 case GIMPLE_ASM:
182 {
183 size_t i, n;
184 tree out, in, cl;
185 const char *s;
186
187 out = NULL_TREE;
188 n = gimple_asm_noutputs (stmt);
189 if (n > 0)
190 {
191 t = out = gimple_asm_output_op (stmt, 0);
192 for (i = 1; i < n; i++)
193 {
194 TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
195 t = gimple_asm_output_op (stmt, i);
196 }
197 }
198
199 in = NULL_TREE;
200 n = gimple_asm_ninputs (stmt);
201 if (n > 0)
202 {
203 t = in = gimple_asm_input_op (stmt, 0);
204 for (i = 1; i < n; i++)
205 {
206 TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
207 t = gimple_asm_input_op (stmt, i);
208 }
209 }
210
211 cl = NULL_TREE;
212 n = gimple_asm_nclobbers (stmt);
213 if (n > 0)
214 {
215 t = cl = gimple_asm_clobber_op (stmt, 0);
216 for (i = 1; i < n; i++)
217 {
218 TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
219 t = gimple_asm_clobber_op (stmt, i);
220 }
221 }
222
223 s = gimple_asm_string (stmt);
224 t = build4 (ASM_EXPR, void_type_node, build_string (strlen (s), s),
225 out, in, cl);
226 ASM_VOLATILE_P (t) = gimple_asm_volatile_p (stmt);
227 ASM_INPUT_P (t) = gimple_asm_input_p (stmt);
228 }
229 break;
230
231 case GIMPLE_CALL:
232 {
233 size_t i;
234 tree fn;
235 tree_ann_common_t ann;
236
237 t = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);
238
7c9577be 239 CALL_EXPR_FN (t) = gimple_call_fn (stmt);
726a989a 240 TREE_TYPE (t) = gimple_call_return_type (stmt);
726a989a
RB
241 CALL_EXPR_STATIC_CHAIN (t) = gimple_call_chain (stmt);
242
243 for (i = 0; i < gimple_call_num_args (stmt); i++)
244 CALL_EXPR_ARG (t, i) = gimple_call_arg (stmt, i);
245
246 if (!(gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE)))
247 TREE_SIDE_EFFECTS (t) = 1;
248
249 if (gimple_call_flags (stmt) & ECF_NOTHROW)
250 TREE_NOTHROW (t) = 1;
251
252 CALL_EXPR_TAILCALL (t) = gimple_call_tail_p (stmt);
253 CALL_EXPR_RETURN_SLOT_OPT (t) = gimple_call_return_slot_opt_p (stmt);
254 CALL_FROM_THUNK_P (t) = gimple_call_from_thunk_p (stmt);
255 CALL_CANNOT_INLINE_P (t) = gimple_call_cannot_inline_p (stmt);
256 CALL_EXPR_VA_ARG_PACK (t) = gimple_call_va_arg_pack_p (stmt);
257
258 /* If the call has an LHS, then create a MODIFY_EXPR to hold it. */
259 {
260 tree lhs = gimple_call_lhs (stmt);
261
262 if (lhs)
263 t = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, t);
264 }
265
266 /* Record the original call statement, as it may be used
267 to retrieve profile information during expansion. */
7c9577be
RG
268
269 if ((fn = gimple_call_fndecl (stmt)) != NULL_TREE
270 && DECL_BUILT_IN (fn))
726a989a
RB
271 {
272 ann = get_tree_common_ann (t);
273 ann->stmt = stmt;
274 }
275 }
276 break;
277
278 case GIMPLE_SWITCH:
279 {
280 tree label_vec;
281 size_t i;
282 tree elt = gimple_switch_label (stmt, 0);
283
284 label_vec = make_tree_vec (gimple_switch_num_labels (stmt));
285
286 if (!CASE_LOW (elt) && !CASE_HIGH (elt))
287 {
288 for (i = 1; i < gimple_switch_num_labels (stmt); i++)
289 TREE_VEC_ELT (label_vec, i - 1) = gimple_switch_label (stmt, i);
290
291 /* The default case in a SWITCH_EXPR must be at the end of
292 the label vector. */
293 TREE_VEC_ELT (label_vec, i - 1) = gimple_switch_label (stmt, 0);
294 }
295 else
296 {
297 for (i = 0; i < gimple_switch_num_labels (stmt); i++)
298 TREE_VEC_ELT (label_vec, i) = gimple_switch_label (stmt, i);
299 }
300
301 t = build3 (SWITCH_EXPR, void_type_node, gimple_switch_index (stmt),
302 NULL, label_vec);
303 }
304 break;
305
306 case GIMPLE_NOP:
307 case GIMPLE_PREDICT:
308 t = build1 (NOP_EXPR, void_type_node, size_zero_node);
309 break;
310
311 case GIMPLE_RESX:
312 t = build_resx (gimple_resx_region (stmt));
313 break;
314
315 default:
316 if (errorcount == 0)
317 {
318 error ("Unrecognized GIMPLE statement during RTL expansion");
319 print_gimple_stmt (stderr, stmt, 4, 0);
320 gcc_unreachable ();
321 }
322 else
323 {
324 /* Ignore any bad gimple codes if we're going to die anyhow,
325 so we can at least set TREE_ASM_WRITTEN and have the rest
326 of compilation advance without sudden ICE death. */
327 t = build1 (NOP_EXPR, void_type_node, size_zero_node);
328 break;
329 }
330 }
331
332 /* If STMT is inside an exception region, record it in the generated
333 expression. */
334 rn = lookup_stmt_eh_region (stmt);
335 if (rn >= 0)
336 {
337 tree call = get_call_expr_in (t);
338
339 ann = get_tree_common_ann (t);
340 ann->rn = rn;
341
342 /* For a CALL_EXPR on the RHS of an assignment, calls.c looks up
343 the CALL_EXPR, not the assignment statement, for the EH region number. */
344 if (call && call != t)
345 {
346 ann = get_tree_common_ann (call);
347 ann->rn = rn;
348 }
349 }
350
351 /* Set EXPR_LOCATION in all the embedded expressions. */
352 loc = gimple_location (stmt);
353 walk_tree (&t, set_expr_location_r, (void *) &loc, NULL);
354
355 TREE_BLOCK (t) = gimple_block (stmt);
356
357 return t;
358}
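/* Illustrative example (not part of the original source): for a GIMPLE
   assignment such as

     a_1 = b_2 + c_3;

   gimple_assign_rhs_to_tree builds PLUS_EXPR <b_2, c_3> with the type of
   the LHS, and gimple_to_tree wraps it as MODIFY_EXPR <a_1, ...>, after
   which EXPR_LOCATION and TREE_BLOCK are copied from the statement.  */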
359
360
361/* Release back to GC the memory allocated by gimple_to_tree. */
362
363static void
364release_stmt_tree (gimple stmt, tree stmt_tree)
365{
366 tree_ann_common_t ann;
367
368 switch (gimple_code (stmt))
369 {
370 case GIMPLE_ASSIGN:
371 if (get_gimple_rhs_class (gimple_expr_code (stmt)) != GIMPLE_SINGLE_RHS)
372 ggc_free (TREE_OPERAND (stmt_tree, 1));
373 break;
374 case GIMPLE_COND:
375 ggc_free (COND_EXPR_COND (stmt_tree));
376 break;
377 case GIMPLE_RETURN:
378 if (TREE_OPERAND (stmt_tree, 0)
379 && TREE_CODE (TREE_OPERAND (stmt_tree, 0)) == MODIFY_EXPR)
380 ggc_free (TREE_OPERAND (stmt_tree, 0));
381 break;
382 case GIMPLE_CALL:
383 if (gimple_call_lhs (stmt))
384 {
726a989a
RB
385 ann = tree_common_ann (TREE_OPERAND (stmt_tree, 1));
386 if (ann)
387 ggc_free (ann);
388 ggc_free (TREE_OPERAND (stmt_tree, 1));
389 }
726a989a
RB
390 break;
391 default:
392 break;
393 }
394 ann = tree_common_ann (stmt_tree);
395 if (ann)
396 ggc_free (ann);
397 ggc_free (stmt_tree);
398}
399
400
401/* Verify that there is exactly one jump instruction emitted since LAST and
402 attach a REG_BR_PROB note specifying PROBABILITY.
403 ??? We really ought to pass the probability down to RTL expanders and let it
404 re-distribute it when the conditional expands into multiple conditionals.
405 This is however difficult to do. */
ef950eba 406void
10d22567 407add_reg_br_prob_note (rtx last, int probability)
408{
409 if (profile_status == PROFILE_ABSENT)
410 return;
411 for (last = NEXT_INSN (last); last && NEXT_INSN (last); last = NEXT_INSN (last))
2ca202e7 412 if (JUMP_P (last))
e53de54d
JH
413 {
414 /* It is common to emit a condjump-around-jump sequence when we don't know
415 how to reverse the conditional. Special case this. */
416 if (!any_condjump_p (last)
2ca202e7 417 || !JUMP_P (NEXT_INSN (last))
e53de54d 418 || !simplejump_p (NEXT_INSN (last))
fa1ff4eb 419 || !NEXT_INSN (NEXT_INSN (last))
2ca202e7 420 || !BARRIER_P (NEXT_INSN (NEXT_INSN (last)))
fa1ff4eb 421 || !NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))
2ca202e7 422 || !LABEL_P (NEXT_INSN (NEXT_INSN (NEXT_INSN (last))))
e53de54d
JH
423 || NEXT_INSN (NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))))
424 goto failed;
41806d92 425 gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
65c5f2a6
ILT
426 add_reg_note (last, REG_BR_PROB,
427 GEN_INT (REG_BR_PROB_BASE - probability));
e53de54d
JH
428 return;
429 }
2ca202e7 430 if (!last || !JUMP_P (last) || !any_condjump_p (last))
41806d92
NS
431 goto failed;
432 gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
65c5f2a6 433 add_reg_note (last, REG_BR_PROB, GEN_INT (probability));
e53de54d
JH
434 return;
435failed:
436 if (dump_file)
437 fprintf (dump_file, "Failed to add probability note\n");
438}
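/* Illustrative note on the special case above (a paraphrase, not new
   behavior): the condjump-around-jump pattern matched is an insn sequence
   of the shape

     conditional jump   (the condition we could not reverse)
     unconditional simplejump
     barrier
     code_label         (the fall-through target)
     <nothing follows>

   and the note attached to the conditional jump uses
   REG_BR_PROB_BASE - PROBABILITY, because the branch sense is inverted
   relative to the original condition.  */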
439
80c7a9eb 440
1f6d3a08
RH
441#ifndef STACK_ALIGNMENT_NEEDED
442#define STACK_ALIGNMENT_NEEDED 1
443#endif
444
4e3825db
MM
445#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
446
447/* Associate declaration T with storage space X. If T is not an
448 SSA name, this is exactly SET_DECL_RTL; otherwise make the
449 partition of T associated with X. */
450static inline void
451set_rtl (tree t, rtx x)
452{
453 if (TREE_CODE (t) == SSA_NAME)
454 {
455 SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
456 if (x && !MEM_P (x))
457 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
eb7adebc
MM
458 /* For the benefit of debug information at -O0 (where vartracking
459 doesn't run) record the place also in the base DECL if it's
460 a normal variable (not a parameter). */
461 if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
462 {
463 tree var = SSA_NAME_VAR (t);
464 /* If we don't yet have something recorded, just record it now. */
465 if (!DECL_RTL_SET_P (var))
466 SET_DECL_RTL (var, x);
467 /* If we have it set already to "multiple places" don't
468 change this. */
469 else if (DECL_RTL (var) == pc_rtx)
470 ;
471 /* If we have something recorded and it's not the same place
472 as we want to record now, we have multiple partitions for the
473 same base variable, with different places. We can't just
474 randomly choose one, hence we have to say that we don't know.
475 This only happens with optimization, and there var-tracking
476 will figure out the right thing. */
477 else if (DECL_RTL (var) != x)
478 SET_DECL_RTL (var, pc_rtx);
479 }
4e3825db
MM
480 }
481 else
482 SET_DECL_RTL (t, x);
483}
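/* Example of the -O0 bookkeeping above (illustrative, with hypothetical
   SSA names): if partitions for i_1 and i_3 share the base variable i,
   the first set_rtl call records DECL_RTL (i) as the place of i_1; a
   later call with a different place resets DECL_RTL (i) to pc_rtx, the
   "multiple places" marker, leaving var-tracking to sort it out when
   optimizing.  */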
1f6d3a08
RH
484
485/* This structure holds data relevant to one variable that will be
486 placed in a stack slot. */
487struct stack_var
488{
489 /* The Variable. */
490 tree decl;
491
492 /* The offset of the variable. During partitioning, this is the
493 offset relative to the partition. After partitioning, this
494 is relative to the stack frame. */
495 HOST_WIDE_INT offset;
496
497 /* Initially, the size of the variable. Later, the size of the partition,
498 if this variable becomes its partition's representative. */
499 HOST_WIDE_INT size;
500
501 /* The *byte* alignment required for this variable. Or, as with the
502 size, the alignment of this partition. */
503 unsigned int alignb;
504
505 /* The partition representative. */
506 size_t representative;
507
508 /* The next stack variable in the partition, or EOC. */
509 size_t next;
510};
511
512#define EOC ((size_t)-1)
513
514/* We have an array of such objects while deciding allocation. */
515static struct stack_var *stack_vars;
516static size_t stack_vars_alloc;
517static size_t stack_vars_num;
518
fa10beec 519/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
1f6d3a08
RH
520 is non-decreasing. */
521static size_t *stack_vars_sorted;
522
523/* We have an interference graph between such objects. This graph
524 is lower triangular. */
525static bool *stack_vars_conflict;
526static size_t stack_vars_conflict_alloc;
527
528/* The phase of the stack frame. This is the known misalignment of
529 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
530 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
531static int frame_phase;
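/* Worked example for frame_phase (assumed, target-independent numbers):
   with PREFERRED_STACK_BOUNDARY of 128 bits (16 bytes) and
   STARTING_FRAME_OFFSET of 8, expand_used_vars computes off = 8 and
   frame_phase = 16 - 8 = 8, so that frame_offset + frame_phase starts at
   16 and the invariant above holds.  */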
532
7d69de61
RH
533/* Used during expand_used_vars to remember if we saw any decls for
534 which we'd like to enable stack smashing protection. */
535static bool has_protected_decls;
536
537/* Used during expand_used_vars. Remember if we saw a character buffer
538 smaller than our cutoff threshold. Used for -Wstack-protector. */
539static bool has_short_buffer;
1f6d3a08
RH
540
541/* Discover the byte alignment to use for DECL. Ignore alignment
542 we cannot honor given the expected alignment of the stack boundary. */
543
544static unsigned int
545get_decl_align_unit (tree decl)
546{
547 unsigned int align;
548
9bfaf89d 549 align = LOCAL_DECL_ALIGNMENT (decl);
2e3f842f
L
550
551 if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
552 align = MAX_SUPPORTED_STACK_ALIGNMENT;
553
554 if (SUPPORTS_STACK_ALIGNMENT)
555 {
556 if (crtl->stack_alignment_estimated < align)
557 {
558 gcc_assert(!crtl->stack_realign_processed);
559 crtl->stack_alignment_estimated = align;
560 }
561 }
562
563 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
564 So here we only make sure stack_alignment_needed >= align. */
cb91fab0
JH
565 if (crtl->stack_alignment_needed < align)
566 crtl->stack_alignment_needed = align;
f85882d8
JY
567 if (crtl->max_used_stack_slot_alignment < align)
568 crtl->max_used_stack_slot_alignment = align;
1f6d3a08
RH
569
570 return align / BITS_PER_UNIT;
571}
572
573/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
574 Return the frame offset. */
575
576static HOST_WIDE_INT
577alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
578{
579 HOST_WIDE_INT offset, new_frame_offset;
580
581 new_frame_offset = frame_offset;
582 if (FRAME_GROWS_DOWNWARD)
583 {
584 new_frame_offset -= size + frame_phase;
585 new_frame_offset &= -align;
586 new_frame_offset += frame_phase;
587 offset = new_frame_offset;
588 }
589 else
590 {
591 new_frame_offset -= frame_phase;
592 new_frame_offset += align - 1;
593 new_frame_offset &= -align;
594 new_frame_offset += frame_phase;
595 offset = new_frame_offset;
596 new_frame_offset += size;
597 }
598 frame_offset = new_frame_offset;
599
9fb798d7
EB
600 if (frame_offset_overflow (frame_offset, cfun->decl))
601 frame_offset = offset = 0;
602
1f6d3a08
RH
603 return offset;
604}
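/* Worked example (illustrative values): on a FRAME_GROWS_DOWNWARD target
   with frame_phase == 0 and frame_offset == -20, a request for 8 bytes at
   8-byte alignment computes

     new_frame_offset = -20 - 8 = -28, rounded down by &= -8 to -32

   so the caller gets offset -32 and frame_offset becomes -32.  */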
605
606/* Accumulate DECL into STACK_VARS. */
607
608static void
609add_stack_var (tree decl)
610{
611 if (stack_vars_num >= stack_vars_alloc)
612 {
613 if (stack_vars_alloc)
614 stack_vars_alloc = stack_vars_alloc * 3 / 2;
615 else
616 stack_vars_alloc = 32;
617 stack_vars
618 = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
619 }
620 stack_vars[stack_vars_num].decl = decl;
621 stack_vars[stack_vars_num].offset = 0;
4e3825db
MM
622 stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
623 stack_vars[stack_vars_num].alignb = get_decl_align_unit (SSAVAR (decl));
1f6d3a08
RH
624
625 /* All variables are initially in their own partition. */
626 stack_vars[stack_vars_num].representative = stack_vars_num;
627 stack_vars[stack_vars_num].next = EOC;
628
629 /* Ensure that this decl doesn't get put onto the list twice. */
4e3825db 630 set_rtl (decl, pc_rtx);
1f6d3a08
RH
631
632 stack_vars_num++;
633}
634
635/* Compute the linear index of a lower-triangular coordinate (I, J). */
636
637static size_t
638triangular_index (size_t i, size_t j)
639{
640 if (i < j)
641 {
642 size_t t;
643 t = i, i = j, j = t;
644 }
645 return (i * (i + 1)) / 2 + j;
646}
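/* Illustrative mapping (not in the original source): the lower triangle
   is linearized row by row, e.g.

     (0,0) -> 0   (1,0) -> 1   (1,1) -> 2
     (2,0) -> 3   (2,1) -> 4   (2,2) -> 5

   and the function is symmetric in I and J because the pair is swapped
   so that i >= j before computing i*(i+1)/2 + j.  */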
647
648/* Ensure that STACK_VARS_CONFLICT is large enough for N objects. */
649
650static void
651resize_stack_vars_conflict (size_t n)
652{
653 size_t size = triangular_index (n-1, n-1) + 1;
654
655 if (size <= stack_vars_conflict_alloc)
656 return;
657
658 stack_vars_conflict = XRESIZEVEC (bool, stack_vars_conflict, size);
659 memset (stack_vars_conflict + stack_vars_conflict_alloc, 0,
660 (size - stack_vars_conflict_alloc) * sizeof (bool));
661 stack_vars_conflict_alloc = size;
662}
663
664/* Make the decls associated with luid's X and Y conflict. */
665
666static void
667add_stack_var_conflict (size_t x, size_t y)
668{
669 size_t index = triangular_index (x, y);
670 gcc_assert (index < stack_vars_conflict_alloc);
671 stack_vars_conflict[index] = true;
672}
673
674/* Check whether the decls associated with luid's X and Y conflict. */
675
676static bool
677stack_var_conflict_p (size_t x, size_t y)
678{
679 size_t index = triangular_index (x, y);
680 gcc_assert (index < stack_vars_conflict_alloc);
681 return stack_vars_conflict[index];
682}
d239ed56
SB
683
684/* Returns true if TYPE is or contains a union type. */
685
686static bool
687aggregate_contains_union_type (tree type)
688{
689 tree field;
690
691 if (TREE_CODE (type) == UNION_TYPE
692 || TREE_CODE (type) == QUAL_UNION_TYPE)
693 return true;
694 if (TREE_CODE (type) == ARRAY_TYPE)
695 return aggregate_contains_union_type (TREE_TYPE (type));
696 if (TREE_CODE (type) != RECORD_TYPE)
697 return false;
698
699 for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
700 if (TREE_CODE (field) == FIELD_DECL)
701 if (aggregate_contains_union_type (TREE_TYPE (field)))
702 return true;
703
704 return false;
705}
706
707/* A subroutine of expand_used_vars. If two variables X and Y have alias
708 sets that do not conflict, we still need to add a conflict for them in
709 the interference graph. We also need to make sure to add conflicts
710 for union-containing structures. Otherwise RTL alias analysis comes along
711 and due to type based aliasing rules decides that for two overlapping
712 union temporaries { short s; int i; } accesses to the same mem through
713 different types may not alias and happily reorders stores across
714 life-time boundaries of the temporaries (See PR25654).
715 We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P. */
1f6d3a08
RH
716
717static void
718add_alias_set_conflicts (void)
719{
720 size_t i, j, n = stack_vars_num;
721
722 for (i = 0; i < n; ++i)
723 {
a4d25453
RH
724 tree type_i = TREE_TYPE (stack_vars[i].decl);
725 bool aggr_i = AGGREGATE_TYPE_P (type_i);
d239ed56 726 bool contains_union;
1f6d3a08 727
d239ed56 728 contains_union = aggregate_contains_union_type (type_i);
1f6d3a08
RH
729 for (j = 0; j < i; ++j)
730 {
a4d25453
RH
731 tree type_j = TREE_TYPE (stack_vars[j].decl);
732 bool aggr_j = AGGREGATE_TYPE_P (type_j);
d239ed56
SB
733 if (aggr_i != aggr_j
734 /* Either the objects conflict by means of type based
735 aliasing rules, or we need to add a conflict. */
736 || !objects_must_conflict_p (type_i, type_j)
737 /* In case the types do not conflict ensure that access
738 to elements will conflict. In case of unions we have
739 to be careful as type based aliasing rules may say
740 access to the same memory does not conflict. So play
741 safe and add a conflict in this case. */
742 || contains_union)
1f6d3a08
RH
743 add_stack_var_conflict (i, j);
744 }
745 }
746}
747
748/* A subroutine of partition_stack_vars. A comparison function for qsort,
4e3825db 749 sorting an array of indices by the size and type of the object. */
1f6d3a08
RH
750
751static int
752stack_var_size_cmp (const void *a, const void *b)
753{
754 HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
755 HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
4e3825db
MM
756 tree decla, declb;
757 unsigned int uida, uidb;
1f6d3a08
RH
758
759 if (sa < sb)
760 return -1;
761 if (sa > sb)
762 return 1;
4e3825db
MM
763 decla = stack_vars[*(const size_t *)a].decl;
764 declb = stack_vars[*(const size_t *)b].decl;
765 /* For stack variables of the same size use an id of the decls
766 to make the sort stable. Two SSA names are compared by their
767 version, SSA names come before non-SSA names, and two normal
768 decls are compared by their DECL_UID. */
769 if (TREE_CODE (decla) == SSA_NAME)
770 {
771 if (TREE_CODE (declb) == SSA_NAME)
772 uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
773 else
774 return -1;
775 }
776 else if (TREE_CODE (declb) == SSA_NAME)
777 return 1;
778 else
779 uida = DECL_UID (decla), uidb = DECL_UID (declb);
79f802f5
RG
780 if (uida < uidb)
781 return -1;
782 if (uida > uidb)
783 return 1;
1f6d3a08
RH
784 return 0;
785}
786
787/* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
788 partitioning algorithm. Partitions A and B are known to be non-conflicting.
789 Merge them into a single partition A.
790
791 At the same time, add OFFSET to all variables in partition B. At the end
792 of the partitioning process we'll have a nice block that is easy to lay
793 out within the stack frame. */
794
795static void
796union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
797{
798 size_t i, last;
799
800 /* Update each element of partition B with the given offset,
801 and merge them into partition A. */
802 for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
803 {
804 stack_vars[i].offset += offset;
805 stack_vars[i].representative = a;
806 }
807 stack_vars[last].next = stack_vars[a].next;
808 stack_vars[a].next = b;
809
810 /* Update the required alignment of partition A to account for B. */
811 if (stack_vars[a].alignb < stack_vars[b].alignb)
812 stack_vars[a].alignb = stack_vars[b].alignb;
813
814 /* Update the interference graph and merge the conflicts. */
815 for (last = stack_vars_num, i = 0; i < last; ++i)
816 if (stack_var_conflict_p (b, i))
817 add_stack_var_conflict (a, i);
818}
819
820/* A subroutine of expand_used_vars. Binpack the variables into
821 partitions constrained by the interference graph. The overall
822 algorithm used is as follows:
823
824 Sort the objects by size.
825 For each object A {
826 S = size(A)
827 O = 0
828 loop {
829 Look for the largest non-conflicting object B with size <= S.
830 UNION (A, B)
831 offset(B) = O
832 O += size(B)
833 S -= size(B)
834 }
835 }
836*/
837
838static void
839partition_stack_vars (void)
840{
841 size_t si, sj, n = stack_vars_num;
842
843 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
844 for (si = 0; si < n; ++si)
845 stack_vars_sorted[si] = si;
846
847 if (n == 1)
848 return;
849
850 qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);
851
852 /* Special case: detect when all variables conflict, and thus we can't
853 do anything during the partitioning loop. It isn't uncommon (with
854 C code at least) to declare all variables at the top of the function,
855 and if we're not inlining, then all variables will be in the same scope.
856 Take advantage of very fast libc routines for this scan. */
857 gcc_assert (sizeof(bool) == sizeof(char));
858 if (memchr (stack_vars_conflict, false, stack_vars_conflict_alloc) == NULL)
859 return;
860
861 for (si = 0; si < n; ++si)
862 {
863 size_t i = stack_vars_sorted[si];
864 HOST_WIDE_INT isize = stack_vars[i].size;
865 HOST_WIDE_INT offset = 0;
866
867 for (sj = si; sj-- > 0; )
868 {
869 size_t j = stack_vars_sorted[sj];
870 HOST_WIDE_INT jsize = stack_vars[j].size;
871 unsigned int jalign = stack_vars[j].alignb;
872
873 /* Ignore objects that aren't partition representatives. */
874 if (stack_vars[j].representative != j)
875 continue;
876
877 /* Ignore objects too large for the remaining space. */
878 if (isize < jsize)
879 continue;
880
881 /* Ignore conflicting objects. */
882 if (stack_var_conflict_p (i, j))
883 continue;
884
885 /* Refine the remaining space check to include alignment. */
886 if (offset & (jalign - 1))
887 {
888 HOST_WIDE_INT toff = offset;
889 toff += jalign - 1;
890 toff &= -(HOST_WIDE_INT)jalign;
891 if (isize - (toff - offset) < jsize)
892 continue;
893
894 isize -= toff - offset;
895 offset = toff;
896 }
897
898 /* UNION the objects, placing J at OFFSET. */
899 union_stack_vars (i, j, offset);
900
901 isize -= jsize;
902 if (isize == 0)
903 break;
904 }
905 }
906}
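/* Illustrative walk-through with hypothetical sizes: given two
   non-conflicting variables of 32 and 16 bytes, the ascending size sort
   makes the 32-byte variable the later (larger) candidate, and
   union_stack_vars places the 16-byte variable at offset 0 inside its
   partition, so both share a single 32-byte slot instead of using 48
   bytes of frame space.  */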
907
908/* A debugging aid for expand_used_vars. Dump the generated partitions. */
909
910static void
911dump_stack_var_partition (void)
912{
913 size_t si, i, j, n = stack_vars_num;
914
915 for (si = 0; si < n; ++si)
916 {
917 i = stack_vars_sorted[si];
918
919 /* Skip variables that aren't partition representatives, for now. */
920 if (stack_vars[i].representative != i)
921 continue;
922
923 fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
924 " align %u\n", (unsigned long) i, stack_vars[i].size,
925 stack_vars[i].alignb);
926
927 for (j = i; j != EOC; j = stack_vars[j].next)
928 {
929 fputc ('\t', dump_file);
930 print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
931 fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
1c50a20a 932 stack_vars[j].offset);
1f6d3a08
RH
933 }
934 }
935}
936
937/* Assign rtl to DECL at frame offset OFFSET. */
938
939static void
940expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
941{
2ac26e15
L
942 /* Alignment is unsigned. */
943 unsigned HOST_WIDE_INT align;
1f6d3a08 944 rtx x;
c22cacf3 945
1f6d3a08
RH
946 /* If this fails, we've overflowed the stack frame. Error nicely? */
947 gcc_assert (offset == trunc_int_for_mode (offset, Pmode));
948
949 x = plus_constant (virtual_stack_vars_rtx, offset);
4e3825db 950 x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);
1f6d3a08 951
4e3825db
MM
952 if (TREE_CODE (decl) != SSA_NAME)
953 {
954 /* Set the alignment we actually gave this decl if it isn't an SSA name.
955 If it is, we generate stack slots only accidentally so it isn't as
956 important; we'll simply use the alignment that is already set. */
957 offset -= frame_phase;
958 align = offset & -offset;
959 align *= BITS_PER_UNIT;
960 if (align == 0)
961 align = STACK_BOUNDARY;
962 else if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
963 align = MAX_SUPPORTED_STACK_ALIGNMENT;
964
965 DECL_ALIGN (decl) = align;
966 DECL_USER_ALIGN (decl) = 0;
967 }
968
969 set_mem_attributes (x, SSAVAR (decl), true);
970 set_rtl (decl, x);
1f6d3a08
RH
971}
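/* Note on the alignment computation above (illustrative): OFFSET & -OFFSET
   isolates the lowest set bit, i.e. the largest power of two dividing the
   offset.  For example, an offset of 24 yields 8, so the decl is known to
   be 8-byte aligned relative to the frame phase; the value is then scaled
   to bits and clamped to MAX_SUPPORTED_STACK_ALIGNMENT.  */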
972
973/* A subroutine of expand_used_vars. Give each partition representative
974 a unique location within the stack frame. Update each partition member
975 with that location. */
976
977static void
7d69de61 978expand_stack_vars (bool (*pred) (tree))
1f6d3a08
RH
979{
980 size_t si, i, j, n = stack_vars_num;
981
982 for (si = 0; si < n; ++si)
983 {
984 HOST_WIDE_INT offset;
985
986 i = stack_vars_sorted[si];
987
988 /* Skip variables that aren't partition representatives, for now. */
989 if (stack_vars[i].representative != i)
990 continue;
991
7d69de61
RH
992 /* Skip variables that have already had rtl assigned. See also
993 add_stack_var where we perpetrate this pc_rtx hack. */
4e3825db
MM
994 if ((TREE_CODE (stack_vars[i].decl) == SSA_NAME
995 ? SA.partition_to_pseudo[var_to_partition (SA.map, stack_vars[i].decl)]
996 : DECL_RTL (stack_vars[i].decl)) != pc_rtx)
7d69de61
RH
997 continue;
998
c22cacf3 999 /* Check the predicate to see whether this variable should be
7d69de61
RH
1000 allocated in this pass. */
1001 if (pred && !pred (stack_vars[i].decl))
1002 continue;
1003
1f6d3a08
RH
1004 offset = alloc_stack_frame_space (stack_vars[i].size,
1005 stack_vars[i].alignb);
1006
1007 /* Create rtl for each variable based on their location within the
1008 partition. */
1009 for (j = i; j != EOC; j = stack_vars[j].next)
f8da8190
AP
1010 {
1011 gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
1012 expand_one_stack_var_at (stack_vars[j].decl,
1013 stack_vars[j].offset + offset);
1014 }
1f6d3a08
RH
1015 }
1016}
1017
ff28a94d
JH
1018/* Take into account all sizes of partitions and reset DECL_RTLs. */
1019static HOST_WIDE_INT
1020account_stack_vars (void)
1021{
1022 size_t si, j, i, n = stack_vars_num;
1023 HOST_WIDE_INT size = 0;
1024
1025 for (si = 0; si < n; ++si)
1026 {
1027 i = stack_vars_sorted[si];
1028
1029 /* Skip variables that aren't partition representatives, for now. */
1030 if (stack_vars[i].representative != i)
1031 continue;
1032
1033 size += stack_vars[i].size;
1034 for (j = i; j != EOC; j = stack_vars[j].next)
4e3825db 1035 set_rtl (stack_vars[j].decl, NULL);
ff28a94d
JH
1036 }
1037 return size;
1038}
1039
1f6d3a08
RH
1040/* A subroutine of expand_one_var. Called to immediately assign rtl
1041 to a variable to be allocated in the stack frame. */
1042
1043static void
1044expand_one_stack_var (tree var)
1045{
1046 HOST_WIDE_INT size, offset, align;
1047
4e3825db
MM
1048 size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
1049 align = get_decl_align_unit (SSAVAR (var));
1f6d3a08
RH
1050 offset = alloc_stack_frame_space (size, align);
1051
1052 expand_one_stack_var_at (var, offset);
1053}
1054
1f6d3a08
RH
1055/* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1056 that will reside in a hard register. */
1057
1058static void
1059expand_one_hard_reg_var (tree var)
1060{
1061 rest_of_decl_compilation (var, 0, 0);
1062}
1063
1064/* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1065 that will reside in a pseudo register. */
1066
1067static void
1068expand_one_register_var (tree var)
1069{
4e3825db
MM
1070 tree decl = SSAVAR (var);
1071 tree type = TREE_TYPE (decl);
1f6d3a08
RH
1072 int unsignedp = TYPE_UNSIGNED (type);
1073 enum machine_mode reg_mode
4e3825db 1074 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
1f6d3a08
RH
1075 rtx x = gen_reg_rtx (reg_mode);
1076
4e3825db 1077 set_rtl (var, x);
1f6d3a08
RH
1078
1079 /* Note if the object is a user variable. */
4e3825db
MM
1080 if (!DECL_ARTIFICIAL (decl))
1081 mark_user_reg (x);
1f6d3a08 1082
61021c2c 1083 if (POINTER_TYPE_P (type))
4e3825db 1084 mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
1f6d3a08
RH
1085}
1086
1087/* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
128a79fb 1088 has some associated error, e.g. its type is error-mark. We just need
1f6d3a08
RH
1089 to pick something that won't crash the rest of the compiler. */
1090
1091static void
1092expand_one_error_var (tree var)
1093{
1094 enum machine_mode mode = DECL_MODE (var);
1095 rtx x;
1096
1097 if (mode == BLKmode)
1098 x = gen_rtx_MEM (BLKmode, const0_rtx);
1099 else if (mode == VOIDmode)
1100 x = const0_rtx;
1101 else
1102 x = gen_reg_rtx (mode);
1103
1104 SET_DECL_RTL (var, x);
1105}
1106
c22cacf3 1107/* A subroutine of expand_one_var. VAR is a variable that will be
1f6d3a08
RH
1108 allocated to the local stack frame. Return true if we wish to
1109 add VAR to STACK_VARS so that it will be coalesced with other
1110 variables. Return false to allocate VAR immediately.
1111
1112 This function is used to reduce the number of variables considered
1113 for coalescing, which reduces the size of the quadratic problem. */
1114
1115static bool
1116defer_stack_allocation (tree var, bool toplevel)
1117{
7d69de61
RH
1118 /* If stack protection is enabled, *all* stack variables must be deferred,
1119 so that we can re-order the strings to the top of the frame. */
1120 if (flag_stack_protect)
1121 return true;
1122
1f6d3a08
RH
1123 /* Variables in the outermost scope automatically conflict with
1124 every other variable. The only reason to want to defer them
1125 at all is that, after sorting, we can more efficiently pack
1126 small variables in the stack frame. Continue to defer at -O2. */
1127 if (toplevel && optimize < 2)
1128 return false;
1129
1130 /* Without optimization, *most* variables are allocated from the
1131 stack, which makes the quadratic problem large exactly when we
c22cacf3 1132 want compilation to proceed as quickly as possible. On the
1f6d3a08
RH
1133 other hand, we don't want the function's stack frame size to
1134 get completely out of hand. So we avoid adding scalars and
1135 "small" aggregates to the list at all. */
1136 if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
1137 return false;
1138
1139 return true;
1140}
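/* Illustrative behavior (hypothetical decls): at -O0 without
   -fstack-protector, an int declared in an inner block (4 bytes < 32) is
   allocated immediately, while a 64-byte buffer in the same block is
   deferred and considered for packing; variables in the outermost scope
   are never deferred below -O2, and with -fstack-protector every stack
   variable is deferred so the buffers can be moved to the top of the
   frame.  */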
1141
1142/* A subroutine of expand_used_vars. Expand one variable according to
1143 its flavor. Variables to be placed on the stack are not actually
1144 expanded yet, merely recorded.
1145 When REALLY_EXPAND is false, only record the stack variables to be allocated.
1146 Return the stack usage this variable is expected to take.
1147*/
1f6d3a08 1148
ff28a94d
JH
1149static HOST_WIDE_INT
1150expand_one_var (tree var, bool toplevel, bool really_expand)
1f6d3a08 1151{
4e3825db
MM
1152 tree origvar = var;
1153 var = SSAVAR (var);
1154
2e3f842f
L
1155 if (SUPPORTS_STACK_ALIGNMENT
1156 && TREE_TYPE (var) != error_mark_node
1157 && TREE_CODE (var) == VAR_DECL)
1158 {
1159 unsigned int align;
1160
1161 /* Because we don't know if VAR will be in register or on stack,
1162 we conservatively assume it will be on stack even if VAR is
1163 eventually put into register after RA pass. For non-automatic
1164 variables, which won't be on stack, we collect alignment of
1165 type and ignore user specified alignment. */
1166 if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1167 align = TYPE_ALIGN (TREE_TYPE (var));
1168 else
1169 align = DECL_ALIGN (var);
1170
1171 if (crtl->stack_alignment_estimated < align)
1172 {
1173 /* stack_alignment_estimated shouldn't change after the stack
1174 realign decision has been made. */
1175 gcc_assert(!crtl->stack_realign_processed);
1176 crtl->stack_alignment_estimated = align;
1177 }
1178 }
1179
4e3825db
MM
1180 if (TREE_CODE (origvar) == SSA_NAME)
1181 {
1182 gcc_assert (TREE_CODE (var) != VAR_DECL
1183 || (!DECL_EXTERNAL (var)
1184 && !DECL_HAS_VALUE_EXPR_P (var)
1185 && !TREE_STATIC (var)
4e3825db
MM
1186 && TREE_TYPE (var) != error_mark_node
1187 && !DECL_HARD_REGISTER (var)
1188 && really_expand));
1189 }
1190 if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
4846b435 1191 ;
1f6d3a08
RH
1192 else if (DECL_EXTERNAL (var))
1193 ;
833b3afe 1194 else if (DECL_HAS_VALUE_EXPR_P (var))
1f6d3a08
RH
1195 ;
1196 else if (TREE_STATIC (var))
7e8b322a 1197 ;
eb7adebc 1198 else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
1f6d3a08
RH
1199 ;
1200 else if (TREE_TYPE (var) == error_mark_node)
ff28a94d
JH
1201 {
1202 if (really_expand)
1203 expand_one_error_var (var);
1204 }
4e3825db 1205 else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
ff28a94d
JH
1206 {
1207 if (really_expand)
1208 expand_one_hard_reg_var (var);
1209 }
1f6d3a08 1210 else if (use_register_for_decl (var))
ff28a94d
JH
1211 {
1212 if (really_expand)
4e3825db 1213 expand_one_register_var (origvar);
ff28a94d 1214 }
1f6d3a08 1215 else if (defer_stack_allocation (var, toplevel))
4e3825db 1216 add_stack_var (origvar);
1f6d3a08 1217 else
ff28a94d 1218 {
bd9f1b4b 1219 if (really_expand)
4e3825db 1220 expand_one_stack_var (origvar);
ff28a94d
JH
1221 return tree_low_cst (DECL_SIZE_UNIT (var), 1);
1222 }
1223 return 0;
1f6d3a08
RH
1224}
1225
1226/* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1227 expanding variables. Those variables that can be put into registers
1228 are allocated pseudos; those that can't are put on the stack.
1229
1230 TOPLEVEL is true if this is the outermost BLOCK. */
1231
1232static void
1233expand_used_vars_for_block (tree block, bool toplevel)
1234{
1235 size_t i, j, old_sv_num, this_sv_num, new_sv_num;
1236 tree t;
1237
1238 old_sv_num = toplevel ? 0 : stack_vars_num;
1239
1240 /* Expand all variables at this level. */
1241 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
7e8b322a 1242 if (TREE_USED (t))
ff28a94d 1243 expand_one_var (t, toplevel, true);
1f6d3a08
RH
1244
1245 this_sv_num = stack_vars_num;
1246
1247 /* Expand all variables at containing levels. */
1248 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1249 expand_used_vars_for_block (t, false);
1250
1251 /* Since we do not track exact variable lifetimes (which is not even
6fc0bb99 1252 possible for variables whose address escapes), we mirror the block
1f6d3a08
RH
1253 tree in the interference graph. Here we cause all variables at this
1254 level, and all sublevels, to conflict. Do make certain that a
1255 variable conflicts with itself. */
1256 if (old_sv_num < this_sv_num)
1257 {
1258 new_sv_num = stack_vars_num;
1259 resize_stack_vars_conflict (new_sv_num);
1260
1261 for (i = old_sv_num; i < new_sv_num; ++i)
f4a6d54e
RH
1262 for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
1263 add_stack_var_conflict (i, j);
1f6d3a08
RH
1264 }
1265}
1266
1267/* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1268 and clear TREE_USED on all local variables. */
1269
1270static void
1271clear_tree_used (tree block)
1272{
1273 tree t;
1274
1275 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
1276 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1277 TREE_USED (t) = 0;
1278
1279 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1280 clear_tree_used (t);
1281}
1282
7d69de61
RH
1283/* Examine TYPE and determine a bit mask of the following features. */
1284
1285#define SPCT_HAS_LARGE_CHAR_ARRAY 1
1286#define SPCT_HAS_SMALL_CHAR_ARRAY 2
1287#define SPCT_HAS_ARRAY 4
1288#define SPCT_HAS_AGGREGATE 8
1289
1290static unsigned int
1291stack_protect_classify_type (tree type)
1292{
1293 unsigned int ret = 0;
1294 tree t;
1295
1296 switch (TREE_CODE (type))
1297 {
1298 case ARRAY_TYPE:
1299 t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
1300 if (t == char_type_node
1301 || t == signed_char_type_node
1302 || t == unsigned_char_type_node)
1303 {
15362b89
JJ
1304 unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
1305 unsigned HOST_WIDE_INT len;
7d69de61 1306
15362b89
JJ
1307 if (!TYPE_SIZE_UNIT (type)
1308 || !host_integerp (TYPE_SIZE_UNIT (type), 1))
1309 len = max;
7d69de61 1310 else
15362b89 1311 len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);
7d69de61
RH
1312
1313 if (len < max)
1314 ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
1315 else
1316 ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
1317 }
1318 else
1319 ret = SPCT_HAS_ARRAY;
1320 break;
1321
1322 case UNION_TYPE:
1323 case QUAL_UNION_TYPE:
1324 case RECORD_TYPE:
1325 ret = SPCT_HAS_AGGREGATE;
1326 for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
1327 if (TREE_CODE (t) == FIELD_DECL)
1328 ret |= stack_protect_classify_type (TREE_TYPE (t));
1329 break;
1330
1331 default:
1332 break;
1333 }
1334
1335 return ret;
1336}
1337
a4d05547
KH
1338/* Return nonzero if DECL should be segregated into the "vulnerable" upper
1339 part of the local stack frame. Remember if we ever return nonzero for
7d69de61
RH
1340 any variable in this function. The return value is the phase number in
1341 which the variable should be allocated. */
1342
1343static int
1344stack_protect_decl_phase (tree decl)
1345{
1346 unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
1347 int ret = 0;
1348
1349 if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
1350 has_short_buffer = true;
1351
1352 if (flag_stack_protect == 2)
1353 {
1354 if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
1355 && !(bits & SPCT_HAS_AGGREGATE))
1356 ret = 1;
1357 else if (bits & SPCT_HAS_ARRAY)
1358 ret = 2;
1359 }
1360 else
1361 ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;
1362
1363 if (ret)
1364 has_protected_decls = true;
1365
1366 return ret;
1367}
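/* Illustrative classification (hypothetical decls, assuming the default
   --param ssp-buffer-size of 8): with -fstack-protector, "char buf[64]"
   is a large character array and gets phase 1, while "int v[16]" gets
   phase 0; with -fstack-protector-all the int array moves to phase 2, and
   a struct containing a character array has SPCT_HAS_AGGREGATE set and so
   also lands in phase 2 rather than phase 1.  */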
1368
1369/* Two helper routines that check for phase 1 and phase 2. These are used
1370 as callbacks for expand_stack_vars. */
1371
1372static bool
1373stack_protect_decl_phase_1 (tree decl)
1374{
1375 return stack_protect_decl_phase (decl) == 1;
1376}
1377
1378static bool
1379stack_protect_decl_phase_2 (tree decl)
1380{
1381 return stack_protect_decl_phase (decl) == 2;
1382}
1383
1384/* Ensure that variables in different stack protection phases conflict,
1385 so that they are never merged into the same stack slot. */
1386
1387static void
1388add_stack_protection_conflicts (void)
1389{
1390 size_t i, j, n = stack_vars_num;
1391 unsigned char *phase;
1392
1393 phase = XNEWVEC (unsigned char, n);
1394 for (i = 0; i < n; ++i)
1395 phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
1396
1397 for (i = 0; i < n; ++i)
1398 {
1399 unsigned char ph_i = phase[i];
1400 for (j = 0; j < i; ++j)
1401 if (ph_i != phase[j])
1402 add_stack_var_conflict (i, j);
1403 }
1404
1405 XDELETEVEC (phase);
1406}
1407
1408/* Create a decl for the guard at the top of the stack frame. */
1409
1410static void
1411create_stack_guard (void)
1412{
c2255bc4
AH
1413 tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
1414 VAR_DECL, NULL, ptr_type_node);
7d69de61
RH
1415 TREE_THIS_VOLATILE (guard) = 1;
1416 TREE_USED (guard) = 1;
1417 expand_one_stack_var (guard);
cb91fab0 1418 crtl->stack_protect_guard = guard;
7d69de61
RH
1419}
1420
1421/* A subroutine of estimated_stack_frame_size. Walk down through the
1422 BLOCK tree and sum the stack sizes of the used variables that would
1423 be expanded, without actually expanding them.
1424
1425 TOPLEVEL is true if this is the outermost BLOCK. */
1426
1427static HOST_WIDE_INT
1428account_used_vars_for_block (tree block, bool toplevel)
1429{
1430 size_t i, j, old_sv_num, this_sv_num, new_sv_num;
1431 tree t;
1432 HOST_WIDE_INT size = 0;
1433
1434 old_sv_num = toplevel ? 0 : stack_vars_num;
1435
1436 /* Expand all variables at this level. */
1437 for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
1438 if (TREE_USED (t))
1439 size += expand_one_var (t, toplevel, false);
1440
1441 this_sv_num = stack_vars_num;
1442
1443 /* Expand all variables at containing levels. */
1444 for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
1445 size += account_used_vars_for_block (t, false);
1446
1447 /* Since we do not track exact variable lifetimes (which is not even
1448 possible for variables whose address escapes), we mirror the block
1449 tree in the interference graph. Here we cause all variables at this
1450 level, and all sublevels, to conflict. Do make certain that a
1451 variable conflicts with itself. */
1452 if (old_sv_num < this_sv_num)
1453 {
1454 new_sv_num = stack_vars_num;
1455 resize_stack_vars_conflict (new_sv_num);
1456
1457 for (i = old_sv_num; i < new_sv_num; ++i)
1458 for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
1459 add_stack_var_conflict (i, j);
1460 }
1461 return size;
1462}
1463
1464/* Prepare for expanding variables. */
1465static void
1466init_vars_expansion (void)
1467{
1468 tree t;
cb91fab0
JH
1469 /* Set TREE_USED on all variables in the local_decls. */
1470 for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
ff28a94d
JH
1471 TREE_USED (TREE_VALUE (t)) = 1;
1472
1473 /* Clear TREE_USED on all variables associated with a block scope. */
1474 clear_tree_used (DECL_INITIAL (current_function_decl));
1475
1476 /* Initialize local stack smashing state. */
1477 has_protected_decls = false;
1478 has_short_buffer = false;
1479}
1480
1481/* Free up stack variable graph data. */
1482static void
1483fini_vars_expansion (void)
1484{
1485 XDELETEVEC (stack_vars);
1486 XDELETEVEC (stack_vars_sorted);
1487 XDELETEVEC (stack_vars_conflict);
1488 stack_vars = NULL;
1489 stack_vars_alloc = stack_vars_num = 0;
1490 stack_vars_conflict = NULL;
1491 stack_vars_conflict_alloc = 0;
1492}
1493
b5a430f3
SB
1494/* Make a fair guess for the size of the stack frame of the current
1495 function. This doesn't have to be exact, the result is only used
1496 in the inline heuristics. So we don't want to run the full stack
1497 var packing algorithm (which is quadratic in the number of stack
1498 vars). Instead, we calculate the total size of all stack vars.
1499 This turns out to be a pretty fair estimate -- packing of stack
1500 vars doesn't happen very often. */
1501
ff28a94d
JH
1502HOST_WIDE_INT
1503estimated_stack_frame_size (void)
1504{
1505 HOST_WIDE_INT size = 0;
b5a430f3 1506 size_t i;
ff28a94d
JH
1507 tree t, outer_block = DECL_INITIAL (current_function_decl);
1508
1509 init_vars_expansion ();
1510
cb91fab0 1511 for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
ff28a94d
JH
1512 {
1513 tree var = TREE_VALUE (t);
1514
1515 if (TREE_USED (var))
1516 size += expand_one_var (var, true, false);
1517 TREE_USED (var) = 1;
1518 }
1519 size += account_used_vars_for_block (outer_block, true);
b5a430f3 1520
ff28a94d
JH
1521 if (stack_vars_num > 0)
1522 {
b5a430f3
SB
1523 /* Fake sorting the stack vars for account_stack_vars (). */
1524 stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
1525 for (i = 0; i < stack_vars_num; ++i)
1526 stack_vars_sorted[i] = i;
ff28a94d
JH
1527 size += account_stack_vars ();
1528 fini_vars_expansion ();
1529 }
b5a430f3 1530
ff28a94d
JH
1531 return size;
1532}
1533
1f6d3a08 1534/* Expand all variables used in the function. */
727a31fa
RH
1535
1536static void
1537expand_used_vars (void)
1538{
802e9f8e 1539 tree t, next, outer_block = DECL_INITIAL (current_function_decl);
4e3825db 1540 unsigned i;
727a31fa 1541
1f6d3a08
RH
1542 /* Compute the phase of the stack frame for this function. */
1543 {
1544 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1545 int off = STARTING_FRAME_OFFSET % align;
1546 frame_phase = off ? align - off : 0;
1547 }
727a31fa 1548
ff28a94d 1549 init_vars_expansion ();
7d69de61 1550
4e3825db
MM
1551 for (i = 0; i < SA.map->num_partitions; i++)
1552 {
1553 tree var = partition_to_var (SA.map, i);
1554
1555 gcc_assert (is_gimple_reg (var));
1556 if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
1557 expand_one_var (var, true, true);
1558 else
1559 {
1560 /* This is a PARM_DECL or RESULT_DECL. For those partitions that
1561 contain the default def (representing the parm or result itself)
1562 we don't do anything here. But those which don't contain the
1563 default def (representing a temporary based on the parm/result)
1564 we need to allocate space just like for normal VAR_DECLs. */
1565 if (!bitmap_bit_p (SA.partition_has_default_def, i))
1566 {
1567 expand_one_var (var, true, true);
1568 gcc_assert (SA.partition_to_pseudo[i]);
1569 }
1570 }
1571 }
1572
cb91fab0 1573 /* At this point all variables on the local_decls with TREE_USED
1f6d3a08 1574 set are not associated with any block scope. Lay them out. */
802e9f8e
JJ
1575 t = cfun->local_decls;
1576 cfun->local_decls = NULL_TREE;
1577 for (; t; t = next)
1f6d3a08
RH
1578 {
1579 tree var = TREE_VALUE (t);
1580 bool expand_now = false;
1581
802e9f8e
JJ
1582 next = TREE_CHAIN (t);
1583
4e3825db
MM
1584 /* Expanded above already. */
1585 if (is_gimple_reg (var))
eb7adebc
MM
1586 {
1587 TREE_USED (var) = 0;
1588 ggc_free (t);
1589 continue;
1590 }
1f6d3a08
RH
1591 /* We didn't set a block for static or extern because it's hard
1592 to tell the difference between a global variable (re)declared
1593 in a local scope, and one that's really declared there to
1594 begin with. And it doesn't really matter much, since we're
1595 not giving them stack space. Expand them now. */
4e3825db 1596 else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
1f6d3a08
RH
1597 expand_now = true;
1598
1599 /* If the variable is not associated with any block, then it
1600 was created by the optimizers, and could be live anywhere
1601 in the function. */
1602 else if (TREE_USED (var))
1603 expand_now = true;
1604
1605 /* Finally, mark all variables on the list as used. We'll use
1606 this in a moment when we expand those associated with scopes. */
1607 TREE_USED (var) = 1;
1608
1609 if (expand_now)
802e9f8e
JJ
1610 {
1611 expand_one_var (var, true, true);
1612 if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
1613 {
1614 rtx rtl = DECL_RTL_IF_SET (var);
1615
1616 /* Keep artificial non-ignored vars in cfun->local_decls
1617 chain until instantiate_decls. */
1618 if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
1619 {
1620 TREE_CHAIN (t) = cfun->local_decls;
1621 cfun->local_decls = t;
1622 continue;
1623 }
1624 }
1625 }
1626
1627 ggc_free (t);
1f6d3a08 1628 }
1f6d3a08
RH
1629
1630 /* At this point, all variables within the block tree with TREE_USED
1631 set are actually used by the optimized function. Lay them out. */
1632 expand_used_vars_for_block (outer_block, true);
1633
1634 if (stack_vars_num > 0)
1635 {
1636 /* Due to the way alias sets work, no variables with non-conflicting
c22cacf3 1637 alias sets may be assigned the same address. Add conflicts to
1f6d3a08
RH
1638 reflect this. */
1639 add_alias_set_conflicts ();
1640
c22cacf3 1641 /* If stack protection is enabled, we don't share space between
7d69de61
RH
1642 vulnerable data and non-vulnerable data. */
1643 if (flag_stack_protect)
1644 add_stack_protection_conflicts ();
1645
c22cacf3 1646 /* Now that we have collected all stack variables, and have computed a
1f6d3a08
RH
1647 minimal interference graph, attempt to save some stack space. */
1648 partition_stack_vars ();
1649 if (dump_file)
1650 dump_stack_var_partition ();
7d69de61
RH
1651 }
1652
1653 /* There are several conditions under which we should create a
1654 stack guard: protect-all, alloca used, protected decls present. */
1655 if (flag_stack_protect == 2
1656 || (flag_stack_protect
e3b5732b 1657 && (cfun->calls_alloca || has_protected_decls)))
7d69de61 1658 create_stack_guard ();
1f6d3a08 1659
7d69de61
RH
1660 /* Assign rtl to each variable based on these partitions. */
1661 if (stack_vars_num > 0)
1662 {
1663 /* Reorder decls to be protected by iterating over the variables
1664 array multiple times, and allocating out of each phase in turn. */
c22cacf3 1665 /* ??? We could probably integrate this into the qsort we did
7d69de61
RH
1666 earlier, such that we naturally see these variables first,
1667 and thus naturally allocate things in the right order. */
1668 if (has_protected_decls)
1669 {
1670 /* Phase 1 contains only character arrays. */
1671 expand_stack_vars (stack_protect_decl_phase_1);
1672
1673 /* Phase 2 contains other kinds of arrays. */
1674 if (flag_stack_protect == 2)
1675 expand_stack_vars (stack_protect_decl_phase_2);
1676 }
1677
1678 expand_stack_vars (NULL);
1f6d3a08 1679
ff28a94d 1680 fini_vars_expansion ();
1f6d3a08
RH
1681 }
1682
1683 /* If the target requires that FRAME_OFFSET be aligned, do it. */
1684 if (STACK_ALIGNMENT_NEEDED)
1685 {
1686 HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1687 if (!FRAME_GROWS_DOWNWARD)
1688 frame_offset += align - 1;
1689 frame_offset &= -align;
1690 }
727a31fa
RH
1691}
1692
1693
b7211528
SB
1694/* If we need to produce a detailed dump, print the tree representation
1695 for STMT to the dump file. SINCE is the last RTX after which the RTL
1696 generated for STMT should have been appended. */
1697
1698static void
726a989a 1699maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
b7211528
SB
1700{
1701 if (dump_file && (dump_flags & TDF_DETAILS))
1702 {
1703 fprintf (dump_file, "\n;; ");
726a989a 1704 print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
b7211528
SB
1705 fprintf (dump_file, "\n");
1706
1707 print_rtl (dump_file, since ? NEXT_INSN (since) : since);
1708 }
1709}
1710
8b11009b
ZD
1711/* Maps the blocks that do not contain tree labels to rtx labels. */
1712
1713static struct pointer_map_t *lab_rtx_for_bb;
1714
a9b77cd1
ZD
1715/* Returns the label_rtx expression for a label starting basic block BB. */
1716
1717static rtx
726a989a 1718label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
a9b77cd1 1719{
726a989a
RB
1720 gimple_stmt_iterator gsi;
1721 tree lab;
1722 gimple lab_stmt;
8b11009b 1723 void **elt;
a9b77cd1
ZD
1724
1725 if (bb->flags & BB_RTL)
1726 return block_label (bb);
1727
8b11009b
ZD
1728 elt = pointer_map_contains (lab_rtx_for_bb, bb);
1729 if (elt)
ae50c0cb 1730 return (rtx) *elt;
8b11009b
ZD
1731
1732 /* Find the tree label if it is present. */
1733
726a989a 1734 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
a9b77cd1 1735 {
726a989a
RB
1736 lab_stmt = gsi_stmt (gsi);
1737 if (gimple_code (lab_stmt) != GIMPLE_LABEL)
a9b77cd1
ZD
1738 break;
1739
726a989a 1740 lab = gimple_label_label (lab_stmt);
a9b77cd1
ZD
1741 if (DECL_NONLOCAL (lab))
1742 break;
1743
1744 return label_rtx (lab);
1745 }
1746
8b11009b
ZD
1747 elt = pointer_map_insert (lab_rtx_for_bb, bb);
1748 *elt = gen_label_rtx ();
ae50c0cb 1749 return (rtx) *elt;
a9b77cd1
ZD
1750}
1751
726a989a 1752
529ff441
MM
1753/* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
1754 of a basic block where we just expanded the conditional at the end,
1755 possibly clean up the CFG and instruction sequence. */
1756
1757static void
1758maybe_cleanup_end_of_block (edge e)
1759{
1760 /* Special case: when jumpif decides that the condition is
1761 trivial it emits an unconditional jump (and the necessary
1762 barrier). But we still have two edges, the fallthru one is
1763 wrong. purge_dead_edges would clean this up later. Unfortunately
1764 we have to insert insns (and split edges) before
1765 find_many_sub_basic_blocks and hence before purge_dead_edges.
1766 But splitting edges might create new blocks which depend on the
1767 fact that if there are two edges there's no barrier. So the
1768 barrier would get lost and verify_flow_info would ICE. Instead
1769 of auditing all edge splitters to care for the barrier (which
1770 normally isn't there in a cleaned CFG), fix it here. */
1771 if (BARRIER_P (get_last_insn ()))
1772 {
1773 basic_block bb = e->src;
1774 rtx insn;
1775 remove_edge (e);
1776 /* Now we have a single successor block; if we have insns to
1777 insert on the remaining edge, we will potentially insert
1778 them at the end of this block (if the dest block isn't feasible)
1779 in order to avoid splitting the edge. This insertion will take
1780 place in front of the last jump. But we might have emitted
1781 multiple jumps (one conditional and one unconditional) to the
1782 same destination. Inserting in front of the last one is then
1783 a problem. See PR 40021. We fix this by deleting all
1784 jumps except the last unconditional one. */
1785 insn = PREV_INSN (get_last_insn ());
1786 /* Make sure we have an unconditional jump. Otherwise we're
1787 confused. */
1788 gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
1789 for (insn = PREV_INSN (insn); insn != BB_HEAD (bb);)
1790 {
1791 insn = PREV_INSN (insn);
1792 if (JUMP_P (NEXT_INSN (insn)))
1793 delete_insn (NEXT_INSN (insn));
1794 }
1795 }
1796}
1797
1798
726a989a 1799/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
80c7a9eb
RH
1800 Returns a new basic block if we've terminated the current basic
1801 block and created a new one. */
1802
1803static basic_block
726a989a 1804expand_gimple_cond (basic_block bb, gimple stmt)
80c7a9eb
RH
1805{
1806 basic_block new_bb, dest;
1807 edge new_edge;
1808 edge true_edge;
1809 edge false_edge;
726a989a 1810 tree pred = gimple_cond_pred_to_tree (stmt);
b7211528
SB
1811 rtx last2, last;
1812
1813 last2 = last = get_last_insn ();
80c7a9eb
RH
1814
1815 extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
726a989a 1816 if (gimple_has_location (stmt))
80c7a9eb 1817 {
726a989a
RB
1818 set_curr_insn_source_location (gimple_location (stmt));
1819 set_curr_insn_block (gimple_block (stmt));
80c7a9eb
RH
1820 }
1821
1822 /* These flags have no purpose in RTL land. */
1823 true_edge->flags &= ~EDGE_TRUE_VALUE;
1824 false_edge->flags &= ~EDGE_FALSE_VALUE;
1825
1826 /* We can either have a pure conditional jump with one fallthru edge or
1827 a two-way jump that needs to be decomposed into two basic blocks. */
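  /* Three layouts are handled below: the false edge falls through to the
     next block, so a single jumpif to the true destination suffices; the
     true edge falls through, so we emit jumpifnot to the false destination;
     or neither falls through, in which case we emit both a conditional and
     an unconditional jump and split off a new basic block for the latter.  */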
a9b77cd1 1828 if (false_edge->dest == bb->next_bb)
80c7a9eb 1829 {
a9b77cd1 1830 jumpif (pred, label_rtx_for_bb (true_edge->dest));
10d22567 1831 add_reg_br_prob_note (last, true_edge->probability);
726a989a 1832 maybe_dump_rtl_for_gimple_stmt (stmt, last);
a9b77cd1 1833 if (true_edge->goto_locus)
7241571e
JJ
1834 {
1835 set_curr_insn_source_location (true_edge->goto_locus);
1836 set_curr_insn_block (true_edge->goto_block);
1837 true_edge->goto_locus = curr_insn_locator ();
1838 }
1839 true_edge->goto_block = NULL;
a9b77cd1 1840 false_edge->flags |= EDGE_FALLTHRU;
726a989a 1841 ggc_free (pred);
529ff441 1842 maybe_cleanup_end_of_block (false_edge);
80c7a9eb
RH
1843 return NULL;
1844 }
a9b77cd1 1845 if (true_edge->dest == bb->next_bb)
80c7a9eb 1846 {
a9b77cd1 1847 jumpifnot (pred, label_rtx_for_bb (false_edge->dest));
10d22567 1848 add_reg_br_prob_note (last, false_edge->probability);
726a989a 1849 maybe_dump_rtl_for_gimple_stmt (stmt, last);
a9b77cd1 1850 if (false_edge->goto_locus)
7241571e
JJ
1851 {
1852 set_curr_insn_source_location (false_edge->goto_locus);
1853 set_curr_insn_block (false_edge->goto_block);
1854 false_edge->goto_locus = curr_insn_locator ();
1855 }
1856 false_edge->goto_block = NULL;
a9b77cd1 1857 true_edge->flags |= EDGE_FALLTHRU;
726a989a 1858 ggc_free (pred);
529ff441 1859 maybe_cleanup_end_of_block (true_edge);
80c7a9eb
RH
1860 return NULL;
1861 }
80c7a9eb 1862
a9b77cd1 1863 jumpif (pred, label_rtx_for_bb (true_edge->dest));
10d22567 1864 add_reg_br_prob_note (last, true_edge->probability);
80c7a9eb 1865 last = get_last_insn ();
7241571e
JJ
1866 if (false_edge->goto_locus)
1867 {
1868 set_curr_insn_source_location (false_edge->goto_locus);
1869 set_curr_insn_block (false_edge->goto_block);
1870 false_edge->goto_locus = curr_insn_locator ();
1871 }
1872 false_edge->goto_block = NULL;
a9b77cd1 1873 emit_jump (label_rtx_for_bb (false_edge->dest));
80c7a9eb
RH
1874
1875 BB_END (bb) = last;
1876 if (BARRIER_P (BB_END (bb)))
1877 BB_END (bb) = PREV_INSN (BB_END (bb));
1878 update_bb_for_insn (bb);
1879
1880 new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
1881 dest = false_edge->dest;
1882 redirect_edge_succ (false_edge, new_bb);
1883 false_edge->flags |= EDGE_FALLTHRU;
1884 new_bb->count = false_edge->count;
1885 new_bb->frequency = EDGE_FREQUENCY (false_edge);
1886 new_edge = make_edge (new_bb, dest, 0);
1887 new_edge->probability = REG_BR_PROB_BASE;
1888 new_edge->count = new_bb->count;
1889 if (BARRIER_P (BB_END (new_bb)))
1890 BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
1891 update_bb_for_insn (new_bb);
1892
726a989a 1893 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
c22cacf3 1894
7787b4aa
JJ
1895 if (true_edge->goto_locus)
1896 {
1897 set_curr_insn_source_location (true_edge->goto_locus);
1898 set_curr_insn_block (true_edge->goto_block);
1899 true_edge->goto_locus = curr_insn_locator ();
1900 }
1901 true_edge->goto_block = NULL;
1902
726a989a 1903 ggc_free (pred);
80c7a9eb
RH
1904 return new_bb;
1905}
1906
726a989a 1907/* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
224e770b
RH
1908 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
1909 generated a tail call (something that might be denied by the ABI
cea49550
RH
1910 rules governing the call; see calls.c).
1911
1912 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
1913 can still reach the rest of BB. The case here is __builtin_sqrt,
1914 where the NaN result goes through the external function (with a
1915 tailcall) and the normal result happens via a sqrt instruction. */
80c7a9eb
RH
1916
1917static basic_block
726a989a 1918expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
80c7a9eb 1919{
b7211528 1920 rtx last2, last;
224e770b 1921 edge e;
628f6a4e 1922 edge_iterator ei;
224e770b
RH
1923 int probability;
1924 gcov_type count;
726a989a 1925 tree stmt_tree = gimple_to_tree (stmt);
80c7a9eb 1926
b7211528
SB
1927 last2 = last = get_last_insn ();
1928
726a989a
RB
1929 expand_expr_stmt (stmt_tree);
1930
1931 release_stmt_tree (stmt, stmt_tree);
80c7a9eb
RH
1932
1933 for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
224e770b
RH
1934 if (CALL_P (last) && SIBLING_CALL_P (last))
1935 goto found;
80c7a9eb 1936
726a989a 1937 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 1938
cea49550 1939 *can_fallthru = true;
224e770b 1940 return NULL;
80c7a9eb 1941
224e770b
RH
1942 found:
1943 /* ??? Wouldn't it be better to just reset any pending stack adjust?
1944 Any instructions emitted here are about to be deleted. */
1945 do_pending_stack_adjust ();
1946
1947 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
1948 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
1949 EH or abnormal edges, we shouldn't have created a tail call in
1950 the first place. So it seems to me we should just be removing
1951 all edges here, or redirecting the existing fallthru edge to
1952 the exit block. */
1953
224e770b
RH
1954 probability = 0;
1955 count = 0;
224e770b 1956
628f6a4e
BE
1957 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
1958 {
224e770b
RH
1959 if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
1960 {
1961 if (e->dest != EXIT_BLOCK_PTR)
80c7a9eb 1962 {
224e770b
RH
1963 e->dest->count -= e->count;
1964 e->dest->frequency -= EDGE_FREQUENCY (e);
1965 if (e->dest->count < 0)
c22cacf3 1966 e->dest->count = 0;
224e770b 1967 if (e->dest->frequency < 0)
c22cacf3 1968 e->dest->frequency = 0;
80c7a9eb 1969 }
224e770b
RH
1970 count += e->count;
1971 probability += e->probability;
1972 remove_edge (e);
80c7a9eb 1973 }
628f6a4e
BE
1974 else
1975 ei_next (&ei);
80c7a9eb
RH
1976 }
1977
224e770b
RH
1978 /* This is somewhat ugly: the call_expr expander often emits instructions
1979 after the sibcall (to perform the function return). These confuse the
12eff7b7 1980 find_many_sub_basic_blocks code, so we need to get rid of them. */
224e770b 1981 last = NEXT_INSN (last);
341c100f 1982 gcc_assert (BARRIER_P (last));
cea49550
RH
1983
1984 *can_fallthru = false;
224e770b
RH
1985 while (NEXT_INSN (last))
1986 {
1987 /* For instance an sqrt builtin expander expands an if with a
1988 sibcall in the then-arm and a label for the else-arm. */
1989 if (LABEL_P (NEXT_INSN (last)))
cea49550
RH
1990 {
1991 *can_fallthru = true;
1992 break;
1993 }
224e770b
RH
1994 delete_insn (NEXT_INSN (last));
1995 }
1996
1997 e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
1998 e->probability += probability;
1999 e->count += count;
2000 BB_END (bb) = last;
2001 update_bb_for_insn (bb);
2002
2003 if (NEXT_INSN (last))
2004 {
2005 bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
2006
2007 last = BB_END (bb);
2008 if (BARRIER_P (last))
2009 BB_END (bb) = PREV_INSN (last);
2010 }
2011
726a989a 2012 maybe_dump_rtl_for_gimple_stmt (stmt, last2);
b7211528 2013
224e770b 2014 return bb;
80c7a9eb
RH
2015}
2016
242229bb
JH
2017/* Expand basic block BB from GIMPLE trees to RTL. */
2018
2019static basic_block
10d22567 2020expand_gimple_basic_block (basic_block bb)
242229bb 2021{
726a989a
RB
2022 gimple_stmt_iterator gsi;
2023 gimple_seq stmts;
2024 gimple stmt = NULL;
242229bb
JH
2025 rtx note, last;
2026 edge e;
628f6a4e 2027 edge_iterator ei;
8b11009b 2028 void **elt;
242229bb
JH
2029
2030 if (dump_file)
726a989a
RB
2031 fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
2032 bb->index);
2033
2034 /* Note that since we are now transitioning from GIMPLE to RTL, we
2035 cannot use the gsi_*_bb() routines because they expect the basic
2036 block to be in GIMPLE, instead of RTL. Therefore, we need to
2037 access the BB sequence directly. */
2038 stmts = bb_seq (bb);
2039 bb->il.gimple = NULL;
bf08ebeb 2040 rtl_profile_for_bb (bb);
5e2d947c
JH
2041 init_rtl_bb_info (bb);
2042 bb->flags |= BB_RTL;
2043
a9b77cd1
ZD
2044 /* Remove the RETURN_EXPR if we may fall through to the exit
2045 instead. */
726a989a
RB
2046 gsi = gsi_last (stmts);
2047 if (!gsi_end_p (gsi)
2048 && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
a9b77cd1 2049 {
726a989a 2050 gimple ret_stmt = gsi_stmt (gsi);
a9b77cd1
ZD
2051
2052 gcc_assert (single_succ_p (bb));
2053 gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);
2054
2055 if (bb->next_bb == EXIT_BLOCK_PTR
726a989a 2056 && !gimple_return_retval (ret_stmt))
a9b77cd1 2057 {
726a989a 2058 gsi_remove (&gsi, false);
a9b77cd1
ZD
2059 single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
2060 }
2061 }
2062
726a989a
RB
2063 gsi = gsi_start (stmts);
2064 if (!gsi_end_p (gsi))
8b11009b 2065 {
726a989a
RB
2066 stmt = gsi_stmt (gsi);
2067 if (gimple_code (stmt) != GIMPLE_LABEL)
2068 stmt = NULL;
8b11009b 2069 }
242229bb 2070
8b11009b
ZD
2071 elt = pointer_map_contains (lab_rtx_for_bb, bb);
2072
2073 if (stmt || elt)
242229bb
JH
2074 {
2075 last = get_last_insn ();
2076
8b11009b
ZD
2077 if (stmt)
2078 {
726a989a
RB
2079 tree stmt_tree = gimple_to_tree (stmt);
2080 expand_expr_stmt (stmt_tree);
2081 release_stmt_tree (stmt, stmt_tree);
2082 gsi_next (&gsi);
8b11009b
ZD
2083 }
2084
2085 if (elt)
ae50c0cb 2086 emit_label ((rtx) *elt);
242229bb 2087
caf93cb0 2088 /* Java emits line number notes at the top of labels.
c22cacf3 2089 ??? Make this go away once line number notes are obsoleted. */
242229bb 2090 BB_HEAD (bb) = NEXT_INSN (last);
4b4bf941 2091 if (NOTE_P (BB_HEAD (bb)))
242229bb 2092 BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
242229bb 2093 note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));
b7211528 2094
726a989a 2095 maybe_dump_rtl_for_gimple_stmt (stmt, last);
242229bb
JH
2096 }
2097 else
2098 note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);
2099
2100 NOTE_BASIC_BLOCK (note) = bb;
2101
726a989a 2102 for (; !gsi_end_p (gsi); gsi_next (&gsi))
242229bb 2103 {
726a989a 2104 gimple stmt = gsi_stmt (gsi);
cea49550 2105 basic_block new_bb;
242229bb 2106
242229bb
JH
2107 /* Expand this statement, then evaluate the resulting RTL and
2108 fixup the CFG accordingly. */
726a989a 2109 if (gimple_code (stmt) == GIMPLE_COND)
cea49550 2110 {
726a989a 2111 new_bb = expand_gimple_cond (bb, stmt);
cea49550
RH
2112 if (new_bb)
2113 return new_bb;
2114 }
80c7a9eb 2115 else
242229bb 2116 {
726a989a 2117 if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
cea49550
RH
2118 {
2119 bool can_fallthru;
2120 new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
2121 if (new_bb)
2122 {
2123 if (can_fallthru)
2124 bb = new_bb;
2125 else
2126 return new_bb;
2127 }
2128 }
4d7a65ea 2129 else
b7211528 2130 {
4e3825db
MM
2131 def_operand_p def_p;
2132 tree stmt_tree;
2133 def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);
2134
2135 if (def_p != NULL)
2136 {
2137 /* Ignore this stmt if it is in the list of
2138 replaceable expressions. */
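	      /* (SA.values lists the SSA names whose single-use definitions
		 TER chose to forward into their use; such statements are
		 expanded at the point of use rather than here.)  */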
2139 if (SA.values
e97809c6
MM
2140 && bitmap_bit_p (SA.values,
2141 SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
4e3825db
MM
2142 continue;
2143 }
2144 stmt_tree = gimple_to_tree (stmt);
b7211528 2145 last = get_last_insn ();
726a989a
RB
2146 expand_expr_stmt (stmt_tree);
2147 maybe_dump_rtl_for_gimple_stmt (stmt, last);
2148 release_stmt_tree (stmt, stmt_tree);
b7211528 2149 }
242229bb
JH
2150 }
2151 }
2152
7241571e 2153 /* Expand implicit goto and convert goto_locus. */
a9b77cd1
ZD
2154 FOR_EACH_EDGE (e, ei, bb->succs)
2155 {
7241571e
JJ
2156 if (e->goto_locus && e->goto_block)
2157 {
2158 set_curr_insn_source_location (e->goto_locus);
2159 set_curr_insn_block (e->goto_block);
2160 e->goto_locus = curr_insn_locator ();
2161 }
2162 e->goto_block = NULL;
2163 if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
2164 {
2165 emit_jump (label_rtx_for_bb (e->dest));
2166 e->flags &= ~EDGE_FALLTHRU;
2167 }
a9b77cd1
ZD
2168 }
2169
242229bb
JH
2170 do_pending_stack_adjust ();
2171
3f117656 2172 /* Find the block tail. The last insn in the block is the insn
242229bb
JH
2173 before a barrier and/or table jump insn. */
2174 last = get_last_insn ();
4b4bf941 2175 if (BARRIER_P (last))
242229bb
JH
2176 last = PREV_INSN (last);
2177 if (JUMP_TABLE_DATA_P (last))
2178 last = PREV_INSN (PREV_INSN (last));
2179 BB_END (bb) = last;
caf93cb0 2180
242229bb 2181 update_bb_for_insn (bb);
80c7a9eb 2182
242229bb
JH
2183 return bb;
2184}
2185
2186
2187/* Create a basic block for initialization code. */
2188
2189static basic_block
2190construct_init_block (void)
2191{
2192 basic_block init_block, first_block;
fd44f634
JH
2193 edge e = NULL;
2194 int flags;
275a4187 2195
fd44f634
JH
2196 /* Multiple entry points not supported yet. */
2197 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
5e2d947c
JH
2198 init_rtl_bb_info (ENTRY_BLOCK_PTR);
2199 init_rtl_bb_info (EXIT_BLOCK_PTR);
2200 ENTRY_BLOCK_PTR->flags |= BB_RTL;
2201 EXIT_BLOCK_PTR->flags |= BB_RTL;
242229bb 2202
fd44f634 2203 e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);
275a4187 2204
fd44f634
JH
2205 /* When the entry edge points to the first basic block, we don't need a
2206 jump; otherwise we have to jump to the proper target. */
2207 if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
2208 {
726a989a 2209 tree label = gimple_block_label (e->dest);
fd44f634
JH
2210
2211 emit_jump (label_rtx (label));
2212 flags = 0;
275a4187 2213 }
fd44f634
JH
2214 else
2215 flags = EDGE_FALLTHRU;
242229bb
JH
2216
2217 init_block = create_basic_block (NEXT_INSN (get_insns ()),
2218 get_last_insn (),
2219 ENTRY_BLOCK_PTR);
2220 init_block->frequency = ENTRY_BLOCK_PTR->frequency;
2221 init_block->count = ENTRY_BLOCK_PTR->count;
2222 if (e)
2223 {
2224 first_block = e->dest;
2225 redirect_edge_succ (e, init_block);
fd44f634 2226 e = make_edge (init_block, first_block, flags);
242229bb
JH
2227 }
2228 else
2229 e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
2230 e->probability = REG_BR_PROB_BASE;
2231 e->count = ENTRY_BLOCK_PTR->count;
2232
2233 update_bb_for_insn (init_block);
2234 return init_block;
2235}
2236
55e092c4
JH
2237/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
2238 found in the block tree. */
2239
2240static void
2241set_block_levels (tree block, int level)
2242{
2243 while (block)
2244 {
2245 BLOCK_NUMBER (block) = level;
2246 set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
2247 block = BLOCK_CHAIN (block);
2248 }
2249}
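/* For example, set_block_levels (DECL_INITIAL (cfun->decl), 0) gives the
   outermost scope level 0, its subblocks level 1, and so on; change_scope
   uses these depths to find the common parent of two scopes quickly.  */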
242229bb
JH
2250
2251/* Create a block containing landing pads and similar stuff. */
2252
2253static void
2254construct_exit_block (void)
2255{
2256 rtx head = get_last_insn ();
2257 rtx end;
2258 basic_block exit_block;
628f6a4e
BE
2259 edge e, e2;
2260 unsigned ix;
2261 edge_iterator ei;
071a42f9 2262 rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);
242229bb 2263
bf08ebeb
JH
2264 rtl_profile_for_bb (EXIT_BLOCK_PTR);
2265
caf93cb0 2266 /* Make sure the locus is set to the end of the function, so that
242229bb 2267 epilogue line numbers and warnings are set properly. */
6773e15f 2268 if (cfun->function_end_locus != UNKNOWN_LOCATION)
242229bb
JH
2269 input_location = cfun->function_end_locus;
2270
2271 /* The following insns belong to the top scope. */
55e092c4 2272 set_curr_insn_block (DECL_INITIAL (current_function_decl));
242229bb 2273
242229bb
JH
2274 /* Generate rtl for function exit. */
2275 expand_function_end ();
2276
2277 end = get_last_insn ();
2278 if (head == end)
2279 return;
071a42f9
JH
2280 /* While emitting the function end we could have moved the end of the last
2281 basic block, so restore it from ORIG_END. */
2282 BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
4b4bf941 2283 while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
242229bb 2284 head = NEXT_INSN (head);
80c7a9eb
RH
2285 exit_block = create_basic_block (NEXT_INSN (head), end,
2286 EXIT_BLOCK_PTR->prev_bb);
242229bb
JH
2287 exit_block->frequency = EXIT_BLOCK_PTR->frequency;
2288 exit_block->count = EXIT_BLOCK_PTR->count;
628f6a4e
BE
2289
2290 ix = 0;
2291 while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
242229bb 2292 {
8fb790fd 2293 e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
242229bb 2294 if (!(e->flags & EDGE_ABNORMAL))
628f6a4e
BE
2295 redirect_edge_succ (e, exit_block);
2296 else
2297 ix++;
242229bb 2298 }
628f6a4e 2299
242229bb
JH
2300 e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
2301 e->probability = REG_BR_PROB_BASE;
2302 e->count = EXIT_BLOCK_PTR->count;
628f6a4e 2303 FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
242229bb
JH
2304 if (e2 != e)
2305 {
c22cacf3 2306 e->count -= e2->count;
242229bb
JH
2307 exit_block->count -= e2->count;
2308 exit_block->frequency -= EDGE_FREQUENCY (e2);
2309 }
2310 if (e->count < 0)
2311 e->count = 0;
2312 if (exit_block->count < 0)
2313 exit_block->count = 0;
2314 if (exit_block->frequency < 0)
2315 exit_block->frequency = 0;
2316 update_bb_for_insn (exit_block);
2317}
2318
c22cacf3 2319/* Helper function for discover_nonconstant_array_refs.
a1b23b2f
UW
2320 Look for ARRAY_REF nodes with non-constant indexes and mark them
2321 addressable. */
2322
2323static tree
2324discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
2325 void *data ATTRIBUTE_UNUSED)
2326{
2327 tree t = *tp;
2328
2329 if (IS_TYPE_OR_DECL_P (t))
2330 *walk_subtrees = 0;
2331 else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2332 {
2333 while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2334 && is_gimple_min_invariant (TREE_OPERAND (t, 1))
2335 && (!TREE_OPERAND (t, 2)
2336 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
2337 || (TREE_CODE (t) == COMPONENT_REF
2338 && (!TREE_OPERAND (t,2)
2339 || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
2340 || TREE_CODE (t) == BIT_FIELD_REF
2341 || TREE_CODE (t) == REALPART_EXPR
2342 || TREE_CODE (t) == IMAGPART_EXPR
2343 || TREE_CODE (t) == VIEW_CONVERT_EXPR
1043771b 2344 || CONVERT_EXPR_P (t))
a1b23b2f
UW
2345 t = TREE_OPERAND (t, 0);
2346
2347 if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
2348 {
2349 t = get_base_address (t);
6f11d690
RG
2350 if (t && DECL_P (t)
2351 && DECL_MODE (t) != BLKmode)
a1b23b2f
UW
2352 TREE_ADDRESSABLE (t) = 1;
2353 }
2354
2355 *walk_subtrees = 0;
2356 }
2357
2358 return NULL_TREE;
2359}
2360
2361/* RTL expansion is not able to compile array references with variable
2362 offsets for arrays stored in a single register. Discover such
2363 expressions and mark the variables as addressable to avoid this
2364 scenario. */
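/* For example, a local "int v[4]" indexed as "v[i]" with a non-constant I
   would be marked TREE_ADDRESSABLE here, forcing V into a stack slot
   instead of a single register.  */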
2365
2366static void
2367discover_nonconstant_array_refs (void)
2368{
2369 basic_block bb;
726a989a 2370 gimple_stmt_iterator gsi;
a1b23b2f
UW
2371
2372 FOR_EACH_BB (bb)
726a989a
RB
2373 for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
2374 {
2375 gimple stmt = gsi_stmt (gsi);
2376 walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
2377 }
a1b23b2f
UW
2378}
2379
2e3f842f
L
2380/* This function sets crtl->args.internal_arg_pointer to a virtual register
2381 if DRAP (a dynamically realigned argument pointer) is needed. The local
2382 register allocator will replace virtual_incoming_args_rtx with the virtual register. */
2383
2384static void
2385expand_stack_alignment (void)
2386{
2387 rtx drap_rtx;
e939805b 2388 unsigned int preferred_stack_boundary;
2e3f842f
L
2389
2390 if (! SUPPORTS_STACK_ALIGNMENT)
2391 return;
2392
2393 if (cfun->calls_alloca
2394 || cfun->has_nonlocal_label
2395 || crtl->has_nonlocal_goto)
2396 crtl->need_drap = true;
2397
2398 gcc_assert (crtl->stack_alignment_needed
2399 <= crtl->stack_alignment_estimated);
2400
2e3f842f
L
2401 /* Update crtl->stack_alignment_estimated and use it later to align
2402 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
2403 exceptions since callgraph doesn't collect incoming stack alignment
2404 in this case. */
2405 if (flag_non_call_exceptions
2406 && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
2407 preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
2408 else
2409 preferred_stack_boundary = crtl->preferred_stack_boundary;
2410 if (preferred_stack_boundary > crtl->stack_alignment_estimated)
2411 crtl->stack_alignment_estimated = preferred_stack_boundary;
2412 if (preferred_stack_boundary > crtl->stack_alignment_needed)
2413 crtl->stack_alignment_needed = preferred_stack_boundary;
2414
2415 crtl->stack_realign_needed
e939805b 2416 = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
d2d93c32 2417 crtl->stack_realign_tried = crtl->stack_realign_needed;
2e3f842f
L
2418
2419 crtl->stack_realign_processed = true;
2420
2421 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
2422 alignment. */
2423 gcc_assert (targetm.calls.get_drap_rtx != NULL);
2424 drap_rtx = targetm.calls.get_drap_rtx ();
2425
d015f7cc
L
2426 /* stack_realign_drap and drap_rtx must match. */
2427 gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));
2428
2e3f842f
L
2429 /* Do nothing if NULL is returned, which means DRAP is not needed. */
2430 if (NULL != drap_rtx)
2431 {
2432 crtl->args.internal_arg_pointer = drap_rtx;
2433
2434 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
2435 needed. */
2436 fixup_tail_calls ();
2437 }
2438}
2439
242229bb
JH
2440/* Translate the intermediate representation contained in the CFG
2441 from GIMPLE trees to RTL.
2442
2443 We do conversion per basic block and preserve/update the tree CFG.
2444 This implies we have to do some magic as the CFG can simultaneously
2445 consist of basic blocks containing RTL and GIMPLE trees. This can
61ada8ae 2446 confuse the CFG hooks, so be careful not to manipulate the CFG during
242229bb
JH
2447 the expansion. */
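/* Roughly, the steps below are: rewrite out of SSA form, expand the
   variables recorded during gimple lowering, set up parameters with
   expand_function_start, construct the init block, expand each basic
   block to RTL, construct the exit block, commit pending edge insertions,
   and let find_many_sub_basic_blocks and purge_all_dead_edges clean up
   the resulting CFG.  */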
2448
c2924966 2449static unsigned int
726a989a 2450gimple_expand_cfg (void)
242229bb
JH
2451{
2452 basic_block bb, init_block;
2453 sbitmap blocks;
0ef90296
ZD
2454 edge_iterator ei;
2455 edge e;
4e3825db
MM
2456 unsigned i;
2457
2458 rewrite_out_of_ssa (&SA);
2459 SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
2460 sizeof (rtx));
242229bb 2461
4586b4ca
SB
2462 /* Some backends want to know that we are expanding to RTL. */
2463 currently_expanding_to_rtl = 1;
2464
bf08ebeb
JH
2465 rtl_profile_for_bb (ENTRY_BLOCK_PTR);
2466
55e092c4 2467 insn_locators_alloc ();
fe8a7779 2468 if (!DECL_IS_BUILTIN (current_function_decl))
1751ecd6
AH
2469 {
2470 /* Eventually, all FEs should explicitly set function_start_locus. */
2471 if (cfun->function_start_locus == UNKNOWN_LOCATION)
2472 set_curr_insn_source_location
2473 (DECL_SOURCE_LOCATION (current_function_decl));
2474 else
2475 set_curr_insn_source_location (cfun->function_start_locus);
2476 }
55e092c4
JH
2477 set_curr_insn_block (DECL_INITIAL (current_function_decl));
2478 prologue_locator = curr_insn_locator ();
2479
2480 /* Make sure the first insn is a note even if we don't want line numbers.
2481 This guarantees that the first insn will never be deleted.
2482 Also, final expects a note to appear there. */
2483 emit_note (NOTE_INSN_DELETED);
6429e3be 2484
a1b23b2f
UW
2485 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
2486 discover_nonconstant_array_refs ();
2487
e41b2a33 2488 targetm.expand_to_rtl_hook ();
cb91fab0 2489 crtl->stack_alignment_needed = STACK_BOUNDARY;
2e3f842f
L
2490 crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
2491 crtl->stack_alignment_estimated = STACK_BOUNDARY;
cb91fab0
JH
2492 crtl->preferred_stack_boundary = STACK_BOUNDARY;
2493 cfun->cfg->max_jumptable_ents = 0;
2494
e41b2a33 2495
727a31fa 2496 /* Expand the variables recorded during gimple lowering. */
242229bb
JH
2497 expand_used_vars ();
2498
7d69de61
RH
2499 /* Honor stack protection warnings. */
2500 if (warn_stack_protect)
2501 {
e3b5732b 2502 if (cfun->calls_alloca)
c5409249
MLI
2503 warning (OPT_Wstack_protector,
2504 "not protecting local variables: variable length buffer");
cb91fab0 2505 if (has_short_buffer && !crtl->stack_protect_guard)
c5409249
MLI
2506 warning (OPT_Wstack_protector,
2507 "not protecting function: no buffer at least %d bytes long",
7d69de61
RH
2508 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
2509 }
2510
242229bb 2511 /* Set up parameters and prepare for return, for the function. */
b79c5284 2512 expand_function_start (current_function_decl);
242229bb 2513
4e3825db
MM
2514 /* Now that we also have the parameter RTXs, copy them over to our
2515 partitions. */
2516 for (i = 0; i < SA.map->num_partitions; i++)
2517 {
2518 tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));
2519
2520 if (TREE_CODE (var) != VAR_DECL
2521 && !SA.partition_to_pseudo[i])
2522 SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
2523 gcc_assert (SA.partition_to_pseudo[i]);
eb7adebc
MM
2524
2525 /* If this decl was marked as living in multiple places, reset
2526 this now to NULL. */
2527 if (DECL_RTL_IF_SET (var) == pc_rtx)
2528 SET_DECL_RTL (var, NULL);
2529
4e3825db
MM
2530 /* Some RTL parts really want to look at DECL_RTL(x) when x
2531 was a decl marked in REG_ATTR or MEM_ATTR. We could use
2532 SET_DECL_RTL here making this available, but that would mean
2533 to select one of the potentially many RTLs for one DECL. Instead
2534 of doing that we simply reset the MEM_EXPR of the RTL in question,
2535 then nobody can get at it and hence nobody can call DECL_RTL on it. */
2536 if (!DECL_RTL_SET_P (var))
2537 {
2538 if (MEM_P (SA.partition_to_pseudo[i]))
2539 set_mem_expr (SA.partition_to_pseudo[i], NULL);
2540 }
2541 }
2542
242229bb
JH
2543 /* If this function is `main', emit a call to `__main'
2544 to run global initializers, etc. */
2545 if (DECL_NAME (current_function_decl)
2546 && MAIN_NAME_P (DECL_NAME (current_function_decl))
2547 && DECL_FILE_SCOPE_P (current_function_decl))
2548 expand_main_function ();
2549
7d69de61
RH
2550 /* Initialize the stack_protect_guard field. This must happen after the
2551 call to __main (if any) so that the external decl is initialized. */
cb91fab0 2552 if (crtl->stack_protect_guard)
7d69de61
RH
2553 stack_protect_prologue ();
2554
e939805b
L
2555 /* Update stack boundary if needed. */
2556 if (SUPPORTS_STACK_ALIGNMENT)
2557 {
2558 /* Call update_stack_boundary here to update incoming stack
2559 boundary before TARGET_FUNCTION_OK_FOR_SIBCALL is called.
2560 TARGET_FUNCTION_OK_FOR_SIBCALL needs to know the accurate
2561 incoming stack alignment to check if it is OK to perform
2562 sibcall optimization since sibcall optimization will only
2563 align the outgoing stack to incoming stack boundary. */
2564 if (targetm.calls.update_stack_boundary)
2565 targetm.calls.update_stack_boundary ();
2566
2567 /* The incoming stack frame has to be aligned at least at
2568 parm_stack_boundary. */
2569 gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
2570 }
2571
4e3825db
MM
2572 expand_phi_nodes (&SA);
2573
3fbd86b1 2574 /* Register rtl specific functions for cfg. */
242229bb
JH
2575 rtl_register_cfg_hooks ();
2576
2577 init_block = construct_init_block ();
2578
0ef90296 2579 /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the
4e3825db 2580 remaining edges later. */
0ef90296
ZD
2581 FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
2582 e->flags &= ~EDGE_EXECUTABLE;
2583
8b11009b 2584 lab_rtx_for_bb = pointer_map_create ();
242229bb 2585 FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
10d22567 2586 bb = expand_gimple_basic_block (bb);
bf08ebeb 2587
4e3825db
MM
2588 execute_free_datastructures ();
2589 finish_out_of_ssa (&SA);
2590
bf08ebeb
JH
2591 /* Expansion is used by optimization passes too; set maybe_hot_insn_p
2592 conservatively to true until they are all profile aware. */
8b11009b 2593 pointer_map_destroy (lab_rtx_for_bb);
cb91fab0 2594 free_histograms ();
242229bb
JH
2595
2596 construct_exit_block ();
55e092c4
JH
2597 set_curr_insn_block (DECL_INITIAL (current_function_decl));
2598 insn_locators_finalize ();
242229bb 2599
e8a2a782 2600 /* Convert tree EH labels to RTL EH labels and zap the tree EH table. */
242229bb 2601 convert_from_eh_region_ranges ();
e8a2a782 2602 set_eh_throw_stmt_table (cfun, NULL);
242229bb
JH
2603
2604 rebuild_jump_labels (get_insns ());
2605 find_exception_handler_labels ();
2606
4e3825db
MM
2607 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
2608 {
2609 edge e;
2610 edge_iterator ei;
2611 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2612 {
2613 if (e->insns.r)
2614 commit_one_edge_insertion (e);
2615 else
2616 ei_next (&ei);
2617 }
2618 }
2619
2620 /* We're done expanding trees to RTL. */
2621 currently_expanding_to_rtl = 0;
2622
2623 FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
2624 {
2625 edge e;
2626 edge_iterator ei;
2627 for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
2628 {
2629 /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */
2630 e->flags &= ~EDGE_EXECUTABLE;
2631
2632 /* At the moment not all abnormal edges match the RTL
2633 representation. It is safe to remove them here as
2634 find_many_sub_basic_blocks will rediscover them.
2635 In the future we should get this fixed properly. */
2636 if ((e->flags & EDGE_ABNORMAL)
2637 && !(e->flags & EDGE_SIBCALL))
2638 remove_edge (e);
2639 else
2640 ei_next (&ei);
2641 }
2642 }
2643
242229bb
JH
2644 blocks = sbitmap_alloc (last_basic_block);
2645 sbitmap_ones (blocks);
2646 find_many_sub_basic_blocks (blocks);
242229bb 2647 sbitmap_free (blocks);
4e3825db 2648 purge_all_dead_edges ();
242229bb
JH
2649
2650 compact_blocks ();
2e3f842f
L
2651
2652 expand_stack_alignment ();
2653
242229bb 2654#ifdef ENABLE_CHECKING
62e5bf5d 2655 verify_flow_info ();
242229bb 2656#endif
9f8628ba
PB
2657
2658 /* There's no need to defer outputting this function any more; we
2659 know we want to output it. */
2660 DECL_DEFER_OUTPUT (current_function_decl) = 0;
2661
2662 /* Now that we're done expanding trees to RTL, we shouldn't have any
2663 more CONCATs anywhere. */
2664 generating_concat_p = 0;
2665
b7211528
SB
2666 if (dump_file)
2667 {
2668 fprintf (dump_file,
2669 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
2670 /* And the pass manager will dump RTL for us. */
2671 }
ef330312
PB
2672
2673 /* If we're emitting a nested function, make sure its parent gets
2674 emitted as well. Doing otherwise confuses debug info. */
c22cacf3 2675 {
ef330312
PB
2676 tree parent;
2677 for (parent = DECL_CONTEXT (current_function_decl);
c22cacf3
MS
2678 parent != NULL_TREE;
2679 parent = get_containing_scope (parent))
ef330312 2680 if (TREE_CODE (parent) == FUNCTION_DECL)
c22cacf3 2681 TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
ef330312 2682 }
c22cacf3 2683
ef330312
PB
2684 /* We are now committed to emitting code for this function. Do any
2685 preparation, such as emitting abstract debug info for the inline
2686 before it gets mangled by optimization. */
2687 if (cgraph_function_possibly_inlined_p (current_function_decl))
2688 (*debug_hooks->outlining_inline_function) (current_function_decl);
2689
2690 TREE_ASM_WRITTEN (current_function_decl) = 1;
4bb1e037
AP
2691
2692 /* After expanding, the return labels are no longer needed. */
2693 return_label = NULL;
2694 naked_return_label = NULL;
55e092c4
JH
2695 /* Tag the blocks with a depth number so that change_scope can find
2696 the common parent easily. */
2697 set_block_levels (DECL_INITIAL (cfun->decl), 0);
bf08ebeb 2698 default_rtl_profile ();
c2924966 2699 return 0;
242229bb
JH
2700}
2701
e3b5732b 2702struct rtl_opt_pass pass_expand =
242229bb 2703{
8ddbbcae 2704 {
e3b5732b 2705 RTL_PASS,
c22cacf3 2706 "expand", /* name */
242229bb 2707 NULL, /* gate */
726a989a 2708 gimple_expand_cfg, /* execute */
242229bb
JH
2709 NULL, /* sub */
2710 NULL, /* next */
2711 0, /* static_pass_number */
c22cacf3 2712 TV_EXPAND, /* tv_id */
4e3825db 2713 PROP_ssa | PROP_gimple_leh | PROP_cfg,/* properties_required */
242229bb 2714 PROP_rtl, /* properties_provided */
4e3825db
MM
2715 PROP_ssa | PROP_trees, /* properties_destroyed */
2716 TODO_verify_ssa | TODO_verify_flow
2717 | TODO_verify_stmts, /* todo_flags_start */
2718 TODO_dump_func
2719 | TODO_ggc_collect /* todo_flags_finish */
8ddbbcae 2720 }
242229bb 2721};