/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "target.h"
#include "ssaexpand.h"

/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    t = gimple_assign_rhs1 (stmt);
  else
    gcc_unreachable ();

  return t;
}
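
/* An illustrative note on the RHS classes handled above: for a statement
   a_1 = b_2 + c_3 the class is GIMPLE_BINARY_RHS and a PLUS_EXPR is
   rebuilt; for a negation a_1 = -b_2 it is GIMPLE_UNARY_RHS; for a plain
   copy a_1 = b_2 it is GIMPLE_SINGLE_RHS and the RHS operand is returned
   unchanged.  */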

/* Return an expression tree corresponding to the PREDICATE of GIMPLE_COND
   statement STMT.  */

static tree
gimple_cond_pred_to_tree (gimple stmt)
{
  /* We're sometimes presented with such code:
       D.123_1 = x < y;
       if (D.123_1 != 0)
         ...
     This would expand to two comparisons which then later might
     be cleaned up by combine.  But some pattern matchers like if-conversion
     work better when there's only one compare, so make up for this
     here as a special exception if TER would have made the same change.  */
  tree lhs = gimple_cond_lhs (stmt);
  if (SA.values
      && TREE_CODE (lhs) == SSA_NAME
      && bitmap_bit_p (SA.values, SSA_NAME_VERSION (lhs)))
    lhs = gimple_assign_rhs_to_tree (SSA_NAME_DEF_STMT (lhs));

  return build2 (gimple_cond_code (stmt), boolean_type_node,
                 lhs, gimple_cond_rhs (stmt));
}

/* Helper for gimple_to_tree.  Set EXPR_LOCATION for every expression
   inside *TP.  DATA is the location to set.  */

static tree
set_expr_location_r (tree *tp, int *ws ATTRIBUTE_UNUSED, void *data)
{
  location_t *loc = (location_t *) data;
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, *loc);

  return NULL_TREE;
}

/* RTL expansion has traditionally been done on trees, so the
   transition to doing it on GIMPLE tuples is very invasive to the RTL
   expander.  To facilitate the transition, this function takes a
   GIMPLE tuple STMT and returns the same statement in the form of a
   tree.  */

static tree
gimple_to_tree (gimple stmt)
{
  tree t;
  int rn;
  tree_ann_common_t ann;
  location_t loc;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        tree lhs = gimple_assign_lhs (stmt);

        t = gimple_assign_rhs_to_tree (stmt);
        t = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, t);
        if (gimple_assign_nontemporal_move_p (stmt))
          MOVE_NONTEMPORAL (t) = true;
      }
      break;

    case GIMPLE_COND:
      t = gimple_cond_pred_to_tree (stmt);
      t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, NULL_TREE);
      break;

    case GIMPLE_GOTO:
      t = build1 (GOTO_EXPR, void_type_node, gimple_goto_dest (stmt));
      break;

    case GIMPLE_LABEL:
      t = build1 (LABEL_EXPR, void_type_node, gimple_label_label (stmt));
      break;

    case GIMPLE_RETURN:
      {
        tree retval = gimple_return_retval (stmt);

        if (retval && retval != error_mark_node)
          {
            tree result = DECL_RESULT (current_function_decl);

            /* If we are not returning the current function's RESULT_DECL,
               build an assignment to it.  */
            if (retval != result)
              {
                /* I believe that a function's RESULT_DECL is unique.  */
                gcc_assert (TREE_CODE (retval) != RESULT_DECL);

                retval = build2 (MODIFY_EXPR, TREE_TYPE (result),
                                 result, retval);
              }
          }
        t = build1 (RETURN_EXPR, void_type_node, retval);
      }
      break;

    case GIMPLE_ASM:
      {
        size_t i, n;
        tree out, in, cl;
        const char *s;

        out = NULL_TREE;
        n = gimple_asm_noutputs (stmt);
        if (n > 0)
          {
            t = out = gimple_asm_output_op (stmt, 0);
            for (i = 1; i < n; i++)
              {
                TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
                t = gimple_asm_output_op (stmt, i);
              }
          }

        in = NULL_TREE;
        n = gimple_asm_ninputs (stmt);
        if (n > 0)
          {
            t = in = gimple_asm_input_op (stmt, 0);
            for (i = 1; i < n; i++)
              {
                TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
                t = gimple_asm_input_op (stmt, i);
              }
          }

        cl = NULL_TREE;
        n = gimple_asm_nclobbers (stmt);
        if (n > 0)
          {
            t = cl = gimple_asm_clobber_op (stmt, 0);
            for (i = 1; i < n; i++)
              {
                TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
                t = gimple_asm_clobber_op (stmt, i);
              }
          }

        s = gimple_asm_string (stmt);
        t = build4 (ASM_EXPR, void_type_node, build_string (strlen (s), s),
                    out, in, cl);
        ASM_VOLATILE_P (t) = gimple_asm_volatile_p (stmt);
        ASM_INPUT_P (t) = gimple_asm_input_p (stmt);
      }
      break;

    case GIMPLE_CALL:
      {
        size_t i;
        tree fn;
        tree_ann_common_t ann;

        t = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);

        CALL_EXPR_FN (t) = gimple_call_fn (stmt);
        TREE_TYPE (t) = gimple_call_return_type (stmt);
        CALL_EXPR_STATIC_CHAIN (t) = gimple_call_chain (stmt);

        for (i = 0; i < gimple_call_num_args (stmt); i++)
          CALL_EXPR_ARG (t, i) = gimple_call_arg (stmt, i);

        if (!(gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE)))
          TREE_SIDE_EFFECTS (t) = 1;

        if (gimple_call_flags (stmt) & ECF_NOTHROW)
          TREE_NOTHROW (t) = 1;

        CALL_EXPR_TAILCALL (t) = gimple_call_tail_p (stmt);
        CALL_EXPR_RETURN_SLOT_OPT (t) = gimple_call_return_slot_opt_p (stmt);
        CALL_FROM_THUNK_P (t) = gimple_call_from_thunk_p (stmt);
        CALL_CANNOT_INLINE_P (t) = gimple_call_cannot_inline_p (stmt);
        CALL_EXPR_VA_ARG_PACK (t) = gimple_call_va_arg_pack_p (stmt);

        /* If the call has a LHS then create a MODIFY_EXPR to hold it.  */
        {
          tree lhs = gimple_call_lhs (stmt);

          if (lhs)
            t = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, t);
        }

        /* Record the original call statement, as it may be used
           to retrieve profile information during expansion.  */
        if ((fn = gimple_call_fndecl (stmt)) != NULL_TREE
            && DECL_BUILT_IN (fn))
          {
            ann = get_tree_common_ann (t);
            ann->stmt = stmt;
          }
      }
      break;

    case GIMPLE_SWITCH:
      {
        tree label_vec;
        size_t i;
        tree elt = gimple_switch_label (stmt, 0);

        label_vec = make_tree_vec (gimple_switch_num_labels (stmt));

        if (!CASE_LOW (elt) && !CASE_HIGH (elt))
          {
            for (i = 1; i < gimple_switch_num_labels (stmt); i++)
              TREE_VEC_ELT (label_vec, i - 1) = gimple_switch_label (stmt, i);

            /* The default case in a SWITCH_EXPR must be at the end of
               the label vector.  */
            TREE_VEC_ELT (label_vec, i - 1) = gimple_switch_label (stmt, 0);
          }
        else
          {
            for (i = 0; i < gimple_switch_num_labels (stmt); i++)
              TREE_VEC_ELT (label_vec, i) = gimple_switch_label (stmt, i);
          }

        t = build3 (SWITCH_EXPR, void_type_node, gimple_switch_index (stmt),
                    NULL, label_vec);
      }
      break;

    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
      t = build1 (NOP_EXPR, void_type_node, size_zero_node);
      break;

    case GIMPLE_RESX:
      t = build_resx (gimple_resx_region (stmt));
      break;

    default:
      if (errorcount == 0)
        {
          error ("Unrecognized GIMPLE statement during RTL expansion");
          print_gimple_stmt (stderr, stmt, 4, 0);
          gcc_unreachable ();
        }
      else
        {
          /* Ignore any bad gimple codes if we're going to die anyhow,
             so we can at least set TREE_ASM_WRITTEN and have the rest
             of compilation advance without sudden ICE death.  */
          t = build1 (NOP_EXPR, void_type_node, size_zero_node);
          break;
        }
    }

  /* If STMT is inside an exception region, record it in the generated
     expression.  */
  rn = lookup_stmt_eh_region (stmt);
  if (rn >= 0)
    {
      tree call = get_call_expr_in (t);

      ann = get_tree_common_ann (t);
      ann->rn = rn;

      /* For a CALL_EXPR on the RHS of an assignment, calls.c looks up
         the CALL_EXPR not the assignment statement for EH region number.  */
      if (call && call != t)
        {
          ann = get_tree_common_ann (call);
          ann->rn = rn;
        }
    }

  /* Set EXPR_LOCATION in all the embedded expressions.  */
  loc = gimple_location (stmt);
  walk_tree (&t, set_expr_location_r, (void *) &loc, NULL);

  TREE_BLOCK (t) = gimple_block (stmt);

  return t;
}

/* Release back to GC memory allocated by gimple_to_tree.  */

static void
release_stmt_tree (gimple stmt, tree stmt_tree)
{
  tree_ann_common_t ann;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (get_gimple_rhs_class (gimple_expr_code (stmt)) != GIMPLE_SINGLE_RHS)
        ggc_free (TREE_OPERAND (stmt_tree, 1));
      break;
    case GIMPLE_COND:
      ggc_free (COND_EXPR_COND (stmt_tree));
      break;
    case GIMPLE_RETURN:
      if (TREE_OPERAND (stmt_tree, 0)
          && TREE_CODE (TREE_OPERAND (stmt_tree, 0)) == MODIFY_EXPR)
        ggc_free (TREE_OPERAND (stmt_tree, 0));
      break;
    case GIMPLE_CALL:
      if (gimple_call_lhs (stmt))
        {
          ann = tree_common_ann (TREE_OPERAND (stmt_tree, 1));
          if (ann)
            ggc_free (ann);
          ggc_free (TREE_OPERAND (stmt_tree, 1));
        }
      break;
    default:
      break;
    }
  ann = tree_common_ann (stmt_tree);
  if (ann)
    ggc_free (ann);
  ggc_free (stmt_tree);
}

/* Verify that there is exactly one jump instruction emitted since LAST
   and attach a REG_BR_PROB note specifying its probability.
   ??? We really ought to pass the probability down to the RTL expanders
   and let them re-distribute it when the conditional expands into
   multiple conditionals.  This is however difficult to do.  */

void
add_reg_br_prob_note (rtx last, int probability)
{
  if (profile_status == PROFILE_ABSENT)
    return;
  for (last = NEXT_INSN (last); last && NEXT_INSN (last); last = NEXT_INSN (last))
    if (JUMP_P (last))
      {
        /* It is common to emit a condjump-around-jump sequence when we don't
           know how to reverse the conditional.  Special case this.  */
        if (!any_condjump_p (last)
            || !JUMP_P (NEXT_INSN (last))
            || !simplejump_p (NEXT_INSN (last))
            || !NEXT_INSN (NEXT_INSN (last))
            || !BARRIER_P (NEXT_INSN (NEXT_INSN (last)))
            || !NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))
            || !LABEL_P (NEXT_INSN (NEXT_INSN (NEXT_INSN (last))))
            || NEXT_INSN (NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))))
          goto failed;
        gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
        add_reg_note (last, REG_BR_PROB,
                      GEN_INT (REG_BR_PROB_BASE - probability));
        return;
      }
  if (!last || !JUMP_P (last) || !any_condjump_p (last))
    goto failed;
  gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
  add_reg_note (last, REG_BR_PROB, GEN_INT (probability));
  return;
failed:
  if (dump_file)
    fprintf (dump_file, "Failed to add probability note\n");
}
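
/* An illustrative note on the inverted probability above: in the
   condjump-around-jump sequence the conditional jump branches *around*
   an unconditional jump to the real target, so its sense is reversed
   relative to the original conditional; hence the note records
   REG_BR_PROB_BASE - probability instead of the probability itself.  */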

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}
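
/* A sketch of the pc_rtx sentinel used above: if partition P1 of
   variable v is given pseudo r100 and a second partition P2 of the same
   v is later given r101, the second call sees DECL_RTL (v) == r100 !=
   r101 and downgrades the recorded location to pc_rtx ("multiple
   places"); with optimization, var-tracking recovers the per-range
   locations later.  The register names here are illustrative only.  */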

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The Variable.  */
  tree decl;

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment of this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;
};

#define EOC ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* We have an interference graph between such objects.  This graph
   is lower triangular.  */
static bool *stack_vars_conflict;
static size_t stack_vars_conflict_alloc;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Discover the byte alignment to use for DECL.  Ignore any alignment
   we cannot satisfy given the expected alignment of the stack boundary.  */

static unsigned int
get_decl_align_unit (tree decl)
{
  unsigned int align;

  align = LOCAL_DECL_ALIGNMENT (decl);

  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    align = MAX_SUPPORTED_STACK_ALIGNMENT;

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < align)
        {
          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;
        }
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  return align / BITS_PER_UNIT;
}

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
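
/* A worked example of the rounding above, assuming FRAME_GROWS_DOWNWARD,
   frame_phase == 0, frame_offset == -16, size == 12 and align == 8:
   new_frame_offset becomes -16 - 12 = -28, and -28 & -8 = -32, so the
   variable is placed at offset -32.  ANDing with -align works because
   -align in two's complement has the low log2(align) bits clear, so it
   rounds toward more negative offsets.  */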

/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  stack_vars[stack_vars_num].decl = decl;
  stack_vars[stack_vars_num].offset = 0;
  stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  stack_vars[stack_vars_num].alignb = get_decl_align_unit (SSAVAR (decl));

  /* All variables are initially in their own partition.  */
  stack_vars[stack_vars_num].representative = stack_vars_num;
  stack_vars[stack_vars_num].next = EOC;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}

/* Compute the linear index of a lower-triangular coordinate (I, J).  */

static size_t
triangular_index (size_t i, size_t j)
{
  if (i < j)
    {
      size_t t;
      t = i, i = j, j = t;
    }
  return (i * (i + 1)) / 2 + j;
}
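
/* To illustrate the mapping: after the swap guaranteeing i >= j,
   triangular_index (3, 1) = 3*4/2 + 1 = 7, and triangular_index (1, 3)
   maps to the same slot, so the stored conflict relation is symmetric.
   Row i occupies i+1 entries, hence n objects need
   triangular_index (n-1, n-1) + 1 = n*(n+1)/2 slots in total.  */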

/* Ensure that STACK_VARS_CONFLICT is large enough for N objects.  */

static void
resize_stack_vars_conflict (size_t n)
{
  size_t size = triangular_index (n-1, n-1) + 1;

  if (size <= stack_vars_conflict_alloc)
    return;

  stack_vars_conflict = XRESIZEVEC (bool, stack_vars_conflict, size);
  memset (stack_vars_conflict + stack_vars_conflict_alloc, 0,
          (size - stack_vars_conflict_alloc) * sizeof (bool));
  stack_vars_conflict_alloc = size;
}

/* Make the decls associated with luids X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  size_t index = triangular_index (x, y);
  gcc_assert (index < stack_vars_conflict_alloc);
  stack_vars_conflict[index] = true;
}

/* Check whether the decls associated with luids X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  size_t index = triangular_index (x, y);
  gcc_assert (index < stack_vars_conflict_alloc);
  return stack_vars_conflict[index];
}

/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
        return true;

  return false;
}
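
/* For example, each of these makes the predicate above return true:
   union u { short s; int i; }, a struct with a union u member, and an
   array of union u; a struct containing only scalars, or an array of
   such structs, returns false.  */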

/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then we must add a conflict for these
   variables in the interference graph.  We also need to make sure to add
   conflicts for union containing structures.  Else RTL alias analysis
   comes along and due to type based aliasing rules decides that for two
   overlapping union temporaries { short s; int i; } accesses to the same
   mem through different types may not alias and happily reorders stores
   across life-time boundaries of the temporaries (See PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
        {
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
          if (aggr_i != aggr_j
              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case.  */
              || contains_union)
            add_stack_var_conflict (i, j);
        }
    }
}

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the size and type of the object.  */

static int
stack_var_size_cmp (const void *a, const void *b)
{
  HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
  HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
  tree decla, declb;
  unsigned int uida, uidb;

  if (sa < sb)
    return -1;
  if (sa > sb)
    return 1;
  decla = stack_vars[*(const size_t *)a].decl;
  declb = stack_vars[*(const size_t *)b].decl;
  /* For stack variables of the same size use the uid of the decls
     to make the sort stable.  Two SSA names are compared by their
     version, SSA names come before non-SSA names, and two normal
     decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return -1;
  if (uida > uidb)
    return 1;
  return 0;
}

/* If the points-to solution *PT points to variables that are in a partition
   together with other variables, add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert (visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}

/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with a single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_UID (decl);
          /* We should never end up partitioning SSA names (though they
             may end up on the stack).  Neither should we allocate stack
             space to something that is unused and thus unreferenced.  */
          gcc_assert (DECL_P (decl)
                      && referenced_var_lookup (uid));
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                         decl)) = name;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (NULL);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);
      add_partitioned_vars_to_ptset (&cfun->gimple_df->callused,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be
   non-conflicting.  Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the end
   of the partitioning process we'll have a nice block that is easy to lay
   out within the stack frame.  */

static void
union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
{
  size_t i, last;

  /* Update each element of partition B with the given offset,
     and merge them into partition A.  */
  for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
    {
      stack_vars[i].offset += offset;
      stack_vars[i].representative = a;
    }
  stack_vars[last].next = stack_vars[a].next;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  for (last = stack_vars_num, i = 0; i < last; ++i)
    if (stack_var_conflict_p (b, i))
      add_stack_var_conflict (a, i);
}

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
            offset(B) = O
            O += size(B)
            S -= size(B)
          }
        }
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);

  /* Special case: detect when all variables conflict, and thus we can't
     do anything during the partitioning loop.  It isn't uncommon (with
     C code at least) to declare all variables at the top of the function,
     and if we're not inlining, then all variables will be in the same scope.
     Take advantage of very fast libc routines for this scan.  */
  gcc_assert (sizeof (bool) == sizeof (char));
  if (memchr (stack_vars_conflict, false, stack_vars_conflict_alloc) == NULL)
    return;

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      HOST_WIDE_INT isize = stack_vars[i].size;
      HOST_WIDE_INT offset = 0;

      for (sj = si; sj-- > 0; )
        {
          size_t j = stack_vars_sorted[sj];
          HOST_WIDE_INT jsize = stack_vars[j].size;
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore objects too large for the remaining space.  */
          if (isize < jsize)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Refine the remaining space check to include alignment.  */
          if (offset & (jalign - 1))
            {
              HOST_WIDE_INT toff = offset;
              toff += jalign - 1;
              toff &= -(HOST_WIDE_INT)jalign;
              if (isize - (toff - offset) < jsize)
                continue;

              isize -= toff - offset;
              offset = toff;
            }

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j, offset);

          isize -= jsize;
          if (isize == 0)
            break;
        }
    }

  if (optimize)
    update_alias_info_with_stack_vars ();
}
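
/* A small worked example of the binpacking above, assuming four
   mutually non-conflicting objects with sorted sizes {4, 4, 8, 16}:
   the scan starts at the largest object A (size 16, S = 16), packs the
   8-byte object at offset 0 (S = 8), then the 4-byte objects at offsets
   8 and 12, exhausting S; all four share A's 16-byte partition instead
   of occupying 32 bytes.  */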

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
          fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
                   stack_vars[j].offset);
        }
    }
}

/* Assign rtl to DECL at frame offset OFFSET.  */

static void
expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
{
  /* Alignment is unsigned.  */
  unsigned HOST_WIDE_INT align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (virtual_stack_vars_rtx, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0)
        align = STACK_BOUNDARY;
      else if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = MAX_SUPPORTED_STACK_ALIGNMENT;

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}
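
/* To illustrate the "offset & -offset" idiom above: it isolates the
   lowest set bit of the offset, which is the largest power-of-two byte
   alignment the placement guarantees; e.g. an offset of 24 yields
   24 & -24 = 8, i.e. 8-byte alignment.  An offset of 0 has no set bit,
   and the align == 0 case falls back to STACK_BOUNDARY.  */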

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      if ((TREE_CODE (stack_vars[i].decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, stack_vars[i].decl)]
           : DECL_RTL (stack_vars[i].decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (stack_vars[i].decl))
        continue;

      offset = alloc_stack_frame_space (stack_vars[i].size,
                                        stack_vars[i].alignb);

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
          expand_one_stack_var_at (stack_vars[j].decl,
                                   stack_vars[j].offset + offset);
        }
    }
}

/* Take into account all sizes of partitions and reset DECL_RTLs.  */

static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset, align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  align = get_decl_align_unit (SSAVAR (var));
  offset = alloc_stack_frame_space (size, align);

  expand_one_stack_var_at (var, offset);
}
1184
1f6d3a08
RH
1185/* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1186 that will reside in a hard register. */
1187
1188static void
1189expand_one_hard_reg_var (tree var)
1190{
1191 rest_of_decl_compilation (var, 0, 0);
1192}
1193
1194/* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1195 that will reside in a pseudo register. */
1196
1197static void
1198expand_one_register_var (tree var)
1199{
4e3825db
MM
1200 tree decl = SSAVAR (var);
1201 tree type = TREE_TYPE (decl);
1f6d3a08
RH
1202 int unsignedp = TYPE_UNSIGNED (type);
1203 enum machine_mode reg_mode
4e3825db 1204 = promote_mode (type, DECL_MODE (decl), &unsignedp, 0);
1f6d3a08
RH
1205 rtx x = gen_reg_rtx (reg_mode);
1206
4e3825db 1207 set_rtl (var, x);
1f6d3a08
RH
1208
1209 /* Note if the object is a user variable. */
4e3825db
MM
1210 if (!DECL_ARTIFICIAL (decl))
1211 mark_user_reg (x);
1f6d3a08 1212
61021c2c 1213 if (POINTER_TYPE_P (type))
4e3825db 1214 mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
1f6d3a08
RH
1215}
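
/* A note on promote_mode above, as an illustration: on targets whose
   PROMOTE_MODE definition widens small integer modes, a QImode local
   may be given a word-mode pseudo here, with UNSIGNEDP recording the
   extension used.  The exact choice is entirely target-defined.  */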

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}

/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return the stack usage this variable is supposed to take.  */

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  tree origvar = var;
  var = SSAVAR (var);

  if (SUPPORTS_STACK_ALIGNMENT
      && TREE_TYPE (var) != error_mark_node
      && TREE_CODE (var) == VAR_DECL)
    {
      unsigned int align;

      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      if (crtl->stack_alignment_estimated < align)
        {
          /* stack_alignment_estimated shouldn't change after stack
             realign decision made.  */
          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;
        }
    }

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t))
      expand_one_var (t, toplevel, true);

  this_sv_num = stack_vars_num;

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  Do make certain that a
     variable conflicts with itself.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;
      resize_stack_vars_conflict (new_sv_num);

      for (i = old_sv_num; i < new_sv_num; ++i)
        for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
          add_stack_var_conflict (i, j);
    }
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}

/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}
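
/* For example, with flag_stack_protect == 2 (-fstack-protector-all) a
   plain char buf[64] classifies as a large character array with no
   aggregate bit and lands in phase 1, while int arr[10] only sets
   SPCT_HAS_ARRAY and lands in phase 2, as does a struct wrapping a
   char array (the aggregate bit excludes it from phase 1).  With plain
   -fstack-protector, only decls containing a char array of at least
   ssp-buffer-size bytes get a nonzero phase.  */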

/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree,
   accounting for the stack space that used variables would take.  The
   interference-graph bookkeeping mirrors expand_used_vars_for_block.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static HOST_WIDE_INT
account_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;
  HOST_WIDE_INT size = 0;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Account for all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t))
      size += expand_one_var (t, toplevel, false);

  this_sv_num = stack_vars_num;

  /* Account for all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    size += account_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  Do make certain that a
     variable conflicts with itself.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;
      resize_stack_vars_conflict (new_sv_num);

      for (i = old_sv_num; i < new_sv_num; ++i)
        for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
          add_stack_var_conflict (i, j);
    }
  return size;
}

/* Prepare for expanding variables.  */

static void
init_vars_expansion (void)
{
  tree t;
  /* Set TREE_USED on all variables in the local_decls.  */
  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    TREE_USED (TREE_VALUE (t)) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */

static void
fini_vars_expansion (void)
{
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);
  XDELETEVEC (stack_vars_conflict);
  stack_vars = NULL;
  stack_vars_alloc = stack_vars_num = 0;
  stack_vars_conflict = NULL;
  stack_vars_conflict_alloc = 0;
}

/* Make a fair guess for the size of the stack frame of the current
   function.  This doesn't have to be exact, the result is only used
   in the inline heuristics.  So we don't want to run the full stack
   var packing algorithm (which is quadratic in the number of stack
   vars).  Instead, we calculate the total size of all stack vars.
   This turns out to be a pretty fair estimate -- packing of stack
   vars doesn't happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (void)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree t, outer_block = DECL_INITIAL (current_function_decl);

  init_vars_expansion ();

  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    {
      tree var = TREE_VALUE (t);

      if (TREE_USED (var))
        size += expand_one_var (var, true, false);
      TREE_USED (var) = 1;
    }
  size += account_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
      fini_vars_expansion ();
    }

  return size;
}
1665
1f6d3a08 1666/* Expand all variables used in the function. */
727a31fa
RH
1667
1668static void
1669expand_used_vars (void)
1670{
802e9f8e 1671 tree t, next, outer_block = DECL_INITIAL (current_function_decl);
4e3825db 1672 unsigned i;
727a31fa 1673
1f6d3a08
RH
1674 /* Compute the phase of the stack frame for this function. */
1675 {
1676 int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
1677 int off = STARTING_FRAME_OFFSET % align;
1678 frame_phase = off ? align - off : 0;
1679 }

  init_vars_expansion ();

  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = partition_to_var (SA.map, i);

      gcc_assert (is_gimple_reg (var));
      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        expand_one_var (var, true, true);
      else
        {
          /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
             contain the default def (representing the parm or result itself)
             we don't do anything here.  But those which don't contain the
             default def (representing a temporary based on the parm/result)
             we need to allocate space just like for normal VAR_DECLs.  */
          if (!bitmap_bit_p (SA.partition_has_default_def, i))
            {
              expand_one_var (var, true, true);
              gcc_assert (SA.partition_to_pseudo[i]);
            }
        }
    }

  /* At this point all variables on the local_decls list with TREE_USED
     set are not associated with any block scope.  Lay them out.  */
  t = cfun->local_decls;
  cfun->local_decls = NULL_TREE;
  for (; t; t = next)
    {
      tree var = TREE_VALUE (t);
      bool expand_now = false;

      next = TREE_CHAIN (t);

      /* Expanded above already.  */
      if (is_gimple_reg (var))
        {
          TREE_USED (var) = 0;
          ggc_free (t);
          continue;
        }
      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* If the variable is not associated with any block, then it
         was created by the optimizers, and could be live anywhere
         in the function.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        {
          expand_one_var (var, true, true);
          if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
            {
              rtx rtl = DECL_RTL_IF_SET (var);

              /* Keep artificial non-ignored vars in cfun->local_decls
                 chain until instantiate_decls.  */
              if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
                {
                  TREE_CHAIN (t) = cfun->local_decls;
                  cfun->local_decls = t;
                  continue;
                }
            }
        }

      ggc_free (t);
    }

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Due to the way alias sets work, no variables with non-conflicting
         alias sets may be assigned the same address.  Add conflicts to
         reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
          && (cfun->calls_alloca || has_protected_decls)))
    create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == 2)
            expand_stack_vars (stack_protect_decl_phase_2);
        }

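      /* Variables not assigned to one of the protection phases above
         are allocated by this final pass, which uses no filtering
         predicate.  */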
      expand_stack_vars (NULL);

      fini_vars_expansion ();
    }

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
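      /* Illustrative example (not part of the original source): with
         ALIGN == 16 and a downward-growing frame, frame_offset == -20 is
         masked down to -32; with an upward-growing frame, 20 first becomes
         35 and is then masked to 32, i.e. rounded up to the next multiple
         of 16.  */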
    }
}

/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}

/* Maps the blocks that do not contain tree labels to rtx labels.  */

static struct pointer_map_t *lab_rtx_for_bb;
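
/* Entries are created lazily by label_rtx_for_bb below, when a jump to a
   block is expanded before the block itself; expand_gimple_basic_block
   emits the recorded label once it reaches that block.  */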

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;
  gimple lab_stmt;
  void **elt;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  elt = pointer_map_contains (lab_rtx_for_bb, bb);
  if (elt)
    return (rtx) *elt;

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      lab_stmt = gsi_stmt (gsi);
      if (gimple_code (lab_stmt) != GIMPLE_LABEL)
        break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
        break;

      return label_rtx (lab);
    }

  elt = pointer_map_insert (lab_rtx_for_bb, bb);
  *elt = gen_label_rtx ();
  return (rtx) *elt;
}

/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  */

static void
maybe_cleanup_end_of_block (edge e)
{
  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))
    {
      basic_block bb = e->src;
      rtx insn;
      remove_edge (e);
      /* Now we have a single successor block.  If we have insns to
         insert on the remaining edge, we will potentially insert them
         at the end of this block (if the dest block isn't feasible)
         in order to avoid splitting the edge.  This insertion will take
         place in front of the last jump.  But we might have emitted
         multiple jumps (conditional and one unconditional) to the
         same destination.  Inserting in front of the last one then
         is a problem.  See PR 40021.  We fix this by deleting all
         jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
         confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != BB_HEAD (bb);)
        {
          insn = PREV_INSN (insn);
          if (JUMP_P (NEXT_INSN (insn)))
            delete_insn (NEXT_INSN (insn));
        }
    }
}

/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */

static basic_block
expand_gimple_cond (basic_block bb, gimple stmt)
{
  basic_block new_bb, dest;
  edge new_edge;
  edge true_edge;
  edge false_edge;
  tree pred = gimple_cond_pred_to_tree (stmt);
  rtx last2, last;

  last2 = last = get_last_insn ();

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  if (gimple_has_location (stmt))
    {
      set_curr_insn_source_location (gimple_location (stmt));
      set_curr_insn_block (gimple_block (stmt));
    }

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     a two-way jump that needs to be decomposed into two basic blocks.  */
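  /* Three shapes are handled below: if the false edge falls through to
     the next block we emit just the jump for the true edge; if the true
     edge falls through we emit just the inverted jump; otherwise we emit
     both a conditional and an unconditional jump, and split the
     unconditional jump off into a new basic block.  */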
  if (false_edge->dest == bb->next_bb)
    {
      jumpif (pred, label_rtx_for_bb (true_edge->dest));
      add_reg_br_prob_note (last, true_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (true_edge->goto_locus)
        {
          set_curr_insn_source_location (true_edge->goto_locus);
          set_curr_insn_block (true_edge->goto_block);
          true_edge->goto_locus = curr_insn_locator ();
        }
      true_edge->goto_block = NULL;
      false_edge->flags |= EDGE_FALLTHRU;
      ggc_free (pred);
      maybe_cleanup_end_of_block (false_edge);
      return NULL;
    }
  if (true_edge->dest == bb->next_bb)
    {
      jumpifnot (pred, label_rtx_for_bb (false_edge->dest));
      add_reg_br_prob_note (last, false_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (false_edge->goto_locus)
        {
          set_curr_insn_source_location (false_edge->goto_locus);
          set_curr_insn_block (false_edge->goto_block);
          false_edge->goto_locus = curr_insn_locator ();
        }
      false_edge->goto_block = NULL;
      true_edge->flags |= EDGE_FALLTHRU;
      ggc_free (pred);
      maybe_cleanup_end_of_block (true_edge);
      return NULL;
    }

  jumpif (pred, label_rtx_for_bb (true_edge->dest));
  add_reg_br_prob_note (last, true_edge->probability);
  last = get_last_insn ();
  if (false_edge->goto_locus)
    {
      set_curr_insn_source_location (false_edge->goto_locus);
      set_curr_insn_block (false_edge->goto_block);
      false_edge->goto_locus = curr_insn_locator ();
    }
  false_edge->goto_block = NULL;
  emit_jump (label_rtx_for_bb (false_edge->dest));

  BB_END (bb) = last;
  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count;
  new_bb->frequency = EDGE_FREQUENCY (false_edge);
  new_edge = make_edge (new_bb, dest, 0);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = new_bb->count;
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  if (true_edge->goto_locus)
    {
      set_curr_insn_source_location (true_edge->goto_locus);
      set_curr_insn_block (true_edge->goto_block);
      true_edge->goto_locus = curr_insn_locator ();
    }
  true_edge->goto_block = NULL;

  ggc_free (pred);
  return new_bb;
}

/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_CALL
   that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
   generated a tail call (something that might be denied by the ABI
   rules governing the call; see calls.c).

   Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
   can still reach the rest of BB.  The case here is __builtin_sqrt,
   where the NaN result goes through the external function (with a
   tailcall) and the normal result happens via a sqrt instruction.  */

static basic_block
expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
{
  rtx last2, last;
  edge e;
  edge_iterator ei;
  int probability;
  gcov_type count;
  tree stmt_tree = gimple_to_tree (stmt);

  last2 = last = get_last_insn ();

  expand_expr_stmt (stmt_tree);

  release_stmt_tree (stmt, stmt_tree);

  for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
    if (CALL_P (last) && SIBLING_CALL_P (last))
      goto found;

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  *can_fallthru = true;
  return NULL;

 found:
  /* ??? Wouldn't it be better to just reset any pending stack adjust?
     Any instructions emitted here are about to be deleted.  */
  do_pending_stack_adjust ();

  /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
  /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
     EH or abnormal edges, we shouldn't have created a tail call in
     the first place.  So it seems to me we should just be removing
     all edges here, or redirecting the existing fallthru edge to
     the exit block.  */

  probability = 0;
  count = 0;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
        {
          if (e->dest != EXIT_BLOCK_PTR)
            {
              e->dest->count -= e->count;
              e->dest->frequency -= EDGE_FREQUENCY (e);
              if (e->dest->count < 0)
                e->dest->count = 0;
              if (e->dest->frequency < 0)
                e->dest->frequency = 0;
            }
          count += e->count;
          probability += e->probability;
          remove_edge (e);
        }
      else
        ei_next (&ei);
    }

  /* This is somewhat ugly: the call_expr expander often emits instructions
     after the sibcall (to perform the function return).  These confuse the
     find_many_sub_basic_blocks code, so we need to get rid of these.  */
  last = NEXT_INSN (last);
  gcc_assert (BARRIER_P (last));

  *can_fallthru = false;
  while (NEXT_INSN (last))
    {
      /* For instance, the sqrt builtin expander expands an if/then/else
         with a sibcall in the then-arm and a label for the else-arm.  */
      if (LABEL_P (NEXT_INSN (last)))
        {
          *can_fallthru = true;
          break;
        }
      delete_insn (NEXT_INSN (last));
    }

  e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
  e->probability += probability;
  e->count += count;
  BB_END (bb) = last;
  update_bb_for_insn (bb);

  if (NEXT_INSN (last))
    {
      bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);

      last = BB_END (bb);
      if (BARRIER_P (last))
        BB_END (bb) = PREV_INSN (last);
    }

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  return bb;
}

/* Expand basic block BB from GIMPLE trees to RTL.  */

static basic_block
expand_gimple_basic_block (basic_block bb)
{
  gimple_stmt_iterator gsi;
  gimple_seq stmts;
  gimple stmt = NULL;
  rtx note, last;
  edge e;
  edge_iterator ei;
  void **elt;

  if (dump_file)
    fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
             bb->index);

  /* Note that since we are now transitioning from GIMPLE to RTL, we
     cannot use the gsi_*_bb() routines because they expect the basic
     block to be in GIMPLE, instead of RTL.  Therefore, we need to
     access the BB sequence directly.  */
  stmts = bb_seq (bb);
  bb->il.gimple = NULL;
  rtl_profile_for_bb (bb);
  init_rtl_bb_info (bb);
  bb->flags |= BB_RTL;

  /* Remove the RETURN_EXPR if we may fall through to the exit
     instead.  */
  gsi = gsi_last (stmts);
  if (!gsi_end_p (gsi)
      && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
    {
      gimple ret_stmt = gsi_stmt (gsi);

      gcc_assert (single_succ_p (bb));
      gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);

      if (bb->next_bb == EXIT_BLOCK_PTR
          && !gimple_return_retval (ret_stmt))
        {
          gsi_remove (&gsi, false);
          single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
        }
    }

  gsi = gsi_start (stmts);
  if (!gsi_end_p (gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
        stmt = NULL;
    }

  elt = pointer_map_contains (lab_rtx_for_bb, bb);

  if (stmt || elt)
    {
      last = get_last_insn ();

      if (stmt)
        {
          tree stmt_tree = gimple_to_tree (stmt);
          expand_expr_stmt (stmt_tree);
          release_stmt_tree (stmt, stmt_tree);
          gsi_next (&gsi);
        }

      if (elt)
        emit_label ((rtx) *elt);

      /* Java emits line number notes at the top of labels.
         ??? Make this go away once line number notes are obsoleted.  */
      BB_HEAD (bb) = NEXT_INSN (last);
      if (NOTE_P (BB_HEAD (bb)))
        BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
      note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));

      maybe_dump_rtl_for_gimple_stmt (stmt, last);
    }
  else
    note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);

  NOTE_BASIC_BLOCK (note) = bb;

  for (; !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple stmt = gsi_stmt (gsi);
      basic_block new_bb;

      /* Expand this statement, then evaluate the resulting RTL and
         fixup the CFG accordingly.  */
      if (gimple_code (stmt) == GIMPLE_COND)
        {
          new_bb = expand_gimple_cond (bb, stmt);
          if (new_bb)
            return new_bb;
        }
      else
        {
          if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
            {
              bool can_fallthru;
              new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
              if (new_bb)
                {
                  if (can_fallthru)
                    bb = new_bb;
                  else
                    return new_bb;
                }
            }
          else
            {
              def_operand_p def_p;
              tree stmt_tree;
              def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);

              if (def_p != NULL)
                {
                  /* Ignore this stmt if it is in the list of
                     replaceable expressions.  */
                  if (SA.values
                      && bitmap_bit_p (SA.values,
                                       SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
                    continue;
                }
              stmt_tree = gimple_to_tree (stmt);
              last = get_last_insn ();
              expand_expr_stmt (stmt_tree);
              maybe_dump_rtl_for_gimple_stmt (stmt, last);
              release_stmt_tree (stmt, stmt_tree);
            }
        }
    }

  /* Expand implicit goto and convert goto_locus.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if (e->goto_locus && e->goto_block)
        {
          set_curr_insn_source_location (e->goto_locus);
          set_curr_insn_block (e->goto_block);
          e->goto_locus = curr_insn_locator ();
        }
      e->goto_block = NULL;
      if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
        {
          emit_jump (label_rtx_for_bb (e->dest));
          e->flags &= ~EDGE_FALLTHRU;
        }
    }

  do_pending_stack_adjust ();

  /* Find the block tail.  The last insn in the block is the insn
     before a barrier and/or table jump insn.  */
  last = get_last_insn ();
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  if (JUMP_TABLE_DATA_P (last))
    last = PREV_INSN (PREV_INSN (last));
  BB_END (bb) = last;

  update_bb_for_insn (bb);

  return bb;
}

/* Create a basic block for initialization code.  */

static basic_block
construct_init_block (void)
{
  basic_block init_block, first_block;
  edge e = NULL;
  int flags;

  /* Multiple entry points not supported yet.  */
  gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
  init_rtl_bb_info (ENTRY_BLOCK_PTR);
  init_rtl_bb_info (EXIT_BLOCK_PTR);
  ENTRY_BLOCK_PTR->flags |= BB_RTL;
  EXIT_BLOCK_PTR->flags |= BB_RTL;

  e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);

  /* When the entry edge points to the first basic block, we don't need
     a jump; otherwise we have to jump to the proper target.  */
  if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
    {
      tree label = gimple_block_label (e->dest);

      emit_jump (label_rtx (label));
      flags = 0;
    }
  else
    flags = EDGE_FALLTHRU;

  init_block = create_basic_block (NEXT_INSN (get_insns ()),
                                   get_last_insn (),
                                   ENTRY_BLOCK_PTR);
  init_block->frequency = ENTRY_BLOCK_PTR->frequency;
  init_block->count = ENTRY_BLOCK_PTR->count;
  if (e)
    {
      first_block = e->dest;
      redirect_edge_succ (e, init_block);
      e = make_edge (init_block, first_block, flags);
    }
  else
    e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = ENTRY_BLOCK_PTR->count;

  update_bb_for_insn (init_block);
  return init_block;
}

/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

static void
set_block_levels (tree block, int level)
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}
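
/* For example (illustrative), calling this on DECL_INITIAL of the
   current function gives the outermost scope level 0, its subblocks
   level 1, their subblocks level 2, and so on; sibling blocks reached
   through BLOCK_CHAIN all share the same level.  */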

/* Create a block containing landing pads and similar stuff.  */

static void
construct_exit_block (void)
{
  rtx head = get_last_insn ();
  rtx end;
  basic_block exit_block;
  edge e, e2;
  unsigned ix;
  edge_iterator ei;
  rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);

  rtl_profile_for_bb (EXIT_BLOCK_PTR);

  /* Make sure the locus is set to the end of the function, so that
     epilogue line numbers and warnings are set properly.  */
  if (cfun->function_end_locus != UNKNOWN_LOCATION)
    input_location = cfun->function_end_locus;

  /* The following insns belong to the top scope.  */
  set_curr_insn_block (DECL_INITIAL (current_function_decl));

  /* Generate rtl for function exit.  */
  expand_function_end ();

  end = get_last_insn ();
  if (head == end)
    return;
  /* While emitting the function end we could move the end of the last
     basic block.  */
  BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
  while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
    head = NEXT_INSN (head);
  exit_block = create_basic_block (NEXT_INSN (head), end,
                                   EXIT_BLOCK_PTR->prev_bb);
  exit_block->frequency = EXIT_BLOCK_PTR->frequency;
  exit_block->count = EXIT_BLOCK_PTR->count;

  ix = 0;
  while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
    {
      e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
      if (!(e->flags & EDGE_ABNORMAL))
        redirect_edge_succ (e, exit_block);
      else
        ix++;
    }
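  /* Abnormal predecessors, such as the EDGE_ABNORMAL | EDGE_SIBCALL
     edges created by expand_gimple_tailcall above, are left pointing
     at the exit block itself.  */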

  e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = EXIT_BLOCK_PTR->count;
  FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
    if (e2 != e)
      {
        e->count -= e2->count;
        exit_block->count -= e2->count;
        exit_block->frequency -= EDGE_FREQUENCY (e2);
      }
  if (e->count < 0)
    e->count = 0;
  if (exit_block->count < 0)
    exit_block->count = 0;
  if (exit_block->frequency < 0)
    exit_block->frequency = 0;
  update_bb_for_insn (exit_block);
}

/* Helper function for discover_nonconstant_array_refs.
   Look for ARRAY_REF nodes with non-constant indexes and mark them
   addressable.  */

static tree
discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
                                   void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
    {
      while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
              && is_gimple_min_invariant (TREE_OPERAND (t, 1))
              && (!TREE_OPERAND (t, 2)
                  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
             || (TREE_CODE (t) == COMPONENT_REF
                 && (!TREE_OPERAND (t, 2)
                     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
             || TREE_CODE (t) == BIT_FIELD_REF
             || TREE_CODE (t) == REALPART_EXPR
             || TREE_CODE (t) == IMAGPART_EXPR
             || TREE_CODE (t) == VIEW_CONVERT_EXPR
             || CONVERT_EXPR_P (t))
        t = TREE_OPERAND (t, 0);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
        {
          t = get_base_address (t);
          if (t && DECL_P (t)
              && DECL_MODE (t) != BLKmode)
            TREE_ADDRESSABLE (t) = 1;
        }

      *walk_subtrees = 0;
    }

  return NULL_TREE;
}

/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in a single register.  Discover such
   expressions and mark variables as addressable to avoid this
   scenario.  */
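/* For instance (illustrative), given "int v[2];" accessed as "v[i]" with
   a non-constant I, V might otherwise be promoted to a single register;
   marking V addressable keeps it in memory, where the variable index can
   be expanded as an address calculation.  */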

static void
discover_nonconstant_array_refs (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (bb)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple stmt = gsi_stmt (gsi);
        walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
      }
}

/* This function sets crtl->args.internal_arg_pointer to a virtual
   register if DRAP is needed.  The local register allocator will
   replace virtual_incoming_args_rtx with the virtual register.  */
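/* Background note (not part of the original source): DRAP is the
   dynamic realign argument pointer.  When the prologue realigns the
   stack, incoming arguments can no longer be addressed relative to the
   realigned stack pointer, so they are reached through this separately
   preserved pointer instead.  */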

static void
expand_stack_alignment (void)
{
  rtx drap_rtx;
  unsigned int preferred_stack_boundary;

  if (! SUPPORTS_STACK_ALIGNMENT)
    return;

  if (cfun->calls_alloca
      || cfun->has_nonlocal_label
      || crtl->has_nonlocal_goto)
    crtl->need_drap = true;

  gcc_assert (crtl->stack_alignment_needed
              <= crtl->stack_alignment_estimated);

  /* Update crtl->stack_alignment_estimated and use it later to align
     stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
     exceptions since callgraph doesn't collect incoming stack alignment
     in this case.  */
  if (flag_non_call_exceptions
      && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
    preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
  else
    preferred_stack_boundary = crtl->preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_estimated)
    crtl->stack_alignment_estimated = preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_needed)
    crtl->stack_alignment_needed = preferred_stack_boundary;

  crtl->stack_realign_needed
    = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
  crtl->stack_realign_tried = crtl->stack_realign_needed;

  crtl->stack_realign_processed = true;

  /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
     alignment.  */
  gcc_assert (targetm.calls.get_drap_rtx != NULL);
  drap_rtx = targetm.calls.get_drap_rtx ();

  /* stack_realign_drap and drap_rtx must match.  */
  gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));

  /* Do nothing if NULL is returned, which means DRAP is not needed.  */
  if (NULL != drap_rtx)
    {
      crtl->args.internal_arg_pointer = drap_rtx;

      /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
         needed.  */
      fixup_tail_calls ();
    }
}

/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do conversion per basic block and preserve/update the tree CFG.
   This implies we have to do some magic as the CFG can simultaneously
   consist of basic blocks containing RTL and GIMPLE trees.  This can
   confuse the CFG hooks, so be careful not to manipulate the CFG during
   the expansion.  */
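/* Overview of the steps below: rewrite the function out of SSA form,
   lay out the variables, set up parameters and the return value, build
   the init block, expand each basic block in turn, build the exit
   block, commit pending edge insertions, then rediscover sub-basic
   blocks and clean up the now-RTL CFG.  */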

static unsigned int
gimple_expand_cfg (void)
{
  basic_block bb, init_block;
  sbitmap blocks;
  edge_iterator ei;
  edge e;
  unsigned i;

  rewrite_out_of_ssa (&SA);
  SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions,
                                           sizeof (rtx));

  /* Some backends want to know that we are expanding to RTL.  */
  currently_expanding_to_rtl = 1;

  rtl_profile_for_bb (ENTRY_BLOCK_PTR);

  insn_locators_alloc ();
  if (!DECL_IS_BUILTIN (current_function_decl))
    {
      /* Eventually, all FEs should explicitly set function_start_locus.  */
      if (cfun->function_start_locus == UNKNOWN_LOCATION)
        set_curr_insn_source_location
          (DECL_SOURCE_LOCATION (current_function_decl));
      else
        set_curr_insn_source_location (cfun->function_start_locus);
    }
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  prologue_locator = curr_insn_locator ();

  /* Make sure the first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NOTE_INSN_DELETED);

  /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
  discover_nonconstant_array_refs ();

  targetm.expand_to_rtl_hook ();
  crtl->stack_alignment_needed = STACK_BOUNDARY;
  crtl->max_used_stack_slot_alignment = STACK_BOUNDARY;
  crtl->stack_alignment_estimated = STACK_BOUNDARY;
  crtl->preferred_stack_boundary = STACK_BOUNDARY;
  cfun->cfg->max_jumptable_ents = 0;

  /* Expand the variables recorded during gimple lowering.  */
  expand_used_vars ();

  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
    {
      if (cfun->calls_alloca)
        warning (OPT_Wstack_protector,
                 "not protecting local variables: variable length buffer");
      if (has_short_buffer && !crtl->stack_protect_guard)
        warning (OPT_Wstack_protector,
                 "not protecting function: no buffer at least %d bytes long",
                 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE));
    }

  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* Now that we also have the parameter RTXs, copy them over to our
     partitions.  */
  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));

      if (TREE_CODE (var) != VAR_DECL
          && !SA.partition_to_pseudo[i])
        SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
      gcc_assert (SA.partition_to_pseudo[i]);

      /* If this decl was marked as living in multiple places, reset
         this now to NULL.  */
      if (DECL_RTL_IF_SET (var) == pc_rtx)
        SET_DECL_RTL (var, NULL);

      /* Some RTL parts really want to look at DECL_RTL(x) when x
         was a decl marked in REG_ATTR or MEM_ATTR.  We could use
         SET_DECL_RTL here making this available, but that would mean
         to select one of the potentially many RTLs for one DECL.  Instead
         of doing that we simply reset the MEM_EXPR of the RTL in question,
         then nobody can get at it and hence nobody can call DECL_RTL on it.  */
      if (!DECL_RTL_SET_P (var))
        {
          if (MEM_P (SA.partition_to_pseudo[i]))
            set_mem_expr (SA.partition_to_pseudo[i], NULL);
        }
    }

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();

  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (crtl->stack_protect_guard)
    stack_protect_prologue ();

  /* Update stack boundary if needed.  */
  if (SUPPORTS_STACK_ALIGNMENT)
    {
      /* Call update_stack_boundary here to update incoming stack
         boundary before TARGET_FUNCTION_OK_FOR_SIBCALL is called.
         TARGET_FUNCTION_OK_FOR_SIBCALL needs to know the accurate
         incoming stack alignment to check if it is OK to perform
         sibcall optimization since sibcall optimization will only
         align the outgoing stack to incoming stack boundary.  */
      if (targetm.calls.update_stack_boundary)
        targetm.calls.update_stack_boundary ();

      /* The incoming stack frame has to be aligned at least at
         parm_stack_boundary.  */
      gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);
    }

  expand_phi_nodes (&SA);

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
     remaining edges later.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    e->flags &= ~EDGE_EXECUTABLE;

  lab_rtx_for_bb = pointer_map_create ();
  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
    bb = expand_gimple_basic_block (bb);

  execute_free_datastructures ();
  finish_out_of_ssa (&SA);

  /* Expansion is used by optimization passes too, set maybe_hot_insn_p
     conservatively to true until they are all profile aware.  */
  pointer_map_destroy (lab_rtx_for_bb);
  free_histograms ();

  construct_exit_block ();
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  insn_locators_finalize ();

  /* Convert tree EH labels to RTL EH labels and zap the tree EH table.  */
  convert_from_eh_region_ranges ();
  set_eh_throw_stmt_table (cfun, NULL);

  rebuild_jump_labels (get_insns ());
  find_exception_handler_labels ();

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          if (e->insns.r)
            commit_one_edge_insertion (e);
          else
            ei_next (&ei);
        }
    }

  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
        {
          /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
          e->flags &= ~EDGE_EXECUTABLE;

          /* At the moment not all abnormal edges match the RTL
             representation.  It is safe to remove them here as
             find_many_sub_basic_blocks will rediscover them.
             In the future we should get this fixed properly.  */
          if ((e->flags & EDGE_ABNORMAL)
              && !(e->flags & EDGE_SIBCALL))
            remove_edge (e);
          else
            ei_next (&ei);
        }
    }

  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  sbitmap_free (blocks);
  purge_all_dead_edges ();

  compact_blocks ();

  expand_stack_alignment ();

#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  if (dump_file)
    {
      fprintf (dump_file,
               "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
         parent != NULL_TREE;
         parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
        TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;
  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);
  default_rtl_profile ();
  return 0;
}

struct rtl_opt_pass pass_expand =
{
 {
  RTL_PASS,
  "expand",                             /* name */
  NULL,                                 /* gate */
  gimple_expand_cfg,                    /* execute */
  NULL,                                 /* sub */
  NULL,                                 /* next */
  0,                                    /* static_pass_number */
  TV_EXPAND,                            /* tv_id */
  PROP_ssa | PROP_gimple_leh | PROP_cfg,/* properties_required */
  PROP_rtl,                             /* properties_provided */
  PROP_ssa | PROP_trees,                /* properties_destroyed */
  TODO_verify_ssa | TODO_verify_flow
    | TODO_verify_stmts,                /* todo_flags_start */
  TODO_dump_func
    | TODO_ggc_collect                  /* todo_flags_finish */
 }
};