/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "target.h"


/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    t = gimple_assign_rhs1 (stmt);
  else
    gcc_unreachable ();

  return t;
}
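
/* Illustrative note (not from the original sources): for a GIMPLE
   assignment such as "a = b + c", PLUS_EXPR classifies as
   GIMPLE_BINARY_RHS, so gimple_assign_rhs_to_tree rebuilds "b + c"
   with build2.  For "a = -b" (GIMPLE_UNARY_RHS) it uses build1, and
   for a plain copy "a = b" (GIMPLE_SINGLE_RHS) the RHS operand is
   already a complete tree and is returned unchanged.  */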

/* Return an expression tree corresponding to the PREDICATE of GIMPLE_COND
   statement STMT.  */

static tree
gimple_cond_pred_to_tree (gimple stmt)
{
  return build2 (gimple_cond_code (stmt), boolean_type_node,
                 gimple_cond_lhs (stmt), gimple_cond_rhs (stmt));
}

/* Helper for gimple_to_tree.  Set EXPR_LOCATION for every expression
   inside *TP.  DATA is the location to set.  */

static tree
set_expr_location_r (tree *tp, int *ws ATTRIBUTE_UNUSED, void *data)
{
  location_t *loc = (location_t *) data;
  if (EXPR_P (*tp))
    SET_EXPR_LOCATION (*tp, *loc);

  return NULL_TREE;
}


/* RTL expansion has traditionally been done on trees, so the
   transition to doing it on GIMPLE tuples is very invasive to the RTL
   expander.  To facilitate the transition, this function takes a
   GIMPLE tuple STMT and returns the same statement in the form of a
   tree.  */

static tree
gimple_to_tree (gimple stmt)
{
  tree t;
  int rn;
  tree_ann_common_t ann;
  location_t loc;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      {
        tree lhs = gimple_assign_lhs (stmt);

        t = gimple_assign_rhs_to_tree (stmt);
        t = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, t);
        if (gimple_assign_nontemporal_move_p (stmt))
          MOVE_NONTEMPORAL (t) = true;
      }
      break;

    case GIMPLE_COND:
      t = gimple_cond_pred_to_tree (stmt);
      t = build3 (COND_EXPR, void_type_node, t, NULL_TREE, NULL_TREE);
      break;

    case GIMPLE_GOTO:
      t = build1 (GOTO_EXPR, void_type_node, gimple_goto_dest (stmt));
      break;

    case GIMPLE_LABEL:
      t = build1 (LABEL_EXPR, void_type_node, gimple_label_label (stmt));
      break;

    case GIMPLE_RETURN:
      {
        tree retval = gimple_return_retval (stmt);

        if (retval && retval != error_mark_node)
          {
            tree result = DECL_RESULT (current_function_decl);

            /* If we are not returning the current function's RESULT_DECL,
               build an assignment to it.  */
            if (retval != result)
              {
                /* I believe that a function's RESULT_DECL is unique.  */
                gcc_assert (TREE_CODE (retval) != RESULT_DECL);

                retval = build2 (MODIFY_EXPR, TREE_TYPE (result),
                                 result, retval);
              }
          }
        t = build1 (RETURN_EXPR, void_type_node, retval);
      }
      break;

    case GIMPLE_ASM:
      {
        size_t i, n;
        tree out, in, cl;
        const char *s;

        out = NULL_TREE;
        n = gimple_asm_noutputs (stmt);
        if (n > 0)
          {
            t = out = gimple_asm_output_op (stmt, 0);
            for (i = 1; i < n; i++)
              {
                TREE_CHAIN (t) = gimple_asm_output_op (stmt, i);
                t = gimple_asm_output_op (stmt, i);
              }
          }

        in = NULL_TREE;
        n = gimple_asm_ninputs (stmt);
        if (n > 0)
          {
            t = in = gimple_asm_input_op (stmt, 0);
            for (i = 1; i < n; i++)
              {
                TREE_CHAIN (t) = gimple_asm_input_op (stmt, i);
                t = gimple_asm_input_op (stmt, i);
              }
          }

        cl = NULL_TREE;
        n = gimple_asm_nclobbers (stmt);
        if (n > 0)
          {
            t = cl = gimple_asm_clobber_op (stmt, 0);
            for (i = 1; i < n; i++)
              {
                TREE_CHAIN (t) = gimple_asm_clobber_op (stmt, i);
                t = gimple_asm_clobber_op (stmt, i);
              }
          }

        s = gimple_asm_string (stmt);
        t = build4 (ASM_EXPR, void_type_node, build_string (strlen (s), s),
                    out, in, cl);
        ASM_VOLATILE_P (t) = gimple_asm_volatile_p (stmt);
        ASM_INPUT_P (t) = gimple_asm_input_p (stmt);
      }
      break;

    case GIMPLE_CALL:
      {
        size_t i;
        tree fn;
        tree_ann_common_t ann;

        t = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);

        CALL_EXPR_FN (t) = gimple_call_fn (stmt);
        TREE_TYPE (t) = gimple_call_return_type (stmt);
        CALL_EXPR_STATIC_CHAIN (t) = gimple_call_chain (stmt);

        for (i = 0; i < gimple_call_num_args (stmt); i++)
          CALL_EXPR_ARG (t, i) = gimple_call_arg (stmt, i);

        if (!(gimple_call_flags (stmt) & (ECF_CONST | ECF_PURE)))
          TREE_SIDE_EFFECTS (t) = 1;

        if (gimple_call_flags (stmt) & ECF_NOTHROW)
          TREE_NOTHROW (t) = 1;

        CALL_EXPR_TAILCALL (t) = gimple_call_tail_p (stmt);
        CALL_EXPR_RETURN_SLOT_OPT (t) = gimple_call_return_slot_opt_p (stmt);
        CALL_FROM_THUNK_P (t) = gimple_call_from_thunk_p (stmt);
        CALL_CANNOT_INLINE_P (t) = gimple_call_cannot_inline_p (stmt);
        CALL_EXPR_VA_ARG_PACK (t) = gimple_call_va_arg_pack_p (stmt);

        /* If the call has a LHS then create a MODIFY_EXPR to hold it.  */
        {
          tree lhs = gimple_call_lhs (stmt);

          if (lhs)
            t = build2 (MODIFY_EXPR, TREE_TYPE (lhs), lhs, t);
        }

        /* Record the original call statement, as it may be used
           to retrieve profile information during expansion.  */
        if ((fn = gimple_call_fndecl (stmt)) != NULL_TREE
            && DECL_BUILT_IN (fn))
          {
            ann = get_tree_common_ann (t);
            ann->stmt = stmt;
          }
      }
      break;

    case GIMPLE_SWITCH:
      {
        tree label_vec;
        size_t i;
        tree elt = gimple_switch_label (stmt, 0);

        label_vec = make_tree_vec (gimple_switch_num_labels (stmt));

        if (!CASE_LOW (elt) && !CASE_HIGH (elt))
          {
            for (i = 1; i < gimple_switch_num_labels (stmt); i++)
              TREE_VEC_ELT (label_vec, i - 1) = gimple_switch_label (stmt, i);

            /* The default case in a SWITCH_EXPR must be at the end of
               the label vector.  */
            TREE_VEC_ELT (label_vec, i - 1) = gimple_switch_label (stmt, 0);
          }
        else
          {
            for (i = 0; i < gimple_switch_num_labels (stmt); i++)
              TREE_VEC_ELT (label_vec, i) = gimple_switch_label (stmt, i);
          }

        t = build3 (SWITCH_EXPR, void_type_node, gimple_switch_index (stmt),
                    NULL, label_vec);
      }
      break;

    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
      t = build1 (NOP_EXPR, void_type_node, size_zero_node);
      break;

    case GIMPLE_RESX:
      t = build_resx (gimple_resx_region (stmt));
      break;

    default:
      if (errorcount == 0)
        {
          error ("Unrecognized GIMPLE statement during RTL expansion");
          print_gimple_stmt (stderr, stmt, 4, 0);
          gcc_unreachable ();
        }
      else
        {
          /* Ignore any bad gimple codes if we're going to die anyhow,
             so we can at least set TREE_ASM_WRITTEN and have the rest
             of compilation advance without sudden ICE death.  */
          t = build1 (NOP_EXPR, void_type_node, size_zero_node);
          break;
        }
    }

  /* If STMT is inside an exception region, record it in the generated
     expression.  */
  rn = lookup_stmt_eh_region (stmt);
  if (rn >= 0)
    {
      tree call = get_call_expr_in (t);

      ann = get_tree_common_ann (t);
      ann->rn = rn;

      /* For a CALL_EXPR on the RHS of an assignment, calls.c looks up
         the CALL_EXPR, not the assignment statement, for the EH region
         number.  */
      if (call && call != t)
        {
          ann = get_tree_common_ann (call);
          ann->rn = rn;
        }
    }

  /* Set EXPR_LOCATION in all the embedded expressions.  */
  loc = gimple_location (stmt);
  walk_tree (&t, set_expr_location_r, (void *) &loc, NULL);

  TREE_BLOCK (t) = gimple_block (stmt);

  return t;
}


/* Release back to GC memory allocated by gimple_to_tree.  */

static void
release_stmt_tree (gimple stmt, tree stmt_tree)
{
  tree_ann_common_t ann;

  switch (gimple_code (stmt))
    {
    case GIMPLE_ASSIGN:
      if (get_gimple_rhs_class (gimple_expr_code (stmt)) != GIMPLE_SINGLE_RHS)
        ggc_free (TREE_OPERAND (stmt_tree, 1));
      break;
    case GIMPLE_COND:
      ggc_free (COND_EXPR_COND (stmt_tree));
      break;
    case GIMPLE_RETURN:
      if (TREE_OPERAND (stmt_tree, 0)
          && TREE_CODE (TREE_OPERAND (stmt_tree, 0)) == MODIFY_EXPR)
        ggc_free (TREE_OPERAND (stmt_tree, 0));
      break;
    case GIMPLE_CALL:
      if (gimple_call_lhs (stmt))
        {
          ann = tree_common_ann (TREE_OPERAND (stmt_tree, 1));
          if (ann)
            ggc_free (ann);
          ggc_free (TREE_OPERAND (stmt_tree, 1));
        }
      break;
    default:
      break;
    }
  ann = tree_common_ann (stmt_tree);
  if (ann)
    ggc_free (ann);
  ggc_free (stmt_tree);
}


/* Verify that there is exactly one jump instruction emitted since LAST
   and attach a REG_BR_PROB note to it, specifying PROBABILITY.
   ??? We really ought to pass the probability down to the RTL expanders
   and let them re-distribute it when the conditional expands into
   multiple conditionals.  This is however difficult to do.  */
void
add_reg_br_prob_note (rtx last, int probability)
{
  if (profile_status == PROFILE_ABSENT)
    return;
  for (last = NEXT_INSN (last); last && NEXT_INSN (last); last = NEXT_INSN (last))
    if (JUMP_P (last))
      {
        /* It is common to emit a condjump-around-jump sequence when we don't
           know how to reverse the conditional.  Special case this.  */
        if (!any_condjump_p (last)
            || !JUMP_P (NEXT_INSN (last))
            || !simplejump_p (NEXT_INSN (last))
            || !NEXT_INSN (NEXT_INSN (last))
            || !BARRIER_P (NEXT_INSN (NEXT_INSN (last)))
            || !NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))
            || !LABEL_P (NEXT_INSN (NEXT_INSN (NEXT_INSN (last))))
            || NEXT_INSN (NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))))
          goto failed;
        gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
        add_reg_note (last, REG_BR_PROB,
                      GEN_INT (REG_BR_PROB_BASE - probability));
        return;
      }
  if (!last || !JUMP_P (last) || !any_condjump_p (last))
    goto failed;
  gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
  add_reg_note (last, REG_BR_PROB, GEN_INT (probability));
  return;
 failed:
  if (dump_file)
    fprintf (dump_file, "Failed to add probability note\n");
}
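
/* Illustrative note (not from the original sources): in the
   condjump-around-jump case above, the conditional branch jumps
   *around* an unconditional jump to the real target, so it is taken
   exactly when the original condition is false.  That is why the note
   records REG_BR_PROB_BASE - probability (the complement) instead of
   the probability itself; with REG_BR_PROB_BASE == 10000, a branch
   predicted taken 90% of the time (probability == 9000) gets a note
   of 1000 on the inverted condjump.  */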

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif


/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or as, with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* We have an interference graph between such objects.  This graph
   is lower triangular.  */
static bool *stack_vars_conflict;
static size_t stack_vars_conflict_alloc;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset + frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Discover the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
get_decl_align_unit (tree decl)
{
  unsigned int align;

  align = LOCAL_DECL_ALIGNMENT (decl);

  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    align = MAX_SUPPORTED_STACK_ALIGNMENT;

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < align)
        {
          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;
        }
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < crtl->stack_alignment_needed)
    crtl->max_used_stack_slot_alignment = crtl->stack_alignment_needed;

  return align / BITS_PER_UNIT;
}

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
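
/* Illustrative note (not from the original sources): with a
   downward-growing frame, frame_phase == 0 and frame_offset == -4,
   a request for 10 bytes at alignment 8 computes
   new_frame_offset = -4 - 10 = -14, then -14 & -8 == -16, so the
   caller gets offset -16.  The "&= -align" rounding works because
   ALIGN is a power of two, making -align a mask that clears the
   low-order bits.  */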

/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  stack_vars[stack_vars_num].decl = decl;
  stack_vars[stack_vars_num].offset = 0;
  stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (decl), 1);
  stack_vars[stack_vars_num].alignb = get_decl_align_unit (decl);

  /* All variables are initially in their own partition.  */
  stack_vars[stack_vars_num].representative = stack_vars_num;
  stack_vars[stack_vars_num].next = EOC;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  SET_DECL_RTL (decl, pc_rtx);

  stack_vars_num++;
}

/* Compute the linear index of a lower-triangular coordinate (I, J).  */

static size_t
triangular_index (size_t i, size_t j)
{
  if (i < j)
    {
      size_t t;
      t = i, i = j, j = t;
    }
  return (i * (i + 1)) / 2 + j;
}
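
/* Illustrative note (not from the original sources): the packed
   lower-triangular layout stores the pair (I, J) with I >= J at index
   I*(I+1)/2 + J, e.g. (0,0)->0, (1,0)->1, (1,1)->2, (2,0)->3,
   (2,1)->4, (2,2)->5, (3,1)->7.  Swapping the arguments first makes
   the mapping symmetric, and the diagonal entries (I,I) are what let
   a variable conflict with itself.  */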

/* Ensure that STACK_VARS_CONFLICT is large enough for N objects.  */

static void
resize_stack_vars_conflict (size_t n)
{
  size_t size = triangular_index (n - 1, n - 1) + 1;

  if (size <= stack_vars_conflict_alloc)
    return;

  stack_vars_conflict = XRESIZEVEC (bool, stack_vars_conflict, size);
  memset (stack_vars_conflict + stack_vars_conflict_alloc, 0,
          (size - stack_vars_conflict_alloc) * sizeof (bool));
  stack_vars_conflict_alloc = size;
}

/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  size_t index = triangular_index (x, y);
  gcc_assert (index < stack_vars_conflict_alloc);
  stack_vars_conflict[index] = true;
}

/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  size_t index = triangular_index (x, y);
  gcc_assert (index < stack_vars_conflict_alloc);
  return stack_vars_conflict[index];
}

/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
        return true;

  return false;
}

/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then we must add a conflict for these
   variables in the interference graph.  We also need to make sure to add
   conflicts for unions containing structures.  Otherwise RTL alias
   analysis comes along and, due to type-based aliasing rules, decides
   that for two overlapping union temporaries { short s; int i; } accesses
   to the same mem through different types may not alias and happily
   reorders stores across life-time boundaries of the temporaries
   (see PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
        {
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
          if (aggr_i != aggr_j
              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case.  */
              || contains_union)
            add_stack_var_conflict (i, j);
        }
    }
}

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the size of the object.  */

static int
stack_var_size_cmp (const void *a, const void *b)
{
  HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
  HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
  unsigned int uida = DECL_UID (stack_vars[*(const size_t *)a].decl);
  unsigned int uidb = DECL_UID (stack_vars[*(const size_t *)b].decl);

  if (sa < sb)
    return -1;
  if (sa > sb)
    return 1;
  /* For stack variables of the same size use the uid of the decl
     to make the sort stable.  */
  if (uida < uidb)
    return -1;
  if (uida > uidb)
    return 1;
  return 0;
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be
   non-conflicting.  Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the
   end of the partitioning process we'll have a nice block that is easy
   to lay out within the stack frame.  */

static void
union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
{
  size_t i, last;

  /* Update each element of partition B with the given offset,
     and merge them into partition A.  */
  for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
    {
      stack_vars[i].offset += offset;
      stack_vars[i].representative = a;
    }
  stack_vars[last].next = stack_vars[a].next;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  for (last = stack_vars_num, i = 0; i < last; ++i)
    if (stack_var_conflict_p (b, i))
      add_stack_var_conflict (a, i);
}

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
            offset(B) = O
            O += size(B)
            S -= size(B)
          }
        }
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);

  /* Special case: detect when all variables conflict, and thus we can't
     do anything during the partitioning loop.  It isn't uncommon (with
     C code at least) to declare all variables at the top of the function,
     and if we're not inlining, then all variables will be in the same scope.
     Take advantage of very fast libc routines for this scan.  */
  gcc_assert (sizeof (bool) == sizeof (char));
  if (memchr (stack_vars_conflict, false, stack_vars_conflict_alloc) == NULL)
    return;

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      HOST_WIDE_INT isize = stack_vars[i].size;
      HOST_WIDE_INT offset = 0;

      for (sj = si; sj-- > 0; )
        {
          size_t j = stack_vars_sorted[sj];
          HOST_WIDE_INT jsize = stack_vars[j].size;
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore objects too large for the remaining space.  */
          if (isize < jsize)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Refine the remaining space check to include alignment.  */
          if (offset & (jalign - 1))
            {
              HOST_WIDE_INT toff = offset;
              toff += jalign - 1;
              toff &= -(HOST_WIDE_INT)jalign;
              if (isize - (toff - offset) < jsize)
                continue;

              isize -= toff - offset;
              offset = toff;
            }

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j, offset);

          isize -= jsize;
          if (isize == 0)
            break;
        }
    }
}
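
/* Illustrative note (not from the original sources): given four
   mutually non-conflicting objects of sizes 16, 8, 4 and 4, the loop
   above starts from the largest (16) and packs the others into its
   partition: the 8-byte object at offset 0, one 4-byte object at
   offset 8, the other at offset 12, consuming the representative's
   16 bytes exactly.  They can share the representative's storage
   precisely because they do not conflict, i.e. are never live at the
   same time.  */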

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
          fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
                   stack_vars[j].offset);
        }
    }
}

/* Assign rtl to DECL at frame offset OFFSET.  */

static void
expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
{
  /* Alignment is unsigned.  */
  unsigned HOST_WIDE_INT align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (virtual_stack_vars_rtx, offset);
  x = gen_rtx_MEM (DECL_MODE (decl), x);

  /* Set alignment we actually gave this decl.  */
  offset -= frame_phase;
  align = offset & -offset;
  align *= BITS_PER_UNIT;
  if (align == 0)
    align = STACK_BOUNDARY;
  else if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    align = MAX_SUPPORTED_STACK_ALIGNMENT;
  DECL_ALIGN (decl) = align;
  DECL_USER_ALIGN (decl) = 0;

  set_mem_attributes (x, decl, true);
  SET_DECL_RTL (decl, x);
}
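
/* Illustrative note (not from the original sources): "offset & -offset"
   isolates the lowest set bit of the phase-adjusted offset, which is
   the largest power-of-two byte alignment the address is guaranteed
   to have.  E.g. offset 24 (binary 11000) yields 8, i.e. the decl is
   known to be 8-byte aligned.  An offset of 0 has no set bit, which
   is why the align == 0 case falls back to STACK_BOUNDARY.  */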

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      if (DECL_RTL (stack_vars[i].decl) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (stack_vars[i].decl))
        continue;

      offset = alloc_stack_frame_space (stack_vars[i].size,
                                        stack_vars[i].alignb);

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
          expand_one_stack_var_at (stack_vars[j].decl,
                                   stack_vars[j].offset + offset);
        }
    }
}

/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        SET_DECL_RTL (stack_vars[j].decl, NULL);
    }
  return size;
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset, align;

  size = tree_low_cst (DECL_SIZE_UNIT (var), 1);
  align = get_decl_align_unit (var);
  offset = alloc_stack_frame_space (size, align);

  expand_one_stack_var_at (var, offset);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree type = TREE_TYPE (var);
  int unsignedp = TYPE_UNSIGNED (type);
  enum machine_mode reg_mode
    = promote_mode (type, DECL_MODE (var), &unsignedp, 0);
  rtx x = gen_reg_rtx (reg_mode);

  SET_DECL_RTL (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (var))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (TREE_TYPE (var))));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}

/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return the stack usage this variable is supposed to take.  */

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  if (SUPPORTS_STACK_ALIGNMENT
      && TREE_TYPE (var) != error_mark_node
      && TREE_CODE (var) == VAR_DECL)
    {
      unsigned int align;

      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = TYPE_ALIGN (TREE_TYPE (var));
      else
        align = DECL_ALIGN (var);

      if (crtl->stack_alignment_estimated < align)
        {
          /* stack_alignment_estimated shouldn't change after the stack
             realign decision has been made.  */
          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;
        }
    }

  if (TREE_CODE (var) != VAR_DECL)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (var);
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (var);
  else
    {
      if (really_expand)
        expand_one_stack_var (var);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t))
      expand_one_var (t, toplevel, true);

  this_sv_num = stack_vars_num;

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  Do make certain that a
     variable conflicts with itself.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;
      resize_stack_vars_conflict (new_sv_num);

      for (i = old_sv_num; i < new_sv_num; ++i)
        for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
          add_stack_var_conflict (i, j);
    }
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}

/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}
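
/* Illustrative note (not from the original sources): with the default
   ssp-buffer-size of 8, "char buf[64]" classifies as a large char
   array and gets phase 1, nearest the guard, under both
   -fstack-protector settings; "char buf[4]" is a small char array and
   is protected (phase 1) only under -fstack-protector-all
   (flag_stack_protect == 2); a non-char array such as "int v[16]"
   gets phase 2 only under -fstack-protector-all.  */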

/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree,
   accounting for the variables that would be expanded.  Those variables
   that can be put into registers are allocated pseudos; those that can't
   are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static HOST_WIDE_INT
account_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;
  HOST_WIDE_INT size = 0;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Account for all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t))
      size += expand_one_var (t, toplevel, false);

  this_sv_num = stack_vars_num;

  /* Account for all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    size += account_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  Do make certain that a
     variable conflicts with itself.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;
      resize_stack_vars_conflict (new_sv_num);

      for (i = old_sv_num; i < new_sv_num; ++i)
        for (j = i < this_sv_num ? i+1 : this_sv_num; j-- > old_sv_num ;)
          add_stack_var_conflict (i, j);
    }
  return size;
}

/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  tree t;
  /* Set TREE_USED on all variables in the local_decls.  */
  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    TREE_USED (TREE_VALUE (t)) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);
  XDELETEVEC (stack_vars_conflict);
  stack_vars = NULL;
  stack_vars_alloc = stack_vars_num = 0;
  stack_vars_conflict = NULL;
  stack_vars_conflict_alloc = 0;
}

/* Make a fair guess for the size of the stack frame of the current
   function.  This doesn't have to be exact, the result is only used
   in the inline heuristics.  So we don't want to run the full stack
   var packing algorithm (which is quadratic in the number of stack
   vars).  Instead, we calculate the total size of all stack vars.
   This turns out to be a pretty fair estimate -- packing of stack
   vars doesn't happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (void)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree t, outer_block = DECL_INITIAL (current_function_decl);

  init_vars_expansion ();

  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    {
      tree var = TREE_VALUE (t);

      if (TREE_USED (var))
        size += expand_one_var (var, true, false);
      TREE_USED (var) = 1;
    }
  size += account_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
      fini_vars_expansion ();
    }

  return size;
}

/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree t, next, outer_block = DECL_INITIAL (current_function_decl);

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }
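
  /* Illustrative note (not from the original sources): with a 16-byte
     PREFERRED_STACK_BOUNDARY (align == 16) and STARTING_FRAME_OFFSET == 4,
     off is 4 and frame_phase becomes 12: the amount by which
     virtual_stack_vars_rtx is known to be misaligned from the preferred
     boundary.  alloc_stack_frame_space folds this phase into its rounding
     so that absolute addresses, not raw frame offsets, end up aligned.  */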
727a31fa | 1451 | |
ff28a94d | 1452 | init_vars_expansion (); |
7d69de61 | 1453 | |
cb91fab0 | 1454 | /* At this point all variables on the local_decls with TREE_USED |
1f6d3a08 | 1455 | set are not associated with any block scope. Lay them out. */ |
802e9f8e JJ |
1456 | t = cfun->local_decls; |
1457 | cfun->local_decls = NULL_TREE; | |
1458 | for (; t; t = next) | |
1f6d3a08 RH |
1459 | { |
1460 | tree var = TREE_VALUE (t); | |
1461 | bool expand_now = false; | |
1462 | ||
802e9f8e JJ |
1463 | next = TREE_CHAIN (t); |
1464 | ||
1f6d3a08 RH |
1465 | /* We didn't set a block for static or extern because it's hard |
1466 | to tell the difference between a global variable (re)declared | |
1467 | in a local scope, and one that's really declared there to | |
1468 | begin with. And it doesn't really matter much, since we're | |
1469 | not giving them stack space. Expand them now. */ | |
1470 | if (TREE_STATIC (var) || DECL_EXTERNAL (var)) | |
1471 | expand_now = true; | |
1472 | ||
1473 | /* Any variable that could have been hoisted into an SSA_NAME | |
1474 | will have been propagated anywhere the optimizers chose, | |
1475 | i.e. not confined to their original block. Allocate them | |
1476 | as if they were defined in the outermost scope. */ | |
1477 | else if (is_gimple_reg (var)) | |
1478 | expand_now = true; | |
1479 | ||
1480 | /* If the variable is not associated with any block, then it | |
1481 | was created by the optimizers, and could be live anywhere | |
1482 | in the function. */ | |
1483 | else if (TREE_USED (var)) | |
1484 | expand_now = true; | |
1485 | ||
1486 | /* Finally, mark all variables on the list as used. We'll use | |
1487 | this in a moment when we expand those associated with scopes. */ | |
1488 | TREE_USED (var) = 1; | |
1489 | ||
1490 | if (expand_now) | |
802e9f8e JJ |
1491 | { |
1492 | expand_one_var (var, true, true); | |
1493 | if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var)) | |
1494 | { | |
1495 | rtx rtl = DECL_RTL_IF_SET (var); | |
1496 | ||
1497 | /* Keep artificial non-ignored vars in cfun->local_decls | |
1498 | chain until instantiate_decls. */ | |
1499 | if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT)) | |
1500 | { | |
1501 | TREE_CHAIN (t) = cfun->local_decls; | |
1502 | cfun->local_decls = t; | |
1503 | continue; | |
1504 | } | |
1505 | } | |
1506 | } | |
1507 | ||
1508 | ggc_free (t); | |
1f6d3a08 | 1509 | } |
1f6d3a08 RH |
1510 | |
1511 | /* At this point, all variables within the block tree with TREE_USED | |
1512 | set are actually used by the optimized function. Lay them out. */ | |
1513 | expand_used_vars_for_block (outer_block, true); | |
1514 | ||
1515 | if (stack_vars_num > 0) | |
1516 | { | |
1517 | /* Due to the way alias sets work, no variables with non-conflicting | |
c22cacf3 | 1518 | alias sets may be assigned the same address. Add conflicts to |
1f6d3a08 RH |
1519 | reflect this. */ |
1520 | add_alias_set_conflicts (); | |
1521 | ||
c22cacf3 | 1522 | /* If stack protection is enabled, we don't share space between |
7d69de61 RH |
1523 | vulnerable data and non-vulnerable data. */ |
1524 | if (flag_stack_protect) | |
1525 | add_stack_protection_conflicts (); | |
1526 | ||
c22cacf3 | 1527 | /* Now that we have collected all stack variables, and have computed a |
1f6d3a08 RH |
1528 | minimal interference graph, attempt to save some stack space. */ |
1529 | partition_stack_vars (); | |
1530 | if (dump_file) | |
1531 | dump_stack_var_partition (); | |
7d69de61 RH |
1532 | } |
1533 | ||
1534 | /* There are several conditions under which we should create a | |
1535 | stack guard: protect-all, alloca used, protected decls present. */ | |
1536 | if (flag_stack_protect == 2 | |
1537 | || (flag_stack_protect | |
e3b5732b | 1538 | && (cfun->calls_alloca || has_protected_decls))) |
7d69de61 | 1539 | create_stack_guard (); |
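/* Editor's note: flag_stack_protect is 2 under -fstack-protector-all and
   1 under plain -fstack-protector, so the guard is unconditional in the
   first case and depends on alloca/protected decls in the second.  */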
1f6d3a08 | 1540 | |
7d69de61 RH |
1541 | /* Assign rtl to each variable based on these partitions. */ |
1542 | if (stack_vars_num > 0) | |
1543 | { | |
1544 | /* Reorder decls to be protected by iterating over the variables | |
1545 | array multiple times, and allocating out of each phase in turn. */ | |
c22cacf3 | 1546 | /* ??? We could probably integrate this into the qsort we did |
7d69de61 RH |
1547 | earlier, such that we naturally see these variables first, |
1548 | and thus naturally allocate things in the right order. */ | |
1549 | if (has_protected_decls) | |
1550 | { | |
1551 | /* Phase 1 contains only character arrays. */ | |
1552 | expand_stack_vars (stack_protect_decl_phase_1); | |
1553 | ||
1554 | /* Phase 2 contains other kinds of arrays. */ | |
1555 | if (flag_stack_protect == 2) | |
1556 | expand_stack_vars (stack_protect_decl_phase_2); | |
1557 | } | |
1558 | ||
1559 | expand_stack_vars (NULL); | |
1f6d3a08 | 1560 | |
ff28a94d | 1561 | fini_vars_expansion (); |
1f6d3a08 RH |
1562 | } |
1563 | ||
1564 | /* If the target requires that FRAME_OFFSET be aligned, do it. */ | |
1565 | if (STACK_ALIGNMENT_NEEDED) | |
1566 | { | |
1567 | HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT; | |
1568 | if (!FRAME_GROWS_DOWNWARD) | |
1569 | frame_offset += align - 1; | |
1570 | frame_offset &= -align; | |
1571 | } | |
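/* Editor's illustration (not from the original source): with a 16-byte
   boundary and an upward-growing frame, a frame_offset of 20 becomes
   (20 + 15) & -16 = 32; for a downward-growing frame the mask alone
   rounds the negative offset away from zero to a multiple of 16.  */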
727a31fa RH |
1572 | } |
1573 | ||
1574 | ||
b7211528 SB |
1575 | /* If we need to produce a detailed dump, print the tree representation |
1576 | for STMT to the dump file. SINCE is the last RTX after which the RTL | |
1577 | generated for STMT should have been appended. */ | |
1578 | ||
1579 | static void | |
726a989a | 1580 | maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since) |
b7211528 SB |
1581 | { |
1582 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
1583 | { | |
1584 | fprintf (dump_file, "\n;; "); | |
726a989a | 1585 | print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM); |
b7211528 SB |
1586 | fprintf (dump_file, "\n"); |
1587 | ||
1588 | print_rtl (dump_file, since ? NEXT_INSN (since) : since); | |
1589 | } | |
1590 | } | |
1591 | ||
8b11009b ZD |
1592 | /* Maps the blocks that do not contain tree labels to rtx labels. */ |
1593 | ||
1594 | static struct pointer_map_t *lab_rtx_for_bb; | |
1595 | ||
a9b77cd1 ZD |
1596 | /* Returns the label_rtx expression for a label starting basic block BB. */ |
1597 | ||
1598 | static rtx | |
726a989a | 1599 | label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED) |
a9b77cd1 | 1600 | { |
726a989a RB |
1601 | gimple_stmt_iterator gsi; |
1602 | tree lab; | |
1603 | gimple lab_stmt; | |
8b11009b | 1604 | void **elt; |
a9b77cd1 ZD |
1605 | |
1606 | if (bb->flags & BB_RTL) | |
1607 | return block_label (bb); | |
1608 | ||
8b11009b ZD |
1609 | elt = pointer_map_contains (lab_rtx_for_bb, bb); |
1610 | if (elt) | |
ae50c0cb | 1611 | return (rtx) *elt; |
8b11009b ZD |
1612 | |
1613 | /* Find the tree label if it is present. */ | |
1614 | ||
726a989a | 1615 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
a9b77cd1 | 1616 | { |
726a989a RB |
1617 | lab_stmt = gsi_stmt (gsi); |
1618 | if (gimple_code (lab_stmt) != GIMPLE_LABEL) | |
a9b77cd1 ZD |
1619 | break; |
1620 | ||
726a989a | 1621 | lab = gimple_label_label (lab_stmt); |
a9b77cd1 ZD |
1622 | if (DECL_NONLOCAL (lab)) |
1623 | break; | |
1624 | ||
1625 | return label_rtx (lab); | |
1626 | } | |
1627 | ||
8b11009b ZD |
1628 | elt = pointer_map_insert (lab_rtx_for_bb, bb); |
1629 | *elt = gen_label_rtx (); | |
ae50c0cb | 1630 | return (rtx) *elt; |
a9b77cd1 ZD |
1631 | } |
1632 | ||
726a989a RB |
1633 | |
1634 | /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND. | |
80c7a9eb RH |
1635 | Returns a new basic block if we've terminated the current basic |
1636 | block and created a new one. */ | |
1637 | ||
1638 | static basic_block | |
726a989a | 1639 | expand_gimple_cond (basic_block bb, gimple stmt) |
80c7a9eb RH |
1640 | { |
1641 | basic_block new_bb, dest; | |
1642 | edge new_edge; | |
1643 | edge true_edge; | |
1644 | edge false_edge; | |
726a989a | 1645 | tree pred = gimple_cond_pred_to_tree (stmt); |
b7211528 SB |
1646 | rtx last2, last; |
1647 | ||
1648 | last2 = last = get_last_insn (); | |
80c7a9eb RH |
1649 | |
1650 | extract_true_false_edges_from_block (bb, &true_edge, &false_edge); | |
726a989a | 1651 | if (gimple_has_location (stmt)) |
80c7a9eb | 1652 | { |
726a989a RB |
1653 | set_curr_insn_source_location (gimple_location (stmt)); |
1654 | set_curr_insn_block (gimple_block (stmt)); | |
80c7a9eb RH |
1655 | } |
1656 | ||
1657 | /* These flags have no purpose in RTL land. */ | |
1658 | true_edge->flags &= ~EDGE_TRUE_VALUE; | |
1659 | false_edge->flags &= ~EDGE_FALSE_VALUE; | |
1660 | ||
1661 | /* We can either have a pure conditional jump with one fallthru edge or | |
1662 | a two-way jump that needs to be decomposed into two basic blocks. */
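/* Editor's illustration (not from the original source): for
   "if (a < b) goto L1; else goto L2;" where L2 starts the next block,
   only a conditional jump to L1 is emitted and the false edge falls
   through; when neither destination follows this block, an unconditional
   jump is emitted as well and placed in a new basic block of its own.  */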
a9b77cd1 | 1663 | if (false_edge->dest == bb->next_bb) |
80c7a9eb | 1664 | { |
a9b77cd1 | 1665 | jumpif (pred, label_rtx_for_bb (true_edge->dest)); |
10d22567 | 1666 | add_reg_br_prob_note (last, true_edge->probability); |
726a989a | 1667 | maybe_dump_rtl_for_gimple_stmt (stmt, last); |
a9b77cd1 | 1668 | if (true_edge->goto_locus) |
7241571e JJ |
1669 | { |
1670 | set_curr_insn_source_location (true_edge->goto_locus); | |
1671 | set_curr_insn_block (true_edge->goto_block); | |
1672 | true_edge->goto_locus = curr_insn_locator (); | |
1673 | } | |
1674 | true_edge->goto_block = NULL; | |
a9b77cd1 | 1675 | false_edge->flags |= EDGE_FALLTHRU; |
726a989a | 1676 | ggc_free (pred); |
80c7a9eb RH |
1677 | return NULL; |
1678 | } | |
a9b77cd1 | 1679 | if (true_edge->dest == bb->next_bb) |
80c7a9eb | 1680 | { |
a9b77cd1 | 1681 | jumpifnot (pred, label_rtx_for_bb (false_edge->dest)); |
10d22567 | 1682 | add_reg_br_prob_note (last, false_edge->probability); |
726a989a | 1683 | maybe_dump_rtl_for_gimple_stmt (stmt, last); |
a9b77cd1 | 1684 | if (false_edge->goto_locus) |
7241571e JJ |
1685 | { |
1686 | set_curr_insn_source_location (false_edge->goto_locus); | |
1687 | set_curr_insn_block (false_edge->goto_block); | |
1688 | false_edge->goto_locus = curr_insn_locator (); | |
1689 | } | |
1690 | false_edge->goto_block = NULL; | |
a9b77cd1 | 1691 | true_edge->flags |= EDGE_FALLTHRU; |
726a989a | 1692 | ggc_free (pred); |
80c7a9eb RH |
1693 | return NULL; |
1694 | } | |
80c7a9eb | 1695 | |
a9b77cd1 | 1696 | jumpif (pred, label_rtx_for_bb (true_edge->dest)); |
10d22567 | 1697 | add_reg_br_prob_note (last, true_edge->probability); |
80c7a9eb | 1698 | last = get_last_insn (); |
7241571e JJ |
1699 | if (false_edge->goto_locus) |
1700 | { | |
1701 | set_curr_insn_source_location (false_edge->goto_locus); | |
1702 | set_curr_insn_block (false_edge->goto_block); | |
1703 | false_edge->goto_locus = curr_insn_locator (); | |
1704 | } | |
1705 | false_edge->goto_block = NULL; | |
a9b77cd1 | 1706 | emit_jump (label_rtx_for_bb (false_edge->dest)); |
80c7a9eb RH |
1707 | |
1708 | BB_END (bb) = last; | |
1709 | if (BARRIER_P (BB_END (bb))) | |
1710 | BB_END (bb) = PREV_INSN (BB_END (bb)); | |
1711 | update_bb_for_insn (bb); | |
1712 | ||
1713 | new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb); | |
1714 | dest = false_edge->dest; | |
1715 | redirect_edge_succ (false_edge, new_bb); | |
1716 | false_edge->flags |= EDGE_FALLTHRU; | |
1717 | new_bb->count = false_edge->count; | |
1718 | new_bb->frequency = EDGE_FREQUENCY (false_edge); | |
1719 | new_edge = make_edge (new_bb, dest, 0); | |
1720 | new_edge->probability = REG_BR_PROB_BASE; | |
1721 | new_edge->count = new_bb->count; | |
1722 | if (BARRIER_P (BB_END (new_bb))) | |
1723 | BB_END (new_bb) = PREV_INSN (BB_END (new_bb)); | |
1724 | update_bb_for_insn (new_bb); | |
1725 | ||
726a989a | 1726 | maybe_dump_rtl_for_gimple_stmt (stmt, last2); |
c22cacf3 | 1727 | |
7787b4aa JJ |
1728 | if (true_edge->goto_locus) |
1729 | { | |
1730 | set_curr_insn_source_location (true_edge->goto_locus); | |
1731 | set_curr_insn_block (true_edge->goto_block); | |
1732 | true_edge->goto_locus = curr_insn_locator (); | |
1733 | } | |
1734 | true_edge->goto_block = NULL; | |
1735 | ||
726a989a | 1736 | ggc_free (pred); |
80c7a9eb RH |
1737 | return new_bb; |
1738 | } | |
1739 | ||
726a989a | 1740 | /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL |
224e770b RH |
1741 | that has CALL_EXPR_TAILCALL set. Returns non-null if we actually |
1742 | generated a tail call (something that might be denied by the ABI | |
cea49550 RH |
1743 | rules governing the call; see calls.c). |
1744 | ||
1745 | Sets CAN_FALLTHRU if we generated a *conditional* tail call, and | |
1746 | execution can still reach the rest of BB. The case here is __builtin_sqrt,
1747 | where the NaN result goes through the external function (with a | |
1748 | tailcall) and the normal result happens via a sqrt instruction. */ | |
80c7a9eb RH |
1749 | |
1750 | static basic_block | |
726a989a | 1751 | expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru) |
80c7a9eb | 1752 | { |
b7211528 | 1753 | rtx last2, last; |
224e770b | 1754 | edge e; |
628f6a4e | 1755 | edge_iterator ei; |
224e770b RH |
1756 | int probability; |
1757 | gcov_type count; | |
726a989a | 1758 | tree stmt_tree = gimple_to_tree (stmt); |
80c7a9eb | 1759 | |
b7211528 SB |
1760 | last2 = last = get_last_insn (); |
1761 | ||
726a989a RB |
1762 | expand_expr_stmt (stmt_tree); |
1763 | ||
1764 | release_stmt_tree (stmt, stmt_tree); | |
80c7a9eb RH |
1765 | |
1766 | for (last = NEXT_INSN (last); last; last = NEXT_INSN (last)) | |
224e770b RH |
1767 | if (CALL_P (last) && SIBLING_CALL_P (last)) |
1768 | goto found; | |
80c7a9eb | 1769 | |
726a989a | 1770 | maybe_dump_rtl_for_gimple_stmt (stmt, last2); |
b7211528 | 1771 | |
cea49550 | 1772 | *can_fallthru = true; |
224e770b | 1773 | return NULL; |
80c7a9eb | 1774 | |
224e770b RH |
1775 | found: |
1776 | /* ??? Wouldn't it be better to just reset any pending stack adjust? | |
1777 | Any instructions emitted here are about to be deleted. */ | |
1778 | do_pending_stack_adjust (); | |
1779 | ||
1780 | /* Remove any non-eh, non-abnormal edges that don't go to exit. */ | |
1781 | /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be | |
1782 | EH or abnormal edges, we shouldn't have created a tail call in | |
1783 | the first place. So it seems to me we should just be removing | |
1784 | all edges here, or redirecting the existing fallthru edge to | |
1785 | the exit block. */ | |
1786 | ||
224e770b RH |
1787 | probability = 0; |
1788 | count = 0; | |
224e770b | 1789 | |
628f6a4e BE |
1790 | for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); ) |
1791 | { | |
224e770b RH |
1792 | if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH))) |
1793 | { | |
1794 | if (e->dest != EXIT_BLOCK_PTR) | |
80c7a9eb | 1795 | { |
224e770b RH |
1796 | e->dest->count -= e->count; |
1797 | e->dest->frequency -= EDGE_FREQUENCY (e); | |
1798 | if (e->dest->count < 0) | |
c22cacf3 | 1799 | e->dest->count = 0; |
224e770b | 1800 | if (e->dest->frequency < 0) |
c22cacf3 | 1801 | e->dest->frequency = 0; |
80c7a9eb | 1802 | } |
224e770b RH |
1803 | count += e->count; |
1804 | probability += e->probability; | |
1805 | remove_edge (e); | |
80c7a9eb | 1806 | } |
628f6a4e BE |
1807 | else |
1808 | ei_next (&ei); | |
80c7a9eb RH |
1809 | } |
1810 | ||
224e770b RH |
1811 | /* This is somewhat ugly: the call_expr expander often emits instructions |
1812 | after the sibcall (to perform the function return). These confuse the | |
12eff7b7 | 1813 | find_many_sub_basic_blocks code, so we need to get rid of them. */
224e770b | 1814 | last = NEXT_INSN (last); |
341c100f | 1815 | gcc_assert (BARRIER_P (last)); |
cea49550 RH |
1816 | |
1817 | *can_fallthru = false; | |
224e770b RH |
1818 | while (NEXT_INSN (last)) |
1819 | { | |
1820 | /* For instance, an sqrt builtin expander expands an if-statement with
1821 | a sibcall in the then-branch and a label for the else-branch. */
1822 | if (LABEL_P (NEXT_INSN (last))) | |
cea49550 RH |
1823 | { |
1824 | *can_fallthru = true; | |
1825 | break; | |
1826 | } | |
224e770b RH |
1827 | delete_insn (NEXT_INSN (last)); |
1828 | } | |
1829 | ||
1830 | e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL); | |
1831 | e->probability += probability; | |
1832 | e->count += count; | |
1833 | BB_END (bb) = last; | |
1834 | update_bb_for_insn (bb); | |
1835 | ||
1836 | if (NEXT_INSN (last)) | |
1837 | { | |
1838 | bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb); | |
1839 | ||
1840 | last = BB_END (bb); | |
1841 | if (BARRIER_P (last)) | |
1842 | BB_END (bb) = PREV_INSN (last); | |
1843 | } | |
1844 | ||
726a989a | 1845 | maybe_dump_rtl_for_gimple_stmt (stmt, last2); |
b7211528 | 1846 | |
224e770b | 1847 | return bb; |
80c7a9eb RH |
1848 | } |
1849 | ||
242229bb JH |
1850 | /* Expand basic block BB from GIMPLE trees to RTL. */ |
1851 | ||
1852 | static basic_block | |
10d22567 | 1853 | expand_gimple_basic_block (basic_block bb) |
242229bb | 1854 | { |
726a989a RB |
1855 | gimple_stmt_iterator gsi; |
1856 | gimple_seq stmts; | |
1857 | gimple stmt = NULL; | |
242229bb JH |
1858 | rtx note, last; |
1859 | edge e; | |
628f6a4e | 1860 | edge_iterator ei; |
8b11009b | 1861 | void **elt; |
242229bb JH |
1862 | |
1863 | if (dump_file) | |
726a989a RB |
1864 | fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n", |
1865 | bb->index); | |
1866 | ||
1867 | /* Note that since we are now transitioning from GIMPLE to RTL, we | |
1868 | cannot use the gsi_*_bb() routines because they expect the basic | |
1869 | block to be in GIMPLE, instead of RTL. Therefore, we need to | |
1870 | access the BB sequence directly. */ | |
1871 | stmts = bb_seq (bb); | |
1872 | bb->il.gimple = NULL; | |
bf08ebeb | 1873 | rtl_profile_for_bb (bb); |
5e2d947c JH |
1874 | init_rtl_bb_info (bb); |
1875 | bb->flags |= BB_RTL; | |
1876 | ||
a9b77cd1 ZD |
1877 | /* Remove the RETURN_EXPR if we may fall through to the exit
1878 | instead. */ | |
726a989a RB |
1879 | gsi = gsi_last (stmts); |
1880 | if (!gsi_end_p (gsi) | |
1881 | && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN) | |
a9b77cd1 | 1882 | { |
726a989a | 1883 | gimple ret_stmt = gsi_stmt (gsi); |
a9b77cd1 ZD |
1884 | |
1885 | gcc_assert (single_succ_p (bb)); | |
1886 | gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR); | |
1887 | ||
1888 | if (bb->next_bb == EXIT_BLOCK_PTR | |
726a989a | 1889 | && !gimple_return_retval (ret_stmt)) |
a9b77cd1 | 1890 | { |
726a989a | 1891 | gsi_remove (&gsi, false); |
a9b77cd1 ZD |
1892 | single_succ_edge (bb)->flags |= EDGE_FALLTHRU; |
1893 | } | |
1894 | } | |
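/* Editor's note: a value-less "return;" whose block immediately precedes
   the exit block adds nothing the epilogue will not provide, so it is
   deleted above and its edge simply marked as a fallthru.  */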
1895 | ||
726a989a RB |
1896 | gsi = gsi_start (stmts); |
1897 | if (!gsi_end_p (gsi)) | |
8b11009b | 1898 | { |
726a989a RB |
1899 | stmt = gsi_stmt (gsi); |
1900 | if (gimple_code (stmt) != GIMPLE_LABEL) | |
1901 | stmt = NULL; | |
8b11009b | 1902 | } |
242229bb | 1903 | |
8b11009b ZD |
1904 | elt = pointer_map_contains (lab_rtx_for_bb, bb); |
1905 | ||
1906 | if (stmt || elt) | |
242229bb JH |
1907 | { |
1908 | last = get_last_insn (); | |
1909 | ||
8b11009b ZD |
1910 | if (stmt) |
1911 | { | |
726a989a RB |
1912 | tree stmt_tree = gimple_to_tree (stmt); |
1913 | expand_expr_stmt (stmt_tree); | |
1914 | release_stmt_tree (stmt, stmt_tree); | |
1915 | gsi_next (&gsi); | |
8b11009b ZD |
1916 | } |
1917 | ||
1918 | if (elt) | |
ae50c0cb | 1919 | emit_label ((rtx) *elt); |
242229bb | 1920 | |
caf93cb0 | 1921 | /* Java emits line number notes at the top of labels.
c22cacf3 | 1922 | ??? Make this go away once line number notes are obsoleted. */ |
242229bb | 1923 | BB_HEAD (bb) = NEXT_INSN (last); |
4b4bf941 | 1924 | if (NOTE_P (BB_HEAD (bb))) |
242229bb | 1925 | BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb)); |
242229bb | 1926 | note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb)); |
b7211528 | 1927 | |
726a989a | 1928 | maybe_dump_rtl_for_gimple_stmt (stmt, last); |
242229bb JH |
1929 | } |
1930 | else | |
1931 | note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK); | |
1932 | ||
1933 | NOTE_BASIC_BLOCK (note) = bb; | |
1934 | ||
628f6a4e | 1935 | for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); ) |
242229bb | 1936 | { |
242229bb JH |
1937 | /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */ |
1938 | e->flags &= ~EDGE_EXECUTABLE; | |
1939 | ||
1940 | /* At the moment not all abnormal edges match the RTL representation. | |
c22cacf3 MS |
1941 | It is safe to remove them here as find_many_sub_basic_blocks will |
1942 | rediscover them. In the future we should get this fixed properly. */ | |
242229bb JH |
1943 | if (e->flags & EDGE_ABNORMAL) |
1944 | remove_edge (e); | |
628f6a4e BE |
1945 | else |
1946 | ei_next (&ei); | |
242229bb JH |
1947 | } |
1948 | ||
726a989a | 1949 | for (; !gsi_end_p (gsi); gsi_next (&gsi)) |
242229bb | 1950 | { |
726a989a | 1951 | gimple stmt = gsi_stmt (gsi); |
cea49550 | 1952 | basic_block new_bb; |
242229bb | 1953 | |
242229bb JH |
1954 | /* Expand this statement, then evaluate the resulting RTL and |
1955 | fixup the CFG accordingly. */ | |
726a989a | 1956 | if (gimple_code (stmt) == GIMPLE_COND) |
cea49550 | 1957 | { |
726a989a | 1958 | new_bb = expand_gimple_cond (bb, stmt); |
cea49550 RH |
1959 | if (new_bb) |
1960 | return new_bb; | |
1961 | } | |
80c7a9eb | 1962 | else |
242229bb | 1963 | { |
726a989a | 1964 | if (is_gimple_call (stmt) && gimple_call_tail_p (stmt)) |
cea49550 RH |
1965 | { |
1966 | bool can_fallthru; | |
1967 | new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru); | |
1968 | if (new_bb) | |
1969 | { | |
1970 | if (can_fallthru) | |
1971 | bb = new_bb; | |
1972 | else | |
1973 | return new_bb; | |
1974 | } | |
1975 | } | |
b5c3dfbb | 1976 | else if (gimple_code (stmt) != GIMPLE_CHANGE_DYNAMIC_TYPE) |
b7211528 | 1977 | { |
726a989a | 1978 | tree stmt_tree = gimple_to_tree (stmt); |
b7211528 | 1979 | last = get_last_insn (); |
726a989a RB |
1980 | expand_expr_stmt (stmt_tree); |
1981 | maybe_dump_rtl_for_gimple_stmt (stmt, last); | |
1982 | release_stmt_tree (stmt, stmt_tree); | |
b7211528 | 1983 | } |
242229bb JH |
1984 | } |
1985 | } | |
1986 | ||
7241571e | 1987 | /* Expand implicit goto and convert goto_locus. */ |
a9b77cd1 ZD |
1988 | FOR_EACH_EDGE (e, ei, bb->succs) |
1989 | { | |
7241571e JJ |
1990 | if (e->goto_locus && e->goto_block) |
1991 | { | |
1992 | set_curr_insn_source_location (e->goto_locus); | |
1993 | set_curr_insn_block (e->goto_block); | |
1994 | e->goto_locus = curr_insn_locator (); | |
1995 | } | |
1996 | e->goto_block = NULL; | |
1997 | if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb) | |
1998 | { | |
1999 | emit_jump (label_rtx_for_bb (e->dest)); | |
2000 | e->flags &= ~EDGE_FALLTHRU; | |
2001 | } | |
a9b77cd1 ZD |
2002 | } |
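/* Editor's note: once blocks are laid out in RTL, a fallthru edge whose
   destination is not the textually next block can no longer fall
   through, hence the explicit jump to the destination's label above.  */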
2003 | ||
242229bb JH |
2004 | do_pending_stack_adjust (); |
2005 | ||
3f117656 | 2006 | /* Find the block tail. The last insn in the block is the insn |
242229bb JH |
2007 | before a barrier and/or table jump insn. */ |
2008 | last = get_last_insn (); | |
4b4bf941 | 2009 | if (BARRIER_P (last)) |
242229bb JH |
2010 | last = PREV_INSN (last); |
2011 | if (JUMP_TABLE_DATA_P (last)) | |
2012 | last = PREV_INSN (PREV_INSN (last)); | |
2013 | BB_END (bb) = last; | |
caf93cb0 | 2014 | |
242229bb | 2015 | update_bb_for_insn (bb); |
80c7a9eb | 2016 | |
242229bb JH |
2017 | return bb; |
2018 | } | |
2019 | ||
2020 | ||
2021 | /* Create a basic block for initialization code. */ | |
2022 | ||
2023 | static basic_block | |
2024 | construct_init_block (void) | |
2025 | { | |
2026 | basic_block init_block, first_block; | |
fd44f634 JH |
2027 | edge e = NULL; |
2028 | int flags; | |
275a4187 | 2029 | |
fd44f634 JH |
2030 | /* Multiple entry points not supported yet. */ |
2031 | gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1); | |
5e2d947c JH |
2032 | init_rtl_bb_info (ENTRY_BLOCK_PTR); |
2033 | init_rtl_bb_info (EXIT_BLOCK_PTR); | |
2034 | ENTRY_BLOCK_PTR->flags |= BB_RTL; | |
2035 | EXIT_BLOCK_PTR->flags |= BB_RTL; | |
242229bb | 2036 | |
fd44f634 | 2037 | e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0); |
275a4187 | 2038 | |
fd44f634 JH |
2039 | /* When the entry edge points to the first basic block, we don't need
2040 | a jump; otherwise we have to jump to the proper target. */
2041 | if (e && e->dest != ENTRY_BLOCK_PTR->next_bb) | |
2042 | { | |
726a989a | 2043 | tree label = gimple_block_label (e->dest); |
fd44f634 JH |
2044 | |
2045 | emit_jump (label_rtx (label)); | |
2046 | flags = 0; | |
275a4187 | 2047 | } |
fd44f634 JH |
2048 | else |
2049 | flags = EDGE_FALLTHRU; | |
242229bb JH |
2050 | |
2051 | init_block = create_basic_block (NEXT_INSN (get_insns ()), | |
2052 | get_last_insn (), | |
2053 | ENTRY_BLOCK_PTR); | |
2054 | init_block->frequency = ENTRY_BLOCK_PTR->frequency; | |
2055 | init_block->count = ENTRY_BLOCK_PTR->count; | |
2056 | if (e) | |
2057 | { | |
2058 | first_block = e->dest; | |
2059 | redirect_edge_succ (e, init_block); | |
fd44f634 | 2060 | e = make_edge (init_block, first_block, flags); |
242229bb JH |
2061 | } |
2062 | else | |
2063 | e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU); | |
2064 | e->probability = REG_BR_PROB_BASE; | |
2065 | e->count = ENTRY_BLOCK_PTR->count; | |
2066 | ||
2067 | update_bb_for_insn (init_block); | |
2068 | return init_block; | |
2069 | } | |
2070 | ||
55e092c4 JH |
2071 | /* For each lexical block, set BLOCK_NUMBER to the depth at which it is |
2072 | found in the block tree. */ | |
2073 | ||
2074 | static void | |
2075 | set_block_levels (tree block, int level) | |
2076 | { | |
2077 | while (block) | |
2078 | { | |
2079 | BLOCK_NUMBER (block) = level; | |
2080 | set_block_levels (BLOCK_SUBBLOCKS (block), level + 1); | |
2081 | block = BLOCK_CHAIN (block); | |
2082 | } | |
2083 | } | |
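/* Editor's illustration (not from the original source): for a block tree
   A containing B and D, with C nested inside B, the assigned levels are
   A:0, B:1, D:1, C:2; change_scope later uses these depths to find a
   common parent quickly.  */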
242229bb JH |
2084 | |
2085 | /* Create a block containing landing pads and similar stuff. */ | |
2086 | ||
2087 | static void | |
2088 | construct_exit_block (void) | |
2089 | { | |
2090 | rtx head = get_last_insn (); | |
2091 | rtx end; | |
2092 | basic_block exit_block; | |
628f6a4e BE |
2093 | edge e, e2; |
2094 | unsigned ix; | |
2095 | edge_iterator ei; | |
071a42f9 | 2096 | rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb); |
242229bb | 2097 | |
bf08ebeb JH |
2098 | rtl_profile_for_bb (EXIT_BLOCK_PTR); |
2099 | ||
caf93cb0 | 2100 | /* Make sure the locus is set to the end of the function, so that |
242229bb | 2101 | epilogue line numbers and warnings are set properly. */ |
6773e15f | 2102 | if (cfun->function_end_locus != UNKNOWN_LOCATION) |
242229bb JH |
2103 | input_location = cfun->function_end_locus; |
2104 | ||
2105 | /* The following insns belong to the top scope. */ | |
55e092c4 | 2106 | set_curr_insn_block (DECL_INITIAL (current_function_decl)); |
242229bb | 2107 | |
242229bb JH |
2108 | /* Generate rtl for function exit. */ |
2109 | expand_function_end (); | |
2110 | ||
2111 | end = get_last_insn (); | |
2112 | if (head == end) | |
2113 | return; | |
071a42f9 JH |
2114 | /* While emitting the function end we could have moved the end of the
2115 | last basic block. */
2116 | BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end; | |
4b4bf941 | 2117 | while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head))) |
242229bb | 2118 | head = NEXT_INSN (head); |
80c7a9eb RH |
2119 | exit_block = create_basic_block (NEXT_INSN (head), end, |
2120 | EXIT_BLOCK_PTR->prev_bb); | |
242229bb JH |
2121 | exit_block->frequency = EXIT_BLOCK_PTR->frequency; |
2122 | exit_block->count = EXIT_BLOCK_PTR->count; | |
628f6a4e BE |
2123 | |
2124 | ix = 0; | |
2125 | while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds)) | |
242229bb | 2126 | { |
8fb790fd | 2127 | e = EDGE_PRED (EXIT_BLOCK_PTR, ix); |
242229bb | 2128 | if (!(e->flags & EDGE_ABNORMAL)) |
628f6a4e BE |
2129 | redirect_edge_succ (e, exit_block); |
2130 | else | |
2131 | ix++; | |
242229bb | 2132 | } |
628f6a4e | 2133 | |
242229bb JH |
2134 | e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU); |
2135 | e->probability = REG_BR_PROB_BASE; | |
2136 | e->count = EXIT_BLOCK_PTR->count; | |
628f6a4e | 2137 | FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds) |
242229bb JH |
2138 | if (e2 != e) |
2139 | { | |
c22cacf3 | 2140 | e->count -= e2->count; |
242229bb JH |
2141 | exit_block->count -= e2->count; |
2142 | exit_block->frequency -= EDGE_FREQUENCY (e2); | |
2143 | } | |
2144 | if (e->count < 0) | |
2145 | e->count = 0; | |
2146 | if (exit_block->count < 0) | |
2147 | exit_block->count = 0; | |
2148 | if (exit_block->frequency < 0) | |
2149 | exit_block->frequency = 0; | |
2150 | update_bb_for_insn (exit_block); | |
2151 | } | |
2152 | ||
c22cacf3 | 2153 | /* Helper function for discover_nonconstant_array_refs. |
a1b23b2f UW |
2154 | Look for ARRAY_REF nodes with non-constant indexes and mark their
2155 | base variables addressable. */
2156 | ||
2157 | static tree | |
2158 | discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees, | |
2159 | void *data ATTRIBUTE_UNUSED) | |
2160 | { | |
2161 | tree t = *tp; | |
2162 | ||
2163 | if (IS_TYPE_OR_DECL_P (t)) | |
2164 | *walk_subtrees = 0; | |
2165 | else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF) | |
2166 | { | |
2167 | while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF) | |
2168 | && is_gimple_min_invariant (TREE_OPERAND (t, 1)) | |
2169 | && (!TREE_OPERAND (t, 2) | |
2170 | || is_gimple_min_invariant (TREE_OPERAND (t, 2)))) | |
2171 | || (TREE_CODE (t) == COMPONENT_REF | |
2172 | && (!TREE_OPERAND (t,2) | |
2173 | || is_gimple_min_invariant (TREE_OPERAND (t, 2)))) | |
2174 | || TREE_CODE (t) == BIT_FIELD_REF | |
2175 | || TREE_CODE (t) == REALPART_EXPR | |
2176 | || TREE_CODE (t) == IMAGPART_EXPR | |
2177 | || TREE_CODE (t) == VIEW_CONVERT_EXPR | |
1043771b | 2178 | || CONVERT_EXPR_P (t)) |
a1b23b2f UW |
2179 | t = TREE_OPERAND (t, 0); |
2180 | ||
2181 | if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF) | |
2182 | { | |
2183 | t = get_base_address (t); | |
2184 | if (t && DECL_P (t)) | |
2185 | TREE_ADDRESSABLE (t) = 1; | |
2186 | } | |
2187 | ||
2188 | *walk_subtrees = 0; | |
2189 | } | |
2190 | ||
2191 | return NULL_TREE; | |
2192 | } | |
2193 | ||
2194 | /* RTL expansion is not able to compile array references with variable | |
2195 | offsets for arrays stored in a single register. Discover such
2196 | expressions and mark variables as addressable to avoid this | |
2197 | scenario. */ | |
2198 | ||
2199 | static void | |
2200 | discover_nonconstant_array_refs (void) | |
2201 | { | |
2202 | basic_block bb; | |
726a989a | 2203 | gimple_stmt_iterator gsi; |
a1b23b2f UW |
2204 | |
2205 | FOR_EACH_BB (bb) | |
726a989a RB |
2206 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
2207 | { | |
2208 | gimple stmt = gsi_stmt (gsi); | |
2209 | walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL); | |
2210 | } | |
a1b23b2f UW |
2211 | } |
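/* Editor's illustration (not from the original source): given
   "int v[2]; ... v[i] ..." with a non-constant i, v cannot live in a
   single register, so the walk above sets TREE_ADDRESSABLE on v and it
   is given a stack slot instead.  */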
2212 | ||
2e3f842f L |
2213 | /* This function sets crtl->args.internal_arg_pointer to a virtual |
2214 | register if DRAP is needed. The local register allocator will replace
2215 | virtual_incoming_args_rtx with the virtual register. */ | |
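/* Editor's note: DRAP is the dynamic realign argument pointer, used when
   the incoming stack may be less aligned than this function's frame
   requires.  */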
2216 | ||
2217 | static void | |
2218 | expand_stack_alignment (void) | |
2219 | { | |
2220 | rtx drap_rtx; | |
e939805b | 2221 | unsigned int preferred_stack_boundary; |
2e3f842f L |
2222 | |
2223 | if (! SUPPORTS_STACK_ALIGNMENT) | |
2224 | return; | |
2225 | ||
2226 | if (cfun->calls_alloca | |
2227 | || cfun->has_nonlocal_label | |
2228 | || crtl->has_nonlocal_goto) | |
2229 | crtl->need_drap = true; | |
2230 | ||
2231 | gcc_assert (crtl->stack_alignment_needed | |
2232 | <= crtl->stack_alignment_estimated); | |
2233 | ||
2e3f842f L |
2234 | /* Update crtl->stack_alignment_estimated and use it later to align |
2235 | stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call | |
2236 | exceptions since callgraph doesn't collect incoming stack alignment | |
2237 | in this case. */ | |
2238 | if (flag_non_call_exceptions | |
2239 | && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary) | |
2240 | preferred_stack_boundary = PREFERRED_STACK_BOUNDARY; | |
2241 | else | |
2242 | preferred_stack_boundary = crtl->preferred_stack_boundary; | |
2243 | if (preferred_stack_boundary > crtl->stack_alignment_estimated) | |
2244 | crtl->stack_alignment_estimated = preferred_stack_boundary; | |
2245 | if (preferred_stack_boundary > crtl->stack_alignment_needed) | |
2246 | crtl->stack_alignment_needed = preferred_stack_boundary; | |
2247 | ||
2248 | crtl->stack_realign_needed | |
e939805b | 2249 | = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated; |
d2d93c32 | 2250 | crtl->stack_realign_tried = crtl->stack_realign_needed; |
2e3f842f L |
2251 | |
2252 | crtl->stack_realign_processed = true; | |
2253 | ||
2254 | /* Target has to redefine TARGET_GET_DRAP_RTX to support stack | |
2255 | alignment. */ | |
2256 | gcc_assert (targetm.calls.get_drap_rtx != NULL); | |
2257 | drap_rtx = targetm.calls.get_drap_rtx (); | |
2258 | ||
d015f7cc L |
2259 | /* stack_realign_drap and drap_rtx must match. */ |
2260 | gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL)); | |
2261 | ||
2e3f842f L |
2262 | /* Do nothing if NULL is returned, which means DRAP is not needed. */ |
2263 | if (NULL != drap_rtx) | |
2264 | { | |
2265 | crtl->args.internal_arg_pointer = drap_rtx; | |
2266 | ||
2267 | /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is | |
2268 | needed. */ | |
2269 | fixup_tail_calls (); | |
2270 | } | |
2271 | } | |
2272 | ||
242229bb JH |
2273 | /* Translate the intermediate representation contained in the CFG |
2274 | from GIMPLE trees to RTL. | |
2275 | ||
2276 | We do conversion per basic block and preserve/update the tree CFG. | |
2277 | This implies we have to do some magic as the CFG can simultaneously | |
2278 | consist of basic blocks containing RTL and GIMPLE trees. This can | |
61ada8ae | 2279 | confuse the CFG hooks, so be careful not to manipulate the CFG during
242229bb JH |
2280 | the expansion. */ |
2281 | ||
c2924966 | 2282 | static unsigned int |
726a989a | 2283 | gimple_expand_cfg (void) |
242229bb JH |
2284 | { |
2285 | basic_block bb, init_block; | |
2286 | sbitmap blocks; | |
0ef90296 ZD |
2287 | edge_iterator ei; |
2288 | edge e; | |
242229bb | 2289 | |
4586b4ca SB |
2290 | /* Some backends want to know that we are expanding to RTL. */ |
2291 | currently_expanding_to_rtl = 1; | |
2292 | ||
bf08ebeb JH |
2293 | rtl_profile_for_bb (ENTRY_BLOCK_PTR); |
2294 | ||
55e092c4 JH |
2295 | insn_locators_alloc (); |
2296 | if (!DECL_BUILT_IN (current_function_decl)) | |
1751ecd6 AH |
2297 | { |
2298 | /* Eventually, all FEs should explicitly set function_start_locus. */ | |
2299 | if (cfun->function_start_locus == UNKNOWN_LOCATION) | |
2300 | set_curr_insn_source_location | |
2301 | (DECL_SOURCE_LOCATION (current_function_decl)); | |
2302 | else | |
2303 | set_curr_insn_source_location (cfun->function_start_locus); | |
2304 | } | |
55e092c4 JH |
2305 | set_curr_insn_block (DECL_INITIAL (current_function_decl)); |
2306 | prologue_locator = curr_insn_locator (); | |
2307 | ||
2308 | /* Make sure the first insn is a note even if we don't want linenums.
2309 | This guarantees the first insn will never be deleted.
2310 | Also, final expects a note to appear there. */ | |
2311 | emit_note (NOTE_INSN_DELETED); | |
6429e3be | 2312 | |
a1b23b2f UW |
2313 | /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */ |
2314 | discover_nonconstant_array_refs (); | |
2315 | ||
e41b2a33 | 2316 | targetm.expand_to_rtl_hook (); |
cb91fab0 | 2317 | crtl->stack_alignment_needed = STACK_BOUNDARY; |
2e3f842f L |
2318 | crtl->max_used_stack_slot_alignment = STACK_BOUNDARY; |
2319 | crtl->stack_alignment_estimated = STACK_BOUNDARY; | |
cb91fab0 JH |
2320 | crtl->preferred_stack_boundary = STACK_BOUNDARY; |
2321 | cfun->cfg->max_jumptable_ents = 0; | |
2322 | ||
e41b2a33 | 2323 | |
727a31fa | 2324 | /* Expand the variables recorded during gimple lowering. */ |
242229bb JH |
2325 | expand_used_vars (); |
2326 | ||
7d69de61 RH |
2327 | /* Honor stack protection warnings. */ |
2328 | if (warn_stack_protect) | |
2329 | { | |
e3b5732b | 2330 | if (cfun->calls_alloca) |
c5409249 MLI |
2331 | warning (OPT_Wstack_protector, |
2332 | "not protecting local variables: variable length buffer"); | |
cb91fab0 | 2333 | if (has_short_buffer && !crtl->stack_protect_guard) |
c5409249 MLI |
2334 | warning (OPT_Wstack_protector, |
2335 | "not protecting function: no buffer at least %d bytes long", | |
7d69de61 RH |
2336 | (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE)); |
2337 | } | |
2338 | ||
242229bb | 2339 | /* Set up parameters and prepare for return, for the function. */ |
b79c5284 | 2340 | expand_function_start (current_function_decl); |
242229bb JH |
2341 | |
2342 | /* If this function is `main', emit a call to `__main' | |
2343 | to run global initializers, etc. */ | |
2344 | if (DECL_NAME (current_function_decl) | |
2345 | && MAIN_NAME_P (DECL_NAME (current_function_decl)) | |
2346 | && DECL_FILE_SCOPE_P (current_function_decl)) | |
2347 | expand_main_function (); | |
2348 | ||
7d69de61 RH |
2349 | /* Initialize the stack_protect_guard field. This must happen after the |
2350 | call to __main (if any) so that the external decl is initialized. */ | |
cb91fab0 | 2351 | if (crtl->stack_protect_guard) |
7d69de61 RH |
2352 | stack_protect_prologue (); |
2353 | ||
e939805b L |
2354 | /* Update stack boundary if needed. */ |
2355 | if (SUPPORTS_STACK_ALIGNMENT) | |
2356 | { | |
2357 | /* Call update_stack_boundary here to update incoming stack | |
2358 | boundary before TARGET_FUNCTION_OK_FOR_SIBCALL is called. | |
2359 | TARGET_FUNCTION_OK_FOR_SIBCALL needs to know the accurate | |
2360 | incoming stack alignment to check if it is OK to perform | |
2361 | sibcall optimization since sibcall optimization will only | |
2362 | align the outgoing stack to incoming stack boundary. */ | |
2363 | if (targetm.calls.update_stack_boundary) | |
2364 | targetm.calls.update_stack_boundary (); | |
2365 | ||
2366 | /* The incoming stack frame has to be aligned at least at | |
2367 | parm_stack_boundary. */ | |
2368 | gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY); | |
2369 | } | |
2370 | ||
3fbd86b1 | 2371 | /* Register rtl specific functions for cfg. */ |
242229bb JH |
2372 | rtl_register_cfg_hooks (); |
2373 | ||
2374 | init_block = construct_init_block (); | |
2375 | ||
0ef90296 | 2376 | /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the |
2a8a8292 | 2377 | remaining edges in expand_gimple_basic_block. */ |
0ef90296 ZD |
2378 | FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs) |
2379 | e->flags &= ~EDGE_EXECUTABLE; | |
2380 | ||
8b11009b | 2381 | lab_rtx_for_bb = pointer_map_create (); |
242229bb | 2382 | FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb) |
10d22567 | 2383 | bb = expand_gimple_basic_block (bb); |
bf08ebeb JH |
2384 | |
2385 | /* Expansion is used by optimization passes too; set maybe_hot_insn_p
2386 | conservatively to true until they are all profile-aware. */
8b11009b | 2387 | pointer_map_destroy (lab_rtx_for_bb); |
cb91fab0 | 2388 | free_histograms (); |
242229bb JH |
2389 | |
2390 | construct_exit_block (); | |
55e092c4 JH |
2391 | set_curr_insn_block (DECL_INITIAL (current_function_decl)); |
2392 | insn_locators_finalize (); | |
242229bb | 2393 | |
4586b4ca SB |
2394 | /* We're done expanding trees to RTL. */ |
2395 | currently_expanding_to_rtl = 0; | |
2396 | ||
e8a2a782 | 2397 | /* Convert tree EH labels to RTL EH labels and zap the tree EH table. */ |
242229bb | 2398 | convert_from_eh_region_ranges (); |
e8a2a782 | 2399 | set_eh_throw_stmt_table (cfun, NULL); |
242229bb JH |
2400 | |
2401 | rebuild_jump_labels (get_insns ()); | |
2402 | find_exception_handler_labels (); | |
2403 | ||
2404 | blocks = sbitmap_alloc (last_basic_block); | |
2405 | sbitmap_ones (blocks); | |
2406 | find_many_sub_basic_blocks (blocks); | |
25cd19de | 2407 | purge_all_dead_edges (); |
242229bb JH |
2408 | sbitmap_free (blocks); |
2409 | ||
2410 | compact_blocks (); | |
2e3f842f L |
2411 | |
2412 | expand_stack_alignment (); | |
2413 | ||
242229bb | 2414 | #ifdef ENABLE_CHECKING |
62e5bf5d | 2415 | verify_flow_info (); |
242229bb | 2416 | #endif |
9f8628ba PB |
2417 | |
2418 | /* There's no need to defer outputting this function any more; we | |
2419 | know we want to output it. */ | |
2420 | DECL_DEFER_OUTPUT (current_function_decl) = 0; | |
2421 | ||
2422 | /* Now that we're done expanding trees to RTL, we shouldn't have any | |
2423 | more CONCATs anywhere. */ | |
2424 | generating_concat_p = 0; | |
2425 | ||
b7211528 SB |
2426 | if (dump_file) |
2427 | { | |
2428 | fprintf (dump_file, | |
2429 | "\n\n;;\n;; Full RTL generated for this function:\n;;\n"); | |
2430 | /* And the pass manager will dump RTL for us. */ | |
2431 | } | |
ef330312 PB |
2432 | |
2433 | /* If we're emitting a nested function, make sure its parent gets | |
2434 | emitted as well. Doing otherwise confuses debug info. */ | |
c22cacf3 | 2435 | { |
ef330312 PB |
2436 | tree parent; |
2437 | for (parent = DECL_CONTEXT (current_function_decl); | |
c22cacf3 MS |
2438 | parent != NULL_TREE; |
2439 | parent = get_containing_scope (parent)) | |
ef330312 | 2440 | if (TREE_CODE (parent) == FUNCTION_DECL) |
c22cacf3 | 2441 | TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1; |
ef330312 | 2442 | } |
c22cacf3 | 2443 | |
ef330312 PB |
2444 | /* We are now committed to emitting code for this function. Do any |
2445 | preparation, such as emitting abstract debug info for the inline | |
2446 | before it gets mangled by optimization. */ | |
2447 | if (cgraph_function_possibly_inlined_p (current_function_decl)) | |
2448 | (*debug_hooks->outlining_inline_function) (current_function_decl); | |
2449 | ||
2450 | TREE_ASM_WRITTEN (current_function_decl) = 1; | |
4bb1e037 AP |
2451 | |
2452 | /* After expanding, the return labels are no longer needed. */ | |
2453 | return_label = NULL; | |
2454 | naked_return_label = NULL; | |
55e092c4 JH |
2455 | /* Tag the blocks with a depth number so that change_scope can find |
2456 | the common parent easily. */ | |
2457 | set_block_levels (DECL_INITIAL (cfun->decl), 0); | |
bf08ebeb | 2458 | default_rtl_profile (); |
c2924966 | 2459 | return 0; |
242229bb JH |
2460 | } |
2461 | ||
e3b5732b | 2462 | struct rtl_opt_pass pass_expand = |
242229bb | 2463 | { |
8ddbbcae | 2464 | { |
e3b5732b | 2465 | RTL_PASS, |
c22cacf3 | 2466 | "expand", /* name */ |
242229bb | 2467 | NULL, /* gate */ |
726a989a | 2468 | gimple_expand_cfg, /* execute */ |
242229bb JH |
2469 | NULL, /* sub */ |
2470 | NULL, /* next */ | |
2471 | 0, /* static_pass_number */ | |
c22cacf3 | 2472 | TV_EXPAND, /* tv_id */ |
242229bb JH |
2473 | /* ??? If TER is enabled, we actually receive GENERIC. */ |
2474 | PROP_gimple_leh | PROP_cfg, /* properties_required */ | |
2475 | PROP_rtl, /* properties_provided */ | |
bbbe4e7b | 2476 | PROP_trees, /* properties_destroyed */ |
242229bb | 2477 | 0, /* todo_flags_start */ |
ef330312 | 2478 | TODO_dump_func, /* todo_flags_finish */ |
8ddbbcae | 2479 | } |
242229bb | 2480 | }; |