/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "target.h"
#include "ssaexpand.h"

/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;

/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
          && gimple_location (stmt) != EXPR_LOCATION (t))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));

  return t;
}
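
/* For illustration (hypothetical SSA names): for the GIMPLE assignment
   "x_1 = a_2 + b_3", a GIMPLE_BINARY_RHS, this builds the GENERIC tree
   PLUS_EXPR <a_2, b_3> with the type of x_1; for "x_1 = -a_2" it
   builds NEGATE_EXPR <a_2>.  */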

/* Verify that there is exactly one jump instruction since LAST and attach
   a REG_BR_PROB note specifying the probability.
   ??? We really ought to pass the probability down to RTL expanders and let it
   re-distribute it when the conditional expands into multiple conditionals.
   This is however difficult to do.  */
void
add_reg_br_prob_note (rtx last, int probability)
{
  if (profile_status == PROFILE_ABSENT)
    return;
  for (last = NEXT_INSN (last); last && NEXT_INSN (last); last = NEXT_INSN (last))
    if (JUMP_P (last))
      {
        /* It is common to emit condjump-around-jump sequence when we don't know
           how to reverse the conditional.  Special case this.  */
        if (!any_condjump_p (last)
            || !JUMP_P (NEXT_INSN (last))
            || !simplejump_p (NEXT_INSN (last))
            || !NEXT_INSN (NEXT_INSN (last))
            || !BARRIER_P (NEXT_INSN (NEXT_INSN (last)))
            || !NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))
            || !LABEL_P (NEXT_INSN (NEXT_INSN (NEXT_INSN (last))))
            || NEXT_INSN (NEXT_INSN (NEXT_INSN (NEXT_INSN (last)))))
          goto failed;
        gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
        add_reg_note (last, REG_BR_PROB,
                      GEN_INT (REG_BR_PROB_BASE - probability));
        return;
      }
  if (!last || !JUMP_P (last) || !any_condjump_p (last))
    goto failed;
  gcc_assert (!find_reg_note (last, REG_BR_PROB, 0));
  add_reg_note (last, REG_BR_PROB, GEN_INT (probability));
  return;
failed:
  if (dump_file)
    fprintf (dump_file, "Failed to add probability note\n");
}
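
/* For illustration, the condjump-around-jump shape special-cased above is

     (jump_insn ...)   ; conditional jump around the real branch
     (jump_insn ...)   ; unconditional simplejump to the original target
     (barrier)
     (code_label)

   and there the note goes on the conditional jump with the inverted
   probability REG_BR_PROB_BASE - probability, since taking that jump
   means *not* taking the original branch.  */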

#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}
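
/* Note: pc_rtx does double duty around here.  add_stack_var below uses
   set_rtl (decl, pc_rtx) as a "deferred to the partitioning phase"
   marker (see the pc_rtx check in expand_stack_vars), while above it is
   the "multiple places" sentinel; both uses rely on pc_rtx never being
   a variable's real location.  */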

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or as, with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset + frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;

/* Discover the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
get_decl_align_unit (tree decl)
{
  unsigned int align;

  align = LOCAL_DECL_ALIGNMENT (decl);

  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    align = MAX_SUPPORTED_STACK_ALIGNMENT;

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < align)
        {
          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;
        }
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  return align / BITS_PER_UNIT;
}

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
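
/* For illustration (hypothetical numbers): with FRAME_GROWS_DOWNWARD,
   frame_phase 0 and frame_offset -20, a request for SIZE 10 at ALIGN 8
   computes new_frame_offset = (-20 - 10) & -8 = -32, so the slot lives
   at frame offset -32 and frame_offset becomes -32.  */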

/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  stack_vars[stack_vars_num].decl = decl;
  stack_vars[stack_vars_num].offset = 0;
  stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  stack_vars[stack_vars_num].alignb = get_decl_align_unit (SSAVAR (decl));

  /* All variables are initially in their own partition.  */
  stack_vars[stack_vars_num].representative = stack_vars_num;
  stack_vars[stack_vars_num].next = EOC;

  /* All variables initially conflict with no other.  */
  stack_vars[stack_vars_num].conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}

/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (NULL);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (NULL);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}

/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
        return true;

  return false;
}
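
/* For illustration: "union { short s; int i; }", an array of such
   unions, and "struct { union { short s; int i; } u; }" all return
   true here, while a plain "struct { int a; int b; }" returns
   false.  */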

/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then do add a conflict for these variables
   in the interference graph.  We also need to make sure to add conflicts
   for structures that contain unions.  Else RTL alias analysis comes along
   and due to type based aliasing rules decides that for two overlapping
   union temporaries { short s; int i; } accesses to the same mem through
   different types may not alias and happily reorders stores across
   life-time boundaries of the temporaries (See PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
        {
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
          if (aggr_i != aggr_j
              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case.  */
              || contains_union)
            add_stack_var_conflict (i, j);
        }
    }
}

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the size and type of the object.  */

static int
stack_var_size_cmp (const void *a, const void *b)
{
  HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
  HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
  tree decla, declb;
  unsigned int uida, uidb;

  if (sa < sb)
    return -1;
  if (sa > sb)
    return 1;
  decla = stack_vars[*(const size_t *)a].decl;
  declb = stack_vars[*(const size_t *)b].decl;
  /* For stack variables of the same size use an id of the decls
     to make the sort stable.  Two SSA names are compared by their
     version, SSA names come before non-SSA names, and two normal
     decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return -1;
  if (uida > uidb)
    return 1;
  return 0;
}
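
/* For illustration (hypothetical sizes): the comparator above yields an
   ascending order, so sizes {16, 4, 8} sort as {4, 8, 16}; the id
   comparison only breaks ties so that the qsort result is deterministic
   across runs, as the comment says, not to influence packing.  */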

/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert (visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}

/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_UID (decl);
          /* We should never end up partitioning SSA names (though they
             may end up on the stack).  Neither should we allocate stack
             space to something that is unused and thus unreferenced.  */
          gcc_assert (DECL_P (decl)
                      && referenced_var_lookup (uid));
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                         decl)) = name;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (NULL);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);
      add_partitioned_vars_to_ptset (&cfun->gimple_df->callused,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the end
   of the partitioning process we'll have a nice block easy to lay out within
   the stack frame.  */

static void
union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
{
  size_t i, last;
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  /* Update each element of partition B with the given offset,
     and merge them into partition A.  */
  for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
    {
      stack_vars[i].offset += offset;
      stack_vars[i].representative = a;
    }
  stack_vars[last].next = stack_vars[a].next;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}
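
/* For illustration (hypothetical indices): if A is the singleton
   partition {0} and B is the chain 1 -> 2 with member offsets 0 and 4,
   union_stack_vars (0, 1, 8) produces the chain 0 -> 1 -> 2 with
   offsets 8 and 12 for the old B members, all with representative 0,
   and folds B's conflict bitmap into A's.  */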

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
            offset(B) = O
            O += size(B)
            S -= size(B)
          }
        }
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      HOST_WIDE_INT isize = stack_vars[i].size;
      HOST_WIDE_INT offset = 0;

      for (sj = si; sj-- > 0; )
        {
          size_t j = stack_vars_sorted[sj];
          HOST_WIDE_INT jsize = stack_vars[j].size;
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore objects too large for the remaining space.  */
          if (isize < jsize)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Refine the remaining space check to include alignment.  */
          if (offset & (jalign - 1))
            {
              HOST_WIDE_INT toff = offset;
              toff += jalign - 1;
              toff &= -(HOST_WIDE_INT)jalign;
              if (isize - (toff - offset) < jsize)
                continue;

              isize -= toff - offset;
              offset = toff;
            }

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j, offset);

          isize -= jsize;
          if (isize == 0)
            break;
        }
    }

  if (optimize)
    update_alias_info_with_stack_vars ();
}
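
/* For illustration (hypothetical, pairwise non-conflicting objects of
   sizes 4, 8 and 16): the 4-byte object is first merged into the
   8-byte one, and that partition is then merged into the 16-byte one,
   so a single 16-byte partition ends up backing all three objects
   instead of 28 bytes of separate slots.  */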

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
          fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
                   stack_vars[j].offset);
        }
    }
}

/* Assign rtl to DECL at frame offset OFFSET.  */

static void
expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
{
  /* Alignment is unsigned.  */
  unsigned HOST_WIDE_INT align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (virtual_stack_vars_rtx, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0)
        align = STACK_BOUNDARY;
      else if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = MAX_SUPPORTED_STACK_ALIGNMENT;

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}
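
/* For illustration: "offset & -offset" above isolates the least
   significant set bit of the phase-corrected offset, i.e. the largest
   power of two known to divide it; e.g. an offset of 24 (or -24)
   yields 8, proving 8-byte alignment from the offset alone.  */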

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      if ((TREE_CODE (stack_vars[i].decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, stack_vars[i].decl)]
           : DECL_RTL (stack_vars[i].decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (stack_vars[i].decl))
        continue;

      offset = alloc_stack_frame_space (stack_vars[i].size,
                                        stack_vars[i].alignb);

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
          expand_one_stack_var_at (stack_vars[j].decl,
                                   stack_vars[j].offset + offset);
        }
    }
}

/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset, align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  align = get_decl_align_unit (SSAVAR (var));
  offset = alloc_stack_frame_space (size, align);

  expand_one_stack_var_at (var, offset);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}

/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return the stack usage this variable is supposed to take.  */

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  tree origvar = var;
  var = SSAVAR (var);

  if (SUPPORTS_STACK_ALIGNMENT
      && TREE_TYPE (var) != error_mark_node
      && TREE_CODE (var) == VAR_DECL)
    {
      unsigned int align;

      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      if (crtl->stack_alignment_estimated < align)
        {
          /* stack_alignment_estimated shouldn't change after stack
             realign decision made.  */
          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;
        }
    }

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
    {
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t))
      expand_one_var (t, toplevel, true);

  this_sv_num = stack_vars_num;

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;

      for (i = old_sv_num; i < new_sv_num; ++i)
        for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;)
          add_stack_var_conflict (i, j);
    }
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
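
/* For illustration: "char buf[64]" classifies as
   SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY assuming the default
   --param ssp-buffer-size=8, "char buf[4]" as
   SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY, and "int buf[64]" only
   as SPCT_HAS_ARRAY.  */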

/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}

/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}

/* A subroutine of estimated_stack_frame_size.  Walk down through the
   BLOCK tree accounting variable sizes the same way
   expand_used_vars_for_block walks them for expansion, without
   actually expanding anything.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static HOST_WIDE_INT
account_used_vars_for_block (tree block, bool toplevel)
{
  tree t;
  HOST_WIDE_INT size = 0;

  /* Account all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t))
      size += expand_one_var (t, toplevel, false);

  /* Account all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    size += account_used_vars_for_block (t, false);

  return size;
}

/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  tree t;
  /* Set TREE_USED on all variables in the local_decls.  */
  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    TREE_USED (TREE_VALUE (t)) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  size_t i, n = stack_vars_num;
  for (i = 0; i < n; i++)
    BITMAP_FREE (stack_vars[i].conflicts);
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_alloc = stack_vars_num = 0;
}

/* Make a fair guess for the size of the stack frame of the current
   function.  This doesn't have to be exact, the result is only used
   in the inline heuristics.  So we don't want to run the full stack
   var packing algorithm (which is quadratic in the number of stack
   vars).  Instead, we calculate the total size of all stack vars.
   This turns out to be a pretty fair estimate -- packing of stack
   vars doesn't happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (void)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree t, outer_block = DECL_INITIAL (current_function_decl);

  init_vars_expansion ();

  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    {
      tree var = TREE_VALUE (t);

      if (TREE_USED (var))
        size += expand_one_var (var, true, false);
      TREE_USED (var) = 1;
    }
  size += account_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
      fini_vars_expansion ();
    }

  return size;
}
1319 | ||
1f6d3a08 | 1320 | /* Expand all variables used in the function. */ |
727a31fa RH |
1321 | |
1322 | static void | |
1323 | expand_used_vars (void) | |
1324 | { | |
802e9f8e | 1325 | tree t, next, outer_block = DECL_INITIAL (current_function_decl); |
4e3825db | 1326 | unsigned i; |
727a31fa | 1327 | |
1f6d3a08 RH |
1328 | /* Compute the phase of the stack frame for this function. */ |
1329 | { | |
1330 | int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT; | |
1331 | int off = STARTING_FRAME_OFFSET % align; | |
1332 | frame_phase = off ? align - off : 0; | |
1333 | } | |
727a31fa | 1334 | |
ff28a94d | 1335 | init_vars_expansion (); |
7d69de61 | 1336 | |
4e3825db MM |
1337 | for (i = 0; i < SA.map->num_partitions; i++) |
1338 | { | |
1339 | tree var = partition_to_var (SA.map, i); | |
1340 | ||
1341 | gcc_assert (is_gimple_reg (var)); | |
1342 | if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL) | |
1343 | expand_one_var (var, true, true); | |
1344 | else | |
1345 | { | |
1346 | /* This is a PARM_DECL or RESULT_DECL. For those partitions that | |
1347 | contain the default def (representing the parm or result itself) | |
1348 | we don't do anything here. But those which don't contain the | |
1349 | default def (representing a temporary based on the parm/result) | |
1350 | we need to allocate space just like for normal VAR_DECLs. */ | |
1351 | if (!bitmap_bit_p (SA.partition_has_default_def, i)) | |
1352 | { | |
1353 | expand_one_var (var, true, true); | |
1354 | gcc_assert (SA.partition_to_pseudo[i]); | |
1355 | } | |
1356 | } | |
1357 | } | |
1358 | ||
cb91fab0 | 1359 | /* At this point all variables on the local_decls with TREE_USED |
1f6d3a08 | 1360 | set are not associated with any block scope. Lay them out. */ |
802e9f8e JJ |
1361 | t = cfun->local_decls; |
1362 | cfun->local_decls = NULL_TREE; | |
1363 | for (; t; t = next) | |
1f6d3a08 RH |
1364 | { |
1365 | tree var = TREE_VALUE (t); | |
1366 | bool expand_now = false; | |
1367 | ||
802e9f8e JJ |
1368 | next = TREE_CHAIN (t); |
1369 | ||
4e3825db MM |
1370 | /* Expanded above already. */ |
1371 | if (is_gimple_reg (var)) | |
eb7adebc MM |
1372 | { |
1373 | TREE_USED (var) = 0; | |
1374 | ggc_free (t); | |
1375 | continue; | |
1376 | } | |
1f6d3a08 RH |
1377 | /* We didn't set a block for static or extern because it's hard |
1378 | to tell the difference between a global variable (re)declared | |
1379 | in a local scope, and one that's really declared there to | |
1380 | begin with. And it doesn't really matter much, since we're | |
1381 | not giving them stack space. Expand them now. */ | |
4e3825db | 1382 | else if (TREE_STATIC (var) || DECL_EXTERNAL (var)) |
1f6d3a08 RH |
1383 | expand_now = true; |
1384 | ||
1385 | /* If the variable is not associated with any block, then it | |
1386 | was created by the optimizers, and could be live anywhere | |
1387 | in the function. */ | |
1388 | else if (TREE_USED (var)) | |
1389 | expand_now = true; | |
1390 | ||
1391 | /* Finally, mark all variables on the list as used. We'll use | |
1392 | this in a moment when we expand those associated with scopes. */ | |
1393 | TREE_USED (var) = 1; | |
1394 | ||
1395 | if (expand_now) | |
802e9f8e JJ |
1396 | { |
1397 | expand_one_var (var, true, true); | |
1398 | if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var)) | |
1399 | { | |
1400 | rtx rtl = DECL_RTL_IF_SET (var); | |
1401 | ||
1402 | /* Keep artificial non-ignored vars in cfun->local_decls | |
1403 | chain until instantiate_decls. */ | |
1404 | if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT)) | |
1405 | { | |
1406 | TREE_CHAIN (t) = cfun->local_decls; | |
1407 | cfun->local_decls = t; | |
1408 | continue; | |
1409 | } | |
1410 | } | |
1411 | } | |
1412 | ||
1413 | ggc_free (t); | |
1f6d3a08 | 1414 | } |
1f6d3a08 RH |
1415 | |
1416 | /* At this point, all variables within the block tree with TREE_USED | |
1417 | set are actually used by the optimized function. Lay them out. */ | |
1418 | expand_used_vars_for_block (outer_block, true); | |
1419 | ||
1420 | if (stack_vars_num > 0) | |
1421 | { | |
1422 | /* Due to the way alias sets work, no variables with non-conflicting | |
c22cacf3 | 1423 | alias sets may be assigned the same address. Add conflicts to |
1f6d3a08 RH |
1424 | reflect this. */ |
1425 | add_alias_set_conflicts (); | |
1426 | ||
c22cacf3 | 1427 | /* If stack protection is enabled, we don't share space between |
7d69de61 RH |
1428 | vulnerable data and non-vulnerable data. */ |
1429 | if (flag_stack_protect) | |
1430 | add_stack_protection_conflicts (); | |
1431 | ||
c22cacf3 | 1432 | /* Now that we have collected all stack variables, and have computed a |
1f6d3a08 RH |
1433 | minimal interference graph, attempt to save some stack space. */ |
1434 | partition_stack_vars (); | |
1435 | if (dump_file) | |
1436 | dump_stack_var_partition (); | |
7d69de61 RH |
1437 | } |
1438 | ||
1439 | /* There are several conditions under which we should create a | |
1440 | stack guard: protect-all, alloca used, protected decls present. */ | |
1441 | if (flag_stack_protect == 2 | |
1442 | || (flag_stack_protect | |
e3b5732b | 1443 | && (cfun->calls_alloca || has_protected_decls))) |
7d69de61 | 1444 | create_stack_guard (); |
1f6d3a08 | 1445 | |
7d69de61 RH |
1446 | /* Assign rtl to each variable based on these partitions. */ |
1447 | if (stack_vars_num > 0) | |
1448 | { | |
1449 | /* Reorder decls to be protected by iterating over the variables | |
1450 | array multiple times, and allocating out of each phase in turn. */ | |
c22cacf3 | 1451 | /* ??? We could probably integrate this into the qsort we did |
7d69de61 RH |
1452 | earlier, such that we naturally see these variables first, |
1453 | and thus naturally allocate things in the right order. */ | |
1454 | if (has_protected_decls) | |
1455 | { | |
1456 | /* Phase 1 contains only character arrays. */ | |
1457 | expand_stack_vars (stack_protect_decl_phase_1); | |
1458 | ||
1459 | /* Phase 2 contains other kinds of arrays. */ | |
1460 | if (flag_stack_protect == 2) | |
1461 | expand_stack_vars (stack_protect_decl_phase_2); | |
1462 | } | |
1463 | ||
1464 | expand_stack_vars (NULL); | |
1f6d3a08 | 1465 | |
ff28a94d | 1466 | fini_vars_expansion (); |
1f6d3a08 RH |
1467 | } |
1468 | ||
1469 | /* If the target requires that FRAME_OFFSET be aligned, do it. */ | |
1470 | if (STACK_ALIGNMENT_NEEDED) | |
1471 | { | |
1472 | HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT; | |
1473 | if (!FRAME_GROWS_DOWNWARD) | |
1474 | frame_offset += align - 1; | |
1475 | frame_offset &= -align; | |
1476 | } | |
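/* Illustrative note (example values, not from the original source): with
   align = 16 and a downward-growing frame, frame_offset = -40 becomes
   -40 & -16 = -48; with an upward-growing frame, 40 + 15 = 55 and
   55 & -16 = 48.  Either way frame_offset ends up 16-byte aligned.  */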
727a31fa RH |
1477 | } |
1478 | ||
1479 | ||
b7211528 SB |
1480 | /* If we need to produce a detailed dump, print the tree representation |
1481 | for STMT to the dump file. SINCE is the last RTX after which the RTL | |
1482 | generated for STMT should have been appended. */ | |
1483 | ||
1484 | static void | |
726a989a | 1485 | maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since) |
b7211528 SB |
1486 | { |
1487 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
1488 | { | |
1489 | fprintf (dump_file, "\n;; "); | |
b5b8b0ac AO |
1490 | print_gimple_stmt (dump_file, stmt, 0, |
1491 | TDF_SLIM | (dump_flags & TDF_LINENO)); | |
b7211528 SB |
1492 | fprintf (dump_file, "\n"); |
1493 | ||
1494 | print_rtl (dump_file, since ? NEXT_INSN (since) : since); | |
1495 | } | |
1496 | } | |
1497 | ||
8b11009b ZD |
1498 | /* Maps the blocks that do not contain tree labels to rtx labels. */ |
1499 | ||
1500 | static struct pointer_map_t *lab_rtx_for_bb; | |
1501 | ||
a9b77cd1 ZD |
1502 | /* Returns the label_rtx expression for a label starting basic block BB. */ |
1503 | ||
1504 | static rtx | |
726a989a | 1505 | label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED) |
a9b77cd1 | 1506 | { |
726a989a RB |
1507 | gimple_stmt_iterator gsi; |
1508 | tree lab; | |
1509 | gimple lab_stmt; | |
8b11009b | 1510 | void **elt; |
a9b77cd1 ZD |
1511 | |
1512 | if (bb->flags & BB_RTL) | |
1513 | return block_label (bb); | |
1514 | ||
8b11009b ZD |
1515 | elt = pointer_map_contains (lab_rtx_for_bb, bb); |
1516 | if (elt) | |
ae50c0cb | 1517 | return (rtx) *elt; |
8b11009b ZD |
1518 | |
1519 | /* Find the tree label if it is present. */ | |
b8698a0f | 1520 | |
726a989a | 1521 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
a9b77cd1 | 1522 | { |
726a989a RB |
1523 | lab_stmt = gsi_stmt (gsi); |
1524 | if (gimple_code (lab_stmt) != GIMPLE_LABEL) | |
a9b77cd1 ZD |
1525 | break; |
1526 | ||
726a989a | 1527 | lab = gimple_label_label (lab_stmt); |
a9b77cd1 ZD |
1528 | if (DECL_NONLOCAL (lab)) |
1529 | break; | |
1530 | ||
1531 | return label_rtx (lab); | |
1532 | } | |
1533 | ||
8b11009b ZD |
1534 | elt = pointer_map_insert (lab_rtx_for_bb, bb); |
1535 | *elt = gen_label_rtx (); | |
ae50c0cb | 1536 | return (rtx) *elt; |
a9b77cd1 ZD |
1537 | } |
1538 | ||
726a989a | 1539 | |
529ff441 MM |
1540 | /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge |
1541 | of a basic block where we just expanded the conditional at the end, | |
315adeda MM |
1542 | possibly clean up the CFG and instruction sequence. LAST is the |
1543 | last instruction before the just emitted jump sequence. */ | |
529ff441 MM |
1544 | |
1545 | static void | |
315adeda | 1546 | maybe_cleanup_end_of_block (edge e, rtx last) |
529ff441 MM |
1547 | { |
1548 | /* Special case: when jumpif decides that the condition is | |
1549 | trivial it emits an unconditional jump (and the necessary | |
1550 | barrier). But we still have two edges, the fallthru one is | |
1551 | wrong. purge_dead_edges would clean this up later. Unfortunately | |
1552 | we have to insert insns (and split edges) before | |
1553 | find_many_sub_basic_blocks and hence before purge_dead_edges. | |
1554 | But splitting edges might create new blocks which depend on the | |
1555 | fact that if there are two edges there's no barrier. So the | |
1556 | barrier would get lost and verify_flow_info would ICE. Instead | |
1557 | of auditing all edge splitters to care for the barrier (which | |
1558 | normally isn't there in a cleaned CFG), fix it here. */ | |
1559 | if (BARRIER_P (get_last_insn ())) | |
1560 | { | |
529ff441 MM |
1561 | rtx insn; |
1562 | remove_edge (e); | |
1563 | /* Now we have a single successor block; if we have insns to | |
1564 | insert on the remaining edge, we will potentially insert | |
1565 | them at the end of this block (if the dest block isn't feasible) | |
1566 | in order to avoid splitting the edge. This insertion will take | |
1567 | place in front of the last jump. But we might have emitted | |
1568 | multiple jumps (conditional and one unconditional) to the | |
1569 | same destination. Inserting in front of the last one then | |
1570 | is a problem. See PR 40021. We fix this by deleting all | |
1571 | jumps except the last unconditional one. */ | |
1572 | insn = PREV_INSN (get_last_insn ()); | |
1573 | /* Make sure we have an unconditional jump. Otherwise we're | |
1574 | confused. */ | |
1575 | gcc_assert (JUMP_P (insn) && !any_condjump_p (insn)); | |
315adeda | 1576 | for (insn = PREV_INSN (insn); insn != last;) |
529ff441 MM |
1577 | { |
1578 | insn = PREV_INSN (insn); | |
1579 | if (JUMP_P (NEXT_INSN (insn))) | |
1580 | delete_insn (NEXT_INSN (insn)); | |
1581 | } | |
1582 | } | |
1583 | } | |
1584 | ||
726a989a | 1585 | /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND. |
80c7a9eb RH |
1586 | Returns a new basic block if we've terminated the current basic |
1587 | block and created a new one. */ | |
1588 | ||
1589 | static basic_block | |
726a989a | 1590 | expand_gimple_cond (basic_block bb, gimple stmt) |
80c7a9eb RH |
1591 | { |
1592 | basic_block new_bb, dest; | |
1593 | edge new_edge; | |
1594 | edge true_edge; | |
1595 | edge false_edge; | |
b7211528 | 1596 | rtx last2, last; |
28ed065e MM |
1597 | enum tree_code code; |
1598 | tree op0, op1; | |
1599 | ||
1600 | code = gimple_cond_code (stmt); | |
1601 | op0 = gimple_cond_lhs (stmt); | |
1602 | op1 = gimple_cond_rhs (stmt); | |
1603 | /* We're sometimes presented with such code: | |
1604 | D.123_1 = x < y; | |
1605 | if (D.123_1 != 0) | |
1606 | ... | |
1607 | This would expand to two comparisons which then later might | |
1608 | be cleaned up by combine. But some pattern matchers like if-conversion | |
1609 | work better when there's only one compare, so make up for this | |
1610 | here as a special exception if TER would have made the same change. */ | |
1611 | if (gimple_cond_single_var_p (stmt) | |
1612 | && SA.values | |
1613 | && TREE_CODE (op0) == SSA_NAME | |
1614 | && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0))) | |
1615 | { | |
1616 | gimple second = SSA_NAME_DEF_STMT (op0); | |
e83f4b68 | 1617 | if (gimple_code (second) == GIMPLE_ASSIGN) |
28ed065e | 1618 | { |
e83f4b68 MM |
1619 | enum tree_code code2 = gimple_assign_rhs_code (second); |
1620 | if (TREE_CODE_CLASS (code2) == tcc_comparison) | |
1621 | { | |
1622 | code = code2; | |
1623 | op0 = gimple_assign_rhs1 (second); | |
1624 | op1 = gimple_assign_rhs2 (second); | |
1625 | } | |
1626 | /* If jumps are cheap turn some more codes into | |
1627 | jumpy sequences. */ | |
1628 | else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4) | |
1629 | { | |
1630 | if ((code2 == BIT_AND_EXPR | |
1631 | && TYPE_PRECISION (TREE_TYPE (op0)) == 1 | |
1632 | && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST) | |
1633 | || code2 == TRUTH_AND_EXPR) | |
1634 | { | |
1635 | code = TRUTH_ANDIF_EXPR; | |
1636 | op0 = gimple_assign_rhs1 (second); | |
1637 | op1 = gimple_assign_rhs2 (second); | |
1638 | } | |
1639 | else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR) | |
1640 | { | |
1641 | code = TRUTH_ORIF_EXPR; | |
1642 | op0 = gimple_assign_rhs1 (second); | |
1643 | op1 = gimple_assign_rhs2 (second); | |
1644 | } | |
1645 | } | |
28ed065e MM |
1646 | } |
1647 | } | |
b7211528 SB |
1648 | |
1649 | last2 = last = get_last_insn (); | |
80c7a9eb RH |
1650 | |
1651 | extract_true_false_edges_from_block (bb, &true_edge, &false_edge); | |
726a989a | 1652 | if (gimple_has_location (stmt)) |
80c7a9eb | 1653 | { |
726a989a RB |
1654 | set_curr_insn_source_location (gimple_location (stmt)); |
1655 | set_curr_insn_block (gimple_block (stmt)); | |
80c7a9eb RH |
1656 | } |
1657 | ||
1658 | /* These flags have no purpose in RTL land. */ | |
1659 | true_edge->flags &= ~EDGE_TRUE_VALUE; | |
1660 | false_edge->flags &= ~EDGE_FALSE_VALUE; | |
1661 | ||
1662 | /* We can either have a pure conditional jump with one fallthru edge or | |
1663 | two-way jump that needs to be decomposed into two basic blocks. */ | |
a9b77cd1 | 1664 | if (false_edge->dest == bb->next_bb) |
80c7a9eb | 1665 | { |
28ed065e | 1666 | jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest)); |
10d22567 | 1667 | add_reg_br_prob_note (last, true_edge->probability); |
726a989a | 1668 | maybe_dump_rtl_for_gimple_stmt (stmt, last); |
a9b77cd1 | 1669 | if (true_edge->goto_locus) |
7241571e JJ |
1670 | { |
1671 | set_curr_insn_source_location (true_edge->goto_locus); | |
1672 | set_curr_insn_block (true_edge->goto_block); | |
1673 | true_edge->goto_locus = curr_insn_locator (); | |
1674 | } | |
1675 | true_edge->goto_block = NULL; | |
a9b77cd1 | 1676 | false_edge->flags |= EDGE_FALLTHRU; |
315adeda | 1677 | maybe_cleanup_end_of_block (false_edge, last); |
80c7a9eb RH |
1678 | return NULL; |
1679 | } | |
a9b77cd1 | 1680 | if (true_edge->dest == bb->next_bb) |
80c7a9eb | 1681 | { |
28ed065e | 1682 | jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest)); |
10d22567 | 1683 | add_reg_br_prob_note (last, false_edge->probability); |
726a989a | 1684 | maybe_dump_rtl_for_gimple_stmt (stmt, last); |
a9b77cd1 | 1685 | if (false_edge->goto_locus) |
7241571e JJ |
1686 | { |
1687 | set_curr_insn_source_location (false_edge->goto_locus); | |
1688 | set_curr_insn_block (false_edge->goto_block); | |
1689 | false_edge->goto_locus = curr_insn_locator (); | |
1690 | } | |
1691 | false_edge->goto_block = NULL; | |
a9b77cd1 | 1692 | true_edge->flags |= EDGE_FALLTHRU; |
315adeda | 1693 | maybe_cleanup_end_of_block (true_edge, last); |
80c7a9eb RH |
1694 | return NULL; |
1695 | } | |
80c7a9eb | 1696 | |
28ed065e | 1697 | jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest)); |
10d22567 | 1698 | add_reg_br_prob_note (last, true_edge->probability); |
80c7a9eb | 1699 | last = get_last_insn (); |
7241571e JJ |
1700 | if (false_edge->goto_locus) |
1701 | { | |
1702 | set_curr_insn_source_location (false_edge->goto_locus); | |
1703 | set_curr_insn_block (false_edge->goto_block); | |
1704 | false_edge->goto_locus = curr_insn_locator (); | |
1705 | } | |
1706 | false_edge->goto_block = NULL; | |
a9b77cd1 | 1707 | emit_jump (label_rtx_for_bb (false_edge->dest)); |
80c7a9eb RH |
1708 | |
1709 | BB_END (bb) = last; | |
1710 | if (BARRIER_P (BB_END (bb))) | |
1711 | BB_END (bb) = PREV_INSN (BB_END (bb)); | |
1712 | update_bb_for_insn (bb); | |
1713 | ||
1714 | new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb); | |
1715 | dest = false_edge->dest; | |
1716 | redirect_edge_succ (false_edge, new_bb); | |
1717 | false_edge->flags |= EDGE_FALLTHRU; | |
1718 | new_bb->count = false_edge->count; | |
1719 | new_bb->frequency = EDGE_FREQUENCY (false_edge); | |
1720 | new_edge = make_edge (new_bb, dest, 0); | |
1721 | new_edge->probability = REG_BR_PROB_BASE; | |
1722 | new_edge->count = new_bb->count; | |
1723 | if (BARRIER_P (BB_END (new_bb))) | |
1724 | BB_END (new_bb) = PREV_INSN (BB_END (new_bb)); | |
1725 | update_bb_for_insn (new_bb); | |
1726 | ||
726a989a | 1727 | maybe_dump_rtl_for_gimple_stmt (stmt, last2); |
c22cacf3 | 1728 | |
7787b4aa JJ |
1729 | if (true_edge->goto_locus) |
1730 | { | |
1731 | set_curr_insn_source_location (true_edge->goto_locus); | |
1732 | set_curr_insn_block (true_edge->goto_block); | |
1733 | true_edge->goto_locus = curr_insn_locator (); | |
1734 | } | |
1735 | true_edge->goto_block = NULL; | |
1736 | ||
80c7a9eb RH |
1737 | return new_bb; |
1738 | } | |
1739 | ||
28ed065e MM |
1740 | /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL |
1741 | statement STMT. */ | |
1742 | ||
1743 | static void | |
1744 | expand_call_stmt (gimple stmt) | |
1745 | { | |
1746 | tree exp; | |
1747 | tree lhs = gimple_call_lhs (stmt); | |
28ed065e | 1748 | size_t i; |
e23817b3 RG |
1749 | bool builtin_p; |
1750 | tree decl; | |
28ed065e MM |
1751 | |
1752 | exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3); | |
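/* Note (added for illustration): build_vl_exp records the operand count
   in slot 0; slots 1 and 2 hold the callee (CALL_EXPR_FN) and the static
   chain, and the actual arguments start at slot 3 -- hence the "+ 3".  */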
1753 | ||
1754 | CALL_EXPR_FN (exp) = gimple_call_fn (stmt); | |
e23817b3 RG |
1755 | decl = gimple_call_fndecl (stmt); |
1756 | builtin_p = decl && DECL_BUILT_IN (decl); | |
1757 | ||
28ed065e MM |
1758 | TREE_TYPE (exp) = gimple_call_return_type (stmt); |
1759 | CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt); | |
1760 | ||
1761 | for (i = 0; i < gimple_call_num_args (stmt); i++) | |
e23817b3 RG |
1762 | { |
1763 | tree arg = gimple_call_arg (stmt, i); | |
1764 | gimple def; | |
1765 | /* TER substitutes ADDR_EXPRs into arguments of builtin functions so we | |
1766 | have a chance to infer more correct alignment information. See PR39954. */ | |
1767 | if (builtin_p | |
1768 | && TREE_CODE (arg) == SSA_NAME | |
1769 | && (def = get_gimple_for_ssa_name (arg)) | |
1770 | && gimple_assign_rhs_code (def) == ADDR_EXPR) | |
1771 | arg = gimple_assign_rhs1 (def); | |
1772 | CALL_EXPR_ARG (exp, i) = arg; | |
1773 | } | |
28ed065e | 1774 | |
93f28ca7 | 1775 | if (gimple_has_side_effects (stmt)) |
28ed065e MM |
1776 | TREE_SIDE_EFFECTS (exp) = 1; |
1777 | ||
93f28ca7 | 1778 | if (gimple_call_nothrow_p (stmt)) |
28ed065e MM |
1779 | TREE_NOTHROW (exp) = 1; |
1780 | ||
1781 | CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt); | |
1782 | CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt); | |
1783 | CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt); | |
1784 | CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt); | |
1785 | CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt); | |
1786 | SET_EXPR_LOCATION (exp, gimple_location (stmt)); | |
1787 | TREE_BLOCK (exp) = gimple_block (stmt); | |
1788 | ||
28ed065e MM |
1789 | if (lhs) |
1790 | expand_assignment (lhs, exp, false); | |
1791 | else | |
1792 | expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL); | |
1793 | } | |
1794 | ||
1795 | /* A subroutine of expand_gimple_stmt, expanding one gimple statement | |
1796 | STMT that doesn't require special handling for outgoing edges. That | |
1797 | is, no tailcalls and no GIMPLE_COND. */ | |
1798 | ||
1799 | static void | |
1800 | expand_gimple_stmt_1 (gimple stmt) | |
1801 | { | |
1802 | tree op0; | |
1803 | switch (gimple_code (stmt)) | |
1804 | { | |
1805 | case GIMPLE_GOTO: | |
1806 | op0 = gimple_goto_dest (stmt); | |
1807 | if (TREE_CODE (op0) == LABEL_DECL) | |
1808 | expand_goto (op0); | |
1809 | else | |
1810 | expand_computed_goto (op0); | |
1811 | break; | |
1812 | case GIMPLE_LABEL: | |
1813 | expand_label (gimple_label_label (stmt)); | |
1814 | break; | |
1815 | case GIMPLE_NOP: | |
1816 | case GIMPLE_PREDICT: | |
1817 | break; | |
28ed065e MM |
1818 | case GIMPLE_SWITCH: |
1819 | expand_case (stmt); | |
1820 | break; | |
1821 | case GIMPLE_ASM: | |
1822 | expand_asm_stmt (stmt); | |
1823 | break; | |
1824 | case GIMPLE_CALL: | |
1825 | expand_call_stmt (stmt); | |
1826 | break; | |
1827 | ||
1828 | case GIMPLE_RETURN: | |
1829 | op0 = gimple_return_retval (stmt); | |
1830 | ||
1831 | if (op0 && op0 != error_mark_node) | |
1832 | { | |
1833 | tree result = DECL_RESULT (current_function_decl); | |
1834 | ||
1835 | /* If we are not returning the current function's RESULT_DECL, | |
1836 | build an assignment to it. */ | |
1837 | if (op0 != result) | |
1838 | { | |
1839 | /* I believe that a function's RESULT_DECL is unique. */ | |
1840 | gcc_assert (TREE_CODE (op0) != RESULT_DECL); | |
1841 | ||
1842 | /* ??? We'd like to use simply expand_assignment here, | |
1843 | but this fails if the value is of BLKmode but the return | |
1844 | decl is a register. expand_return has special handling | |
1845 | for this combination, which eventually should move | |
1846 | to common code. See comments there. Until then, let's | |
1847 | build a modify expression :-/ */ | |
1848 | op0 = build2 (MODIFY_EXPR, TREE_TYPE (result), | |
1849 | result, op0); | |
1850 | } | |
1851 | } | |
1852 | if (!op0) | |
1853 | expand_null_return (); | |
1854 | else | |
1855 | expand_return (op0); | |
1856 | break; | |
1857 | ||
1858 | case GIMPLE_ASSIGN: | |
1859 | { | |
1860 | tree lhs = gimple_assign_lhs (stmt); | |
1861 | ||
1862 | /* Tree expand used to fiddle with |= and &= of two bitfield | |
1863 | COMPONENT_REFs here. This can't happen with GIMPLE; the LHS | |
1864 | of binary assigns must be a gimple reg. */ | |
1865 | ||
1866 | if (TREE_CODE (lhs) != SSA_NAME | |
1867 | || get_gimple_rhs_class (gimple_expr_code (stmt)) | |
1868 | == GIMPLE_SINGLE_RHS) | |
1869 | { | |
1870 | tree rhs = gimple_assign_rhs1 (stmt); | |
1871 | gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt)) | |
1872 | == GIMPLE_SINGLE_RHS); | |
1873 | if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs)) | |
1874 | SET_EXPR_LOCATION (rhs, gimple_location (stmt)); | |
1875 | expand_assignment (lhs, rhs, | |
1876 | gimple_assign_nontemporal_move_p (stmt)); | |
1877 | } | |
1878 | else | |
1879 | { | |
1880 | rtx target, temp; | |
1881 | bool nontemporal = gimple_assign_nontemporal_move_p (stmt); | |
1882 | struct separate_ops ops; | |
1883 | bool promoted = false; | |
1884 | ||
1885 | target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE); | |
1886 | if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target)) | |
1887 | promoted = true; | |
1888 | ||
1889 | ops.code = gimple_assign_rhs_code (stmt); | |
1890 | ops.type = TREE_TYPE (lhs); | |
1891 | switch (get_gimple_rhs_class (gimple_expr_code (stmt))) | |
1892 | { | |
1893 | case GIMPLE_BINARY_RHS: | |
1894 | ops.op1 = gimple_assign_rhs2 (stmt); | |
1895 | /* Fallthru */ | |
1896 | case GIMPLE_UNARY_RHS: | |
1897 | ops.op0 = gimple_assign_rhs1 (stmt); | |
1898 | break; | |
1899 | default: | |
1900 | gcc_unreachable (); | |
1901 | } | |
1902 | ops.location = gimple_location (stmt); | |
1903 | ||
1904 | /* If we want to use a nontemporal store, force the value to | |
1905 | a register first. If we store into a promoted register, | |
1906 | don't expand directly to the target. */ | |
1907 | temp = nontemporal || promoted ? NULL_RTX : target; | |
1908 | temp = expand_expr_real_2 (&ops, temp, GET_MODE (target), | |
1909 | EXPAND_NORMAL); | |
1910 | ||
1911 | if (temp == target) | |
1912 | ; | |
1913 | else if (promoted) | |
1914 | { | |
4e18a7d4 | 1915 | int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target); |
28ed065e MM |
1916 | /* If TEMP is a VOIDmode constant, use convert_modes to make |
1917 | sure that we properly convert it. */ | |
1918 | if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode) | |
1919 | { | |
1920 | temp = convert_modes (GET_MODE (target), | |
1921 | TYPE_MODE (ops.type), | |
4e18a7d4 | 1922 | temp, unsignedp); |
28ed065e | 1923 | temp = convert_modes (GET_MODE (SUBREG_REG (target)), |
4e18a7d4 | 1924 | GET_MODE (target), temp, unsignedp); |
28ed065e MM |
1925 | } |
1926 | ||
4e18a7d4 | 1927 | convert_move (SUBREG_REG (target), temp, unsignedp); |
28ed065e MM |
1928 | } |
1929 | else if (nontemporal && emit_storent_insn (target, temp)) | |
1930 | ; | |
1931 | else | |
1932 | { | |
1933 | temp = force_operand (temp, target); | |
1934 | if (temp != target) | |
1935 | emit_move_insn (target, temp); | |
1936 | } | |
1937 | } | |
1938 | } | |
1939 | break; | |
1940 | ||
1941 | default: | |
1942 | gcc_unreachable (); | |
1943 | } | |
1944 | } | |
1945 | ||
1946 | /* Expand one gimple statement STMT and return the last RTL instruction | |
1947 | before any of the newly generated ones. | |
1948 | ||
1949 | In addition to generating the necessary RTL instructions this also | |
1950 | sets REG_EH_REGION notes if necessary and sets the current source | |
1951 | location for diagnostics. */ | |
1952 | ||
1953 | static rtx | |
1954 | expand_gimple_stmt (gimple stmt) | |
1955 | { | |
1d65f45c | 1956 | int lp_nr = 0; |
28ed065e MM |
1957 | rtx last = NULL; |
1958 | location_t saved_location = input_location; | |
1959 | ||
1960 | last = get_last_insn (); | |
1961 | ||
1962 | /* If this is an expression of some kind and it has an associated line | |
1963 | number, then emit the line number before expanding the expression. | |
1964 | ||
1965 | We need to save and restore the file and line information so that | |
1966 | errors discovered during expansion are emitted with the right | |
1967 | information. It would be better if the diagnostic routines | |
1968 | used the file/line information embedded in the tree nodes rather | |
1969 | than globals. */ | |
1970 | gcc_assert (cfun); | |
1971 | ||
1972 | if (gimple_has_location (stmt)) | |
1973 | { | |
1974 | input_location = gimple_location (stmt); | |
1975 | set_curr_insn_source_location (input_location); | |
1976 | ||
1977 | /* Record where the insns produced belong. */ | |
1978 | set_curr_insn_block (gimple_block (stmt)); | |
1979 | } | |
1980 | ||
1981 | expand_gimple_stmt_1 (stmt); | |
1982 | /* Free any temporaries used to evaluate this statement. */ | |
1983 | free_temp_slots (); | |
1984 | ||
1985 | input_location = saved_location; | |
1986 | ||
1987 | /* Mark all insns that may trap. */ | |
1d65f45c RH |
1988 | lp_nr = lookup_stmt_eh_lp (stmt); |
1989 | if (lp_nr) | |
28ed065e MM |
1990 | { |
1991 | rtx insn; | |
1992 | for (insn = next_real_insn (last); insn; | |
1993 | insn = next_real_insn (insn)) | |
1994 | { | |
1995 | if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX) | |
1996 | /* If we want exceptions for non-call insns, any | |
1997 | may_trap_p instruction may throw. */ | |
1998 | && GET_CODE (PATTERN (insn)) != CLOBBER | |
1999 | && GET_CODE (PATTERN (insn)) != USE | |
1d65f45c RH |
2000 | && insn_could_throw_p (insn)) |
2001 | make_reg_eh_region_note (insn, 0, lp_nr); | |
28ed065e MM |
2002 | } |
2003 | } | |
2004 | ||
2005 | return last; | |
2006 | } | |
2007 | ||
726a989a | 2008 | /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL |
224e770b RH |
2009 | that has CALL_EXPR_TAILCALL set. Returns non-null if we actually |
2010 | generated a tail call (something that might be denied by the ABI | |
cea49550 RH |
2011 | rules governing the call; see calls.c). |
2012 | ||
2013 | Sets CAN_FALLTHRU if we generated a *conditional* tail call, and | |
2014 | can still reach the rest of BB. The case here is __builtin_sqrt, | |
2015 | where the NaN result goes through the external function (with a | |
2016 | tailcall) and the normal result happens via a sqrt instruction. */ | |
80c7a9eb RH |
2017 | |
2018 | static basic_block | |
726a989a | 2019 | expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru) |
80c7a9eb | 2020 | { |
b7211528 | 2021 | rtx last2, last; |
224e770b | 2022 | edge e; |
628f6a4e | 2023 | edge_iterator ei; |
224e770b RH |
2024 | int probability; |
2025 | gcov_type count; | |
80c7a9eb | 2026 | |
28ed065e | 2027 | last2 = last = expand_gimple_stmt (stmt); |
80c7a9eb RH |
2028 | |
2029 | for (last = NEXT_INSN (last); last; last = NEXT_INSN (last)) | |
224e770b RH |
2030 | if (CALL_P (last) && SIBLING_CALL_P (last)) |
2031 | goto found; | |
80c7a9eb | 2032 | |
726a989a | 2033 | maybe_dump_rtl_for_gimple_stmt (stmt, last2); |
b7211528 | 2034 | |
cea49550 | 2035 | *can_fallthru = true; |
224e770b | 2036 | return NULL; |
80c7a9eb | 2037 | |
224e770b RH |
2038 | found: |
2039 | /* ??? Wouldn't it be better to just reset any pending stack adjust? | |
2040 | Any instructions emitted here are about to be deleted. */ | |
2041 | do_pending_stack_adjust (); | |
2042 | ||
2043 | /* Remove any non-eh, non-abnormal edges that don't go to exit. */ | |
2044 | /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be | |
2045 | EH or abnormal edges, we shouldn't have created a tail call in | |
2046 | the first place. So it seems to me we should just be removing | |
2047 | all edges here, or redirecting the existing fallthru edge to | |
2048 | the exit block. */ | |
2049 | ||
224e770b RH |
2050 | probability = 0; |
2051 | count = 0; | |
224e770b | 2052 | |
628f6a4e BE |
2053 | for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); ) |
2054 | { | |
224e770b RH |
2055 | if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH))) |
2056 | { | |
2057 | if (e->dest != EXIT_BLOCK_PTR) | |
80c7a9eb | 2058 | { |
224e770b RH |
2059 | e->dest->count -= e->count; |
2060 | e->dest->frequency -= EDGE_FREQUENCY (e); | |
2061 | if (e->dest->count < 0) | |
c22cacf3 | 2062 | e->dest->count = 0; |
224e770b | 2063 | if (e->dest->frequency < 0) |
c22cacf3 | 2064 | e->dest->frequency = 0; |
80c7a9eb | 2065 | } |
224e770b RH |
2066 | count += e->count; |
2067 | probability += e->probability; | |
2068 | remove_edge (e); | |
80c7a9eb | 2069 | } |
628f6a4e BE |
2070 | else |
2071 | ei_next (&ei); | |
80c7a9eb RH |
2072 | } |
2073 | ||
224e770b RH |
2074 | /* This is somewhat ugly: the call_expr expander often emits instructions |
2075 | after the sibcall (to perform the function return). These confuse the | |
12eff7b7 | 2076 | find_many_sub_basic_blocks code, so we need to get rid of them. */ | |
224e770b | 2077 | last = NEXT_INSN (last); |
341c100f | 2078 | gcc_assert (BARRIER_P (last)); |
cea49550 RH |
2079 | |
2080 | *can_fallthru = false; | |
224e770b RH |
2081 | while (NEXT_INSN (last)) |
2082 | { | |
2083 | /* For instance, an sqrt builtin expander expands an if with a | |
2084 | sibcall in the `then` arm and a label for the `else` arm. */ | |
2085 | if (LABEL_P (NEXT_INSN (last))) | |
cea49550 RH |
2086 | { |
2087 | *can_fallthru = true; | |
2088 | break; | |
2089 | } | |
224e770b RH |
2090 | delete_insn (NEXT_INSN (last)); |
2091 | } | |
2092 | ||
2093 | e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL); | |
2094 | e->probability += probability; | |
2095 | e->count += count; | |
2096 | BB_END (bb) = last; | |
2097 | update_bb_for_insn (bb); | |
2098 | ||
2099 | if (NEXT_INSN (last)) | |
2100 | { | |
2101 | bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb); | |
2102 | ||
2103 | last = BB_END (bb); | |
2104 | if (BARRIER_P (last)) | |
2105 | BB_END (bb) = PREV_INSN (last); | |
2106 | } | |
2107 | ||
726a989a | 2108 | maybe_dump_rtl_for_gimple_stmt (stmt, last2); |
b7211528 | 2109 | |
224e770b | 2110 | return bb; |
80c7a9eb RH |
2111 | } |
2112 | ||
b5b8b0ac AO |
2113 | /* Return the difference between the floor and the truncated result of |
2114 | a signed division by OP1 with remainder MOD. */ | |
2115 | static rtx | |
2116 | floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1) | |
2117 | { | |
2118 | /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */ | |
2119 | return gen_rtx_IF_THEN_ELSE | |
2120 | (mode, gen_rtx_NE (BImode, mod, const0_rtx), | |
2121 | gen_rtx_IF_THEN_ELSE | |
2122 | (mode, gen_rtx_LT (BImode, | |
2123 | gen_rtx_DIV (mode, op1, mod), | |
2124 | const0_rtx), | |
2125 | constm1_rtx, const0_rtx), | |
2126 | const0_rtx); | |
2127 | } | |
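/* Worked example (added for illustration): op0 = -7, op1 = 2 gives a
   truncated quotient of -3 and mod = -1.  Since mod != 0 and
   op1 / mod = -2 < 0, the adjustment is -1, and -3 + -1 = -4 = floor(-7/2).
   For op0 = 7 the adjustment is 0, since truncation already floors.  */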
2128 | ||
2129 | /* Return the difference between the ceil and the truncated result of | |
2130 | a signed division by OP1 with remainder MOD. */ | |
2131 | static rtx | |
2132 | ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1) | |
2133 | { | |
2134 | /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */ | |
2135 | return gen_rtx_IF_THEN_ELSE | |
2136 | (mode, gen_rtx_NE (BImode, mod, const0_rtx), | |
2137 | gen_rtx_IF_THEN_ELSE | |
2138 | (mode, gen_rtx_GT (BImode, | |
2139 | gen_rtx_DIV (mode, op1, mod), | |
2140 | const0_rtx), | |
2141 | const1_rtx, const0_rtx), | |
2142 | const0_rtx); | |
2143 | } | |
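/* Worked example (added for illustration): op0 = 7, op1 = 2 gives a
   truncated quotient of 3 and mod = 1.  Since mod != 0 and
   op1 / mod = 2 > 0, the adjustment is 1, and 3 + 1 = 4 = ceil(7/2).  */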
2144 | ||
2145 | /* Return the difference between the ceil and the truncated result of | |
2146 | an unsigned division by OP1 with remainder MOD. */ | |
2147 | static rtx | |
2148 | ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED) | |
2149 | { | |
2150 | /* (mod != 0 ? 1 : 0) */ | |
2151 | return gen_rtx_IF_THEN_ELSE | |
2152 | (mode, gen_rtx_NE (BImode, mod, const0_rtx), | |
2153 | const1_rtx, const0_rtx); | |
2154 | } | |
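/* Worked example (added for illustration): 7u / 2u truncates to 3 with
   mod = 1; any nonzero remainder means the ceiling is one above the
   truncated quotient, so the adjustment is 1 and 3 + 1 = 4.  */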
2155 | ||
2156 | /* Return the difference between the rounded and the truncated result | |
2157 | of a signed division by OP1 with remainder MOD. Halfway cases are | |
2158 | rounded away from zero, rather than to the nearest even number. */ | |
2159 | static rtx | |
2160 | round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1) | |
2161 | { | |
2162 | /* (abs (mod) >= abs (op1) - abs (mod) | |
2163 | ? (op1 / mod > 0 ? 1 : -1) | |
2164 | : 0) */ | |
2165 | return gen_rtx_IF_THEN_ELSE | |
2166 | (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod), | |
2167 | gen_rtx_MINUS (mode, | |
2168 | gen_rtx_ABS (mode, op1), | |
2169 | gen_rtx_ABS (mode, mod))), | |
2170 | gen_rtx_IF_THEN_ELSE | |
2171 | (mode, gen_rtx_GT (BImode, | |
2172 | gen_rtx_DIV (mode, op1, mod), | |
2173 | const0_rtx), | |
2174 | const1_rtx, constm1_rtx), | |
2175 | const0_rtx); | |
2176 | } | |
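/* Worked example (added for illustration): op0 = 7, op1 = 2 gives mod = 1,
   and abs (mod) = 1 >= abs (op1) - abs (mod) = 1, i.e. the remainder is at
   least half the divisor, so 3.5 rounds away from zero: the adjustment is
   +1 and the result is 3 + 1 = 4.  For op0 = -7 the quotient is negative,
   so the adjustment is -1, giving -4.  */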
2177 | ||
2178 | /* Return the difference between the rounded and the truncated result | |
2179 | of an unsigned division by OP1 with remainder MOD. Halfway cases | |
2180 | are rounded away from zero, rather than to the nearest even | |
2181 | number. */ | |
2182 | static rtx | |
2183 | round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1) | |
2184 | { | |
2185 | /* (mod >= op1 - mod ? 1 : 0) */ | |
2186 | return gen_rtx_IF_THEN_ELSE | |
2187 | (mode, gen_rtx_GE (BImode, mod, | |
2188 | gen_rtx_MINUS (mode, op1, mod)), | |
2189 | const1_rtx, const0_rtx); | |
2190 | } | |
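/* Worked example (added for illustration): 7u / 2u gives mod = 1 and
   op1 - mod = 1; since mod >= op1 - mod the adjustment is 1, so the
   rounded result is 3 + 1 = 4.  */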
2191 | ||
dda2da58 AO |
2192 | /* Convert X to MODE, that must be Pmode or ptr_mode, without emitting |
2193 | any rtl. */ | |
2194 | ||
2195 | static rtx | |
2196 | convert_debug_memory_address (enum machine_mode mode, rtx x) | |
2197 | { | |
2198 | enum machine_mode xmode = GET_MODE (x); | |
2199 | ||
2200 | #ifndef POINTERS_EXTEND_UNSIGNED | |
2201 | gcc_assert (mode == Pmode); | |
2202 | gcc_assert (xmode == mode || xmode == VOIDmode); | |
2203 | #else | |
2204 | gcc_assert (mode == Pmode || mode == ptr_mode); | |
2205 | ||
2206 | if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode) | |
2207 | return x; | |
2208 | ||
2209 | if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode)) | |
2210 | x = simplify_gen_subreg (mode, x, xmode, | |
2211 | subreg_lowpart_offset | |
2212 | (mode, xmode)); | |
2213 | else if (POINTERS_EXTEND_UNSIGNED > 0) | |
2214 | x = gen_rtx_ZERO_EXTEND (mode, x); | |
2215 | else if (!POINTERS_EXTEND_UNSIGNED) | |
2216 | x = gen_rtx_SIGN_EXTEND (mode, x); | |
2217 | else | |
2218 | gcc_unreachable (); | |
2219 | #endif /* POINTERS_EXTEND_UNSIGNED */ | |
2220 | ||
2221 | return x; | |
2222 | } | |
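/* Illustrative note (target assumptions for the example, not from the
   original source): on a target with Pmode == DImode, ptr_mode == SImode
   and POINTERS_EXTEND_UNSIGNED > 0, narrowing a DImode X takes the
   lowpart subreg, while widening an SImode X wraps it in a ZERO_EXTEND;
   no insns are emitted, only rtl expressions are built.  */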
2223 | ||
b5b8b0ac AO |
2224 | /* Return an RTX equivalent to the value of the tree expression |
2225 | EXP. */ | |
2226 | ||
2227 | static rtx | |
2228 | expand_debug_expr (tree exp) | |
2229 | { | |
2230 | rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX; | |
2231 | enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp)); | |
2232 | int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp)); | |
09e881c9 | 2233 | addr_space_t as; |
d4ebfa65 | 2234 | enum machine_mode address_mode; |
b5b8b0ac AO |
2235 | |
2236 | switch (TREE_CODE_CLASS (TREE_CODE (exp))) | |
2237 | { | |
2238 | case tcc_expression: | |
2239 | switch (TREE_CODE (exp)) | |
2240 | { | |
2241 | case COND_EXPR: | |
2242 | goto ternary; | |
2243 | ||
2244 | case TRUTH_ANDIF_EXPR: | |
2245 | case TRUTH_ORIF_EXPR: | |
2246 | case TRUTH_AND_EXPR: | |
2247 | case TRUTH_OR_EXPR: | |
2248 | case TRUTH_XOR_EXPR: | |
2249 | goto binary; | |
2250 | ||
2251 | case TRUTH_NOT_EXPR: | |
2252 | goto unary; | |
2253 | ||
2254 | default: | |
2255 | break; | |
2256 | } | |
2257 | break; | |
2258 | ||
2259 | ternary: | |
2260 | op2 = expand_debug_expr (TREE_OPERAND (exp, 2)); | |
2261 | if (!op2) | |
2262 | return NULL_RTX; | |
2263 | /* Fall through. */ | |
2264 | ||
2265 | binary: | |
2266 | case tcc_binary: | |
2267 | case tcc_comparison: | |
2268 | op1 = expand_debug_expr (TREE_OPERAND (exp, 1)); | |
2269 | if (!op1) | |
2270 | return NULL_RTX; | |
2271 | /* Fall through. */ | |
2272 | ||
2273 | unary: | |
2274 | case tcc_unary: | |
2275 | op0 = expand_debug_expr (TREE_OPERAND (exp, 0)); | |
2276 | if (!op0) | |
2277 | return NULL_RTX; | |
2278 | break; | |
2279 | ||
2280 | case tcc_type: | |
2281 | case tcc_statement: | |
2282 | gcc_unreachable (); | |
2283 | ||
2284 | case tcc_constant: | |
2285 | case tcc_exceptional: | |
2286 | case tcc_declaration: | |
2287 | case tcc_reference: | |
2288 | case tcc_vl_exp: | |
2289 | break; | |
2290 | } | |
2291 | ||
2292 | switch (TREE_CODE (exp)) | |
2293 | { | |
2294 | case STRING_CST: | |
2295 | if (!lookup_constant_def (exp)) | |
2296 | { | |
e1b243a8 JJ |
2297 | if (strlen (TREE_STRING_POINTER (exp)) + 1 |
2298 | != (size_t) TREE_STRING_LENGTH (exp)) | |
2299 | return NULL_RTX; | |
b5b8b0ac AO |
2300 | op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp)); |
2301 | op0 = gen_rtx_MEM (BLKmode, op0); | |
2302 | set_mem_attributes (op0, exp, 0); | |
2303 | return op0; | |
2304 | } | |
2305 | /* Fall through... */ | |
2306 | ||
2307 | case INTEGER_CST: | |
2308 | case REAL_CST: | |
2309 | case FIXED_CST: | |
2310 | op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER); | |
2311 | return op0; | |
2312 | ||
2313 | case COMPLEX_CST: | |
2314 | gcc_assert (COMPLEX_MODE_P (mode)); | |
2315 | op0 = expand_debug_expr (TREE_REALPART (exp)); | |
b5b8b0ac | 2316 | op1 = expand_debug_expr (TREE_IMAGPART (exp)); |
b5b8b0ac AO |
2317 | return gen_rtx_CONCAT (mode, op0, op1); |
2318 | ||
0ca5af51 AO |
2319 | case DEBUG_EXPR_DECL: |
2320 | op0 = DECL_RTL_IF_SET (exp); | |
2321 | ||
2322 | if (op0) | |
2323 | return op0; | |
2324 | ||
2325 | op0 = gen_rtx_DEBUG_EXPR (mode); | |
e4fb38bd | 2326 | DEBUG_EXPR_TREE_DECL (op0) = exp; |
0ca5af51 AO |
2327 | SET_DECL_RTL (exp, op0); |
2328 | ||
2329 | return op0; | |
2330 | ||
b5b8b0ac AO |
2331 | case VAR_DECL: |
2332 | case PARM_DECL: | |
2333 | case FUNCTION_DECL: | |
2334 | case LABEL_DECL: | |
2335 | case CONST_DECL: | |
2336 | case RESULT_DECL: | |
2337 | op0 = DECL_RTL_IF_SET (exp); | |
2338 | ||
2339 | /* This decl was probably optimized away. */ | |
2340 | if (!op0) | |
e1b243a8 JJ |
2341 | { |
2342 | if (TREE_CODE (exp) != VAR_DECL | |
2343 | || DECL_EXTERNAL (exp) | |
2344 | || !TREE_STATIC (exp) | |
2345 | || !DECL_NAME (exp) | |
0fba566c JJ |
2346 | || DECL_HARD_REGISTER (exp) |
2347 | || mode == VOIDmode) | |
e1b243a8 JJ |
2348 | return NULL; |
2349 | ||
2350 | op0 = DECL_RTL (exp); | |
2351 | SET_DECL_RTL (exp, NULL); | |
2352 | if (!MEM_P (op0) | |
2353 | || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF | |
2354 | || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp) | |
2355 | return NULL; | |
2356 | } | |
2357 | else | |
2358 | op0 = copy_rtx (op0); | |
b5b8b0ac AO |
2359 | |
2360 | if (GET_MODE (op0) == BLKmode) | |
2361 | { | |
2362 | gcc_assert (MEM_P (op0)); | |
2363 | op0 = adjust_address_nv (op0, mode, 0); | |
2364 | return op0; | |
2365 | } | |
2366 | ||
2367 | /* Fall through. */ | |
2368 | ||
2369 | adjust_mode: | |
2370 | case PAREN_EXPR: | |
2371 | case NOP_EXPR: | |
2372 | case CONVERT_EXPR: | |
2373 | { | |
2374 | enum machine_mode inner_mode = GET_MODE (op0); | |
2375 | ||
2376 | if (mode == inner_mode) | |
2377 | return op0; | |
2378 | ||
2379 | if (inner_mode == VOIDmode) | |
2380 | { | |
2381 | inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); | |
2382 | if (mode == inner_mode) | |
2383 | return op0; | |
2384 | } | |
2385 | ||
2386 | if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode)) | |
2387 | { | |
2388 | if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode)) | |
2389 | op0 = simplify_gen_subreg (mode, op0, inner_mode, 0); | |
2390 | else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode)) | |
2391 | op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode); | |
2392 | else | |
2393 | op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode); | |
2394 | } | |
2395 | else if (FLOAT_MODE_P (mode)) | |
2396 | { | |
2397 | if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))) | |
2398 | op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode); | |
2399 | else | |
2400 | op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode); | |
2401 | } | |
2402 | else if (FLOAT_MODE_P (inner_mode)) | |
2403 | { | |
2404 | if (unsignedp) | |
2405 | op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode); | |
2406 | else | |
2407 | op0 = simplify_gen_unary (FIX, mode, op0, inner_mode); | |
2408 | } | |
2409 | else if (CONSTANT_P (op0) | |
2410 | || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode)) | |
2411 | op0 = simplify_gen_subreg (mode, op0, inner_mode, | |
2412 | subreg_lowpart_offset (mode, | |
2413 | inner_mode)); | |
2414 | else if (unsignedp) | |
2415 | op0 = gen_rtx_ZERO_EXTEND (mode, op0); | |
2416 | else | |
2417 | op0 = gen_rtx_SIGN_EXTEND (mode, op0); | |
2418 | ||
2419 | return op0; | |
2420 | } | |
2421 | ||
2422 | case INDIRECT_REF: | |
2423 | case ALIGN_INDIRECT_REF: | |
2424 | case MISALIGNED_INDIRECT_REF: | |
2425 | op0 = expand_debug_expr (TREE_OPERAND (exp, 0)); | |
2426 | if (!op0) | |
2427 | return NULL; | |
2428 | ||
09e881c9 | 2429 | if (POINTER_TYPE_P (TREE_TYPE (exp))) |
4e25ca6b EB |
2430 | { |
2431 | as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp))); | |
2432 | address_mode = targetm.addr_space.address_mode (as); | |
2433 | } | |
09e881c9 | 2434 | else |
4e25ca6b EB |
2435 | { |
2436 | as = ADDR_SPACE_GENERIC; | |
2437 | address_mode = Pmode; | |
2438 | } | |
b5b8b0ac AO |
2439 | |
2440 | if (TREE_CODE (exp) == ALIGN_INDIRECT_REF) | |
2441 | { | |
2442 | int align = TYPE_ALIGN_UNIT (TREE_TYPE (exp)); | |
d4ebfa65 | 2443 | op0 = gen_rtx_AND (address_mode, op0, GEN_INT (-align)); |
b5b8b0ac AO |
2444 | } |
2445 | ||
2446 | op0 = gen_rtx_MEM (mode, op0); | |
2447 | ||
2448 | set_mem_attributes (op0, exp, 0); | |
09e881c9 | 2449 | set_mem_addr_space (op0, as); |
b5b8b0ac AO |
2450 | |
2451 | return op0; | |
2452 | ||
2453 | case TARGET_MEM_REF: | |
2454 | if (TMR_SYMBOL (exp) && !DECL_RTL_SET_P (TMR_SYMBOL (exp))) | |
2455 | return NULL; | |
2456 | ||
2457 | op0 = expand_debug_expr | |
4e25ca6b | 2458 | (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp)); |
b5b8b0ac AO |
2459 | if (!op0) |
2460 | return NULL; | |
2461 | ||
09e881c9 | 2462 | as = TYPE_ADDR_SPACE (TREE_TYPE (exp)); |
b5b8b0ac AO |
2463 | |
2464 | op0 = gen_rtx_MEM (mode, op0); | |
2465 | ||
2466 | set_mem_attributes (op0, exp, 0); | |
09e881c9 | 2467 | set_mem_addr_space (op0, as); |
b5b8b0ac AO |
2468 | |
2469 | return op0; | |
2470 | ||
2471 | case ARRAY_REF: | |
2472 | case ARRAY_RANGE_REF: | |
2473 | case COMPONENT_REF: | |
2474 | case BIT_FIELD_REF: | |
2475 | case REALPART_EXPR: | |
2476 | case IMAGPART_EXPR: | |
2477 | case VIEW_CONVERT_EXPR: | |
2478 | { | |
2479 | enum machine_mode mode1; | |
2480 | HOST_WIDE_INT bitsize, bitpos; | |
2481 | tree offset; | |
2482 | int volatilep = 0; | |
2483 | tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset, | |
2484 | &mode1, &unsignedp, &volatilep, false); | |
2485 | rtx orig_op0; | |
2486 | ||
4f2a9af8 JJ |
2487 | if (bitsize == 0) |
2488 | return NULL; | |
2489 | ||
b5b8b0ac AO |
2490 | orig_op0 = op0 = expand_debug_expr (tem); |
2491 | ||
2492 | if (!op0) | |
2493 | return NULL; | |
2494 | ||
2495 | if (offset) | |
2496 | { | |
dda2da58 AO |
2497 | enum machine_mode addrmode, offmode; |
2498 | ||
b5b8b0ac AO |
2499 | gcc_assert (MEM_P (op0)); |
2500 | ||
dda2da58 AO |
2501 | op0 = XEXP (op0, 0); |
2502 | addrmode = GET_MODE (op0); | |
2503 | if (addrmode == VOIDmode) | |
2504 | addrmode = Pmode; | |
2505 | ||
b5b8b0ac AO |
2506 | op1 = expand_debug_expr (offset); |
2507 | if (!op1) | |
2508 | return NULL; | |
2509 | ||
dda2da58 AO |
2510 | offmode = GET_MODE (op1); |
2511 | if (offmode == VOIDmode) | |
2512 | offmode = TYPE_MODE (TREE_TYPE (offset)); | |
2513 | ||
2514 | if (addrmode != offmode) | |
2515 | op1 = simplify_gen_subreg (addrmode, op1, offmode, | |
2516 | subreg_lowpart_offset (addrmode, | |
2517 | offmode)); | |
2518 | ||
2519 | /* Don't use offset_address here; we don't need a | |
2520 | recognizable address, and we don't want to generate | |
2521 | code. */ | |
2522 | op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (addrmode, op0, op1)); | |
b5b8b0ac AO |
2523 | } |
2524 | ||
2525 | if (MEM_P (op0)) | |
2526 | { | |
4f2a9af8 JJ |
2527 | if (mode1 == VOIDmode) |
2528 | /* Bitfield. */ | |
2529 | mode1 = smallest_mode_for_size (bitsize, MODE_INT); | |
b5b8b0ac AO |
2530 | if (bitpos >= BITS_PER_UNIT) |
2531 | { | |
2532 | op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT); | |
2533 | bitpos %= BITS_PER_UNIT; | |
2534 | } | |
2535 | else if (bitpos < 0) | |
2536 | { | |
4f2a9af8 JJ |
2537 | HOST_WIDE_INT units |
2538 | = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT; | |
b5b8b0ac AO |
2539 | op0 = adjust_address_nv (op0, mode1, units); |
2540 | bitpos += units * BITS_PER_UNIT; | |
2541 | } | |
2542 | else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode)) | |
2543 | op0 = adjust_address_nv (op0, mode, 0); | |
2544 | else if (GET_MODE (op0) != mode1) | |
2545 | op0 = adjust_address_nv (op0, mode1, 0); | |
2546 | else | |
2547 | op0 = copy_rtx (op0); | |
2548 | if (op0 == orig_op0) | |
2549 | op0 = shallow_copy_rtx (op0); | |
2550 | set_mem_attributes (op0, exp, 0); | |
2551 | } | |
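/* Worked example (added for illustration): a bitfield at bitpos = 19 with
   mode1 = QImode advances the MEM address by 19 / 8 = 2 bytes and leaves
   bitpos = 19 % 8 = 3, so the extraction emitted below only ever sees a
   sub-byte bit position for MEMs.  */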
2552 | ||
2553 | if (bitpos == 0 && mode == GET_MODE (op0)) | |
2554 | return op0; | |
2555 | ||
2d3fc6aa JJ |
2556 | if (bitpos < 0) |
2557 | return NULL; | |
2558 | ||
b5b8b0ac AO |
2559 | if ((bitpos % BITS_PER_UNIT) == 0 |
2560 | && bitsize == GET_MODE_BITSIZE (mode1)) | |
2561 | { | |
2562 | enum machine_mode opmode = GET_MODE (op0); | |
2563 | ||
2564 | gcc_assert (opmode != BLKmode); | |
2565 | ||
2566 | if (opmode == VOIDmode) | |
2567 | opmode = mode1; | |
2568 | ||
2569 | /* This condition may hold if we're expanding the address | |
2570 | right past the end of an array that turned out not to | |
2571 | be addressable (i.e., the address was only computed in | |
2572 | debug stmts). The simplify_gen_subreg below would rightfully | |
2573 | crash, and the address doesn't really exist, so just | |
2574 | drop it. */ | |
2575 | if (bitpos >= GET_MODE_BITSIZE (opmode)) | |
2576 | return NULL; | |
2577 | ||
2578 | return simplify_gen_subreg (mode, op0, opmode, | |
2579 | bitpos / BITS_PER_UNIT); | |
2580 | } | |
2581 | ||
2582 | return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0)) | |
2583 | && TYPE_UNSIGNED (TREE_TYPE (exp)) | |
2584 | ? SIGN_EXTRACT | |
2585 | : ZERO_EXTRACT, mode, | |
2586 | GET_MODE (op0) != VOIDmode | |
2587 | ? GET_MODE (op0) : mode1, | |
2588 | op0, GEN_INT (bitsize), GEN_INT (bitpos)); | |
2589 | } | |
2590 | ||
b5b8b0ac AO |
2591 | case ABS_EXPR: |
2592 | return gen_rtx_ABS (mode, op0); | |
2593 | ||
2594 | case NEGATE_EXPR: | |
2595 | return gen_rtx_NEG (mode, op0); | |
2596 | ||
2597 | case BIT_NOT_EXPR: | |
2598 | return gen_rtx_NOT (mode, op0); | |
2599 | ||
2600 | case FLOAT_EXPR: | |
2601 | if (unsignedp) | |
2602 | return gen_rtx_UNSIGNED_FLOAT (mode, op0); | |
2603 | else | |
2604 | return gen_rtx_FLOAT (mode, op0); | |
2605 | ||
2606 | case FIX_TRUNC_EXPR: | |
2607 | if (unsignedp) | |
2608 | return gen_rtx_UNSIGNED_FIX (mode, op0); | |
2609 | else | |
2610 | return gen_rtx_FIX (mode, op0); | |
2611 | ||
2612 | case POINTER_PLUS_EXPR: | |
2613 | case PLUS_EXPR: | |
2614 | return gen_rtx_PLUS (mode, op0, op1); | |
2615 | ||
2616 | case MINUS_EXPR: | |
2617 | return gen_rtx_MINUS (mode, op0, op1); | |
2618 | ||
2619 | case MULT_EXPR: | |
2620 | return gen_rtx_MULT (mode, op0, op1); | |
2621 | ||
2622 | case RDIV_EXPR: | |
2623 | case TRUNC_DIV_EXPR: | |
2624 | case EXACT_DIV_EXPR: | |
2625 | if (unsignedp) | |
2626 | return gen_rtx_UDIV (mode, op0, op1); | |
2627 | else | |
2628 | return gen_rtx_DIV (mode, op0, op1); | |
2629 | ||
2630 | case TRUNC_MOD_EXPR: | |
2631 | if (unsignedp) | |
2632 | return gen_rtx_UMOD (mode, op0, op1); | |
2633 | else | |
2634 | return gen_rtx_MOD (mode, op0, op1); | |
2635 | ||
2636 | case FLOOR_DIV_EXPR: | |
2637 | if (unsignedp) | |
2638 | return gen_rtx_UDIV (mode, op0, op1); | |
2639 | else | |
2640 | { | |
2641 | rtx div = gen_rtx_DIV (mode, op0, op1); | |
2642 | rtx mod = gen_rtx_MOD (mode, op0, op1); | |
2643 | rtx adj = floor_sdiv_adjust (mode, mod, op1); | |
2644 | return gen_rtx_PLUS (mode, div, adj); | |
2645 | } | |
2646 | ||
2647 | case FLOOR_MOD_EXPR: | |
2648 | if (unsignedp) | |
2649 | return gen_rtx_UMOD (mode, op0, op1); | |
2650 | else | |
2651 | { | |
2652 | rtx mod = gen_rtx_MOD (mode, op0, op1); | |
2653 | rtx adj = floor_sdiv_adjust (mode, mod, op1); | |
2654 | adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1)); | |
2655 | return gen_rtx_PLUS (mode, mod, adj); | |
2656 | } | |
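/* Worked example (added for illustration): op0 = -7, op1 = 2 gives a
   truncated mod of -1 and a floor adjustment of -1, so the result is
   -1 - (-1 * 2) = 1, matching -7 = -4 * 2 + 1 under floor semantics.  */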
2657 | ||
2658 | case CEIL_DIV_EXPR: | |
2659 | if (unsignedp) | |
2660 | { | |
2661 | rtx div = gen_rtx_UDIV (mode, op0, op1); | |
2662 | rtx mod = gen_rtx_UMOD (mode, op0, op1); | |
2663 | rtx adj = ceil_udiv_adjust (mode, mod, op1); | |
2664 | return gen_rtx_PLUS (mode, div, adj); | |
2665 | } | |
2666 | else | |
2667 | { | |
2668 | rtx div = gen_rtx_DIV (mode, op0, op1); | |
2669 | rtx mod = gen_rtx_MOD (mode, op0, op1); | |
2670 | rtx adj = ceil_sdiv_adjust (mode, mod, op1); | |
2671 | return gen_rtx_PLUS (mode, div, adj); | |
2672 | } | |
2673 | ||
2674 | case CEIL_MOD_EXPR: | |
2675 | if (unsignedp) | |
2676 | { | |
2677 | rtx mod = gen_rtx_UMOD (mode, op0, op1); | |
2678 | rtx adj = ceil_udiv_adjust (mode, mod, op1); | |
2679 | adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1)); | |
2680 | return gen_rtx_PLUS (mode, mod, adj); | |
2681 | } | |
2682 | else | |
2683 | { | |
2684 | rtx mod = gen_rtx_MOD (mode, op0, op1); | |
2685 | rtx adj = ceil_sdiv_adjust (mode, mod, op1); | |
2686 | adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1)); | |
2687 | return gen_rtx_PLUS (mode, mod, adj); | |
2688 | } | |
2689 | ||
2690 | case ROUND_DIV_EXPR: | |
2691 | if (unsignedp) | |
2692 | { | |
2693 | rtx div = gen_rtx_UDIV (mode, op0, op1); | |
2694 | rtx mod = gen_rtx_UMOD (mode, op0, op1); | |
2695 | rtx adj = round_udiv_adjust (mode, mod, op1); | |
2696 | return gen_rtx_PLUS (mode, div, adj); | |
2697 | } | |
2698 | else | |
2699 | { | |
2700 | rtx div = gen_rtx_DIV (mode, op0, op1); | |
2701 | rtx mod = gen_rtx_MOD (mode, op0, op1); | |
2702 | rtx adj = round_sdiv_adjust (mode, mod, op1); | |
2703 | return gen_rtx_PLUS (mode, div, adj); | |
2704 | } | |
2705 | ||
2706 | case ROUND_MOD_EXPR: | |
2707 | if (unsignedp) | |
2708 | { | |
2709 | rtx mod = gen_rtx_UMOD (mode, op0, op1); | |
2710 | rtx adj = round_udiv_adjust (mode, mod, op1); | |
2711 | adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1)); | |
2712 | return gen_rtx_PLUS (mode, mod, adj); | |
2713 | } | |
2714 | else | |
2715 | { | |
2716 | rtx mod = gen_rtx_MOD (mode, op0, op1); | |
2717 | rtx adj = round_sdiv_adjust (mode, mod, op1); | |
2718 | adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1)); | |
2719 | return gen_rtx_PLUS (mode, mod, adj); | |
2720 | } | |
2721 | ||
2722 | case LSHIFT_EXPR: | |
2723 | return gen_rtx_ASHIFT (mode, op0, op1); | |
2724 | ||
2725 | case RSHIFT_EXPR: | |
2726 | if (unsignedp) | |
2727 | return gen_rtx_LSHIFTRT (mode, op0, op1); | |
2728 | else | |
2729 | return gen_rtx_ASHIFTRT (mode, op0, op1); | |
2730 | ||
2731 | case LROTATE_EXPR: | |
2732 | return gen_rtx_ROTATE (mode, op0, op1); | |
2733 | ||
2734 | case RROTATE_EXPR: | |
2735 | return gen_rtx_ROTATERT (mode, op0, op1); | |
2736 | ||
2737 | case MIN_EXPR: | |
2738 | if (unsignedp) | |
2739 | return gen_rtx_UMIN (mode, op0, op1); | |
2740 | else | |
2741 | return gen_rtx_SMIN (mode, op0, op1); | |
2742 | ||
2743 | case MAX_EXPR: | |
2744 | if (unsignedp) | |
2745 | return gen_rtx_UMAX (mode, op0, op1); | |
2746 | else | |
2747 | return gen_rtx_SMAX (mode, op0, op1); | |
2748 | ||
2749 | case BIT_AND_EXPR: | |
2750 | case TRUTH_AND_EXPR: | |
2751 | return gen_rtx_AND (mode, op0, op1); | |
2752 | ||
2753 | case BIT_IOR_EXPR: | |
2754 | case TRUTH_OR_EXPR: | |
2755 | return gen_rtx_IOR (mode, op0, op1); | |
2756 | ||
2757 | case BIT_XOR_EXPR: | |
2758 | case TRUTH_XOR_EXPR: | |
2759 | return gen_rtx_XOR (mode, op0, op1); | |
2760 | ||
2761 | case TRUTH_ANDIF_EXPR: | |
2762 | return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx); | |
2763 | ||
2764 | case TRUTH_ORIF_EXPR: | |
2765 | return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1); | |
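/* Note (added for illustration): the two cases above map the
   short-circuit operators onto conditionals -- "a && b" becomes
   (if_then_else a b 0) and "a || b" becomes (if_then_else a
   const_true_rtx b) -- which suffices for debug expressions, where no
   control flow is generated.  */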
2766 | ||
2767 | case TRUTH_NOT_EXPR: | |
2768 | return gen_rtx_EQ (mode, op0, const0_rtx); | |
2769 | ||
2770 | case LT_EXPR: | |
2771 | if (unsignedp) | |
2772 | return gen_rtx_LTU (mode, op0, op1); | |
2773 | else | |
2774 | return gen_rtx_LT (mode, op0, op1); | |
2775 | ||
2776 | case LE_EXPR: | |
2777 | if (unsignedp) | |
2778 | return gen_rtx_LEU (mode, op0, op1); | |
2779 | else | |
2780 | return gen_rtx_LE (mode, op0, op1); | |
2781 | ||
2782 | case GT_EXPR: | |
2783 | if (unsignedp) | |
2784 | return gen_rtx_GTU (mode, op0, op1); | |
2785 | else | |
2786 | return gen_rtx_GT (mode, op0, op1); | |
2787 | ||
2788 | case GE_EXPR: | |
2789 | if (unsignedp) | |
2790 | return gen_rtx_GEU (mode, op0, op1); | |
2791 | else | |
2792 | return gen_rtx_GE (mode, op0, op1); | |
2793 | ||
2794 | case EQ_EXPR: | |
2795 | return gen_rtx_EQ (mode, op0, op1); | |
2796 | ||
2797 | case NE_EXPR: | |
2798 | return gen_rtx_NE (mode, op0, op1); | |
2799 | ||
2800 | case UNORDERED_EXPR: | |
2801 | return gen_rtx_UNORDERED (mode, op0, op1); | |
2802 | ||
2803 | case ORDERED_EXPR: | |
2804 | return gen_rtx_ORDERED (mode, op0, op1); | |
2805 | ||
2806 | case UNLT_EXPR: | |
2807 | return gen_rtx_UNLT (mode, op0, op1); | |
2808 | ||
2809 | case UNLE_EXPR: | |
2810 | return gen_rtx_UNLE (mode, op0, op1); | |
2811 | ||
2812 | case UNGT_EXPR: | |
2813 | return gen_rtx_UNGT (mode, op0, op1); | |
2814 | ||
2815 | case UNGE_EXPR: | |
2816 | return gen_rtx_UNGE (mode, op0, op1); | |
2817 | ||
2818 | case UNEQ_EXPR: | |
2819 | return gen_rtx_UNEQ (mode, op0, op1); | |
2820 | ||
2821 | case LTGT_EXPR: | |
2822 | return gen_rtx_LTGT (mode, op0, op1); | |
2823 | ||
2824 | case COND_EXPR: | |
2825 | return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2); | |
2826 | ||
2827 | case COMPLEX_EXPR: | |
2828 | gcc_assert (COMPLEX_MODE_P (mode)); | |
2829 | if (GET_MODE (op0) == VOIDmode) | |
2830 | op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0); | |
2831 | if (GET_MODE (op1) == VOIDmode) | |
2832 | op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1); | |
2833 | return gen_rtx_CONCAT (mode, op0, op1); | |
2834 | ||
d02a5a4b JJ |
2835 | case CONJ_EXPR: |
2836 | if (GET_CODE (op0) == CONCAT) | |
2837 | return gen_rtx_CONCAT (mode, XEXP (op0, 0), | |
2838 | gen_rtx_NEG (GET_MODE_INNER (mode), | |
2839 | XEXP (op0, 1))); | |
2840 | else | |
2841 | { | |
2842 | enum machine_mode imode = GET_MODE_INNER (mode); | |
2843 | rtx re, im; | |
2844 | ||
2845 | if (MEM_P (op0)) | |
2846 | { | |
2847 | re = adjust_address_nv (op0, imode, 0); | |
2848 | im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode)); | |
2849 | } | |
2850 | else | |
2851 | { | |
2852 | enum machine_mode ifmode = int_mode_for_mode (mode); | |
2853 | enum machine_mode ihmode = int_mode_for_mode (imode); | |
2854 | rtx halfsize; | |
2855 | if (ifmode == BLKmode || ihmode == BLKmode) | |
2856 | return NULL; | |
2857 | halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode)); | |
2858 | re = op0; | |
2859 | if (mode != ifmode) | |
2860 | re = gen_rtx_SUBREG (ifmode, re, 0); | |
2861 | re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx); | |
2862 | if (imode != ihmode) | |
2863 | re = gen_rtx_SUBREG (imode, re, 0); | |
2864 | im = copy_rtx (op0); | |
2865 | if (mode != ifmode) | |
2866 | im = gen_rtx_SUBREG (ifmode, im, 0); | |
2867 | im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize); | |
2868 | if (imode != ihmode) | |
2869 | im = gen_rtx_SUBREG (imode, im, 0); | |
2870 | } | |
2871 | im = gen_rtx_NEG (imode, im); | |
2872 | return gen_rtx_CONCAT (mode, re, im); | |
2873 | } | |
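/* Illustrative note (the mode names are an example, not from the original
   source): for a complex value of mode CSImode not already split into a
   CONCAT, ifmode = DImode and ihmode = SImode, so the real part is the
   zero_extract of the low 32 bits and the imaginary part that of the high
   32 bits; the latter is negated and the two recombined with a CONCAT.  */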
2874 | ||
b5b8b0ac AO |
2875 | case ADDR_EXPR: |
2876 | op0 = expand_debug_expr (TREE_OPERAND (exp, 0)); | |
2877 | if (!op0 || !MEM_P (op0)) | |
2878 | return NULL; | |
2879 | ||
dda2da58 AO |
2880 | op0 = convert_debug_memory_address (mode, XEXP (op0, 0)); |
2881 | ||
2882 | return op0; | |
b5b8b0ac AO |
2883 | |
2884 | case VECTOR_CST: | |
2885 | exp = build_constructor_from_list (TREE_TYPE (exp), | |
2886 | TREE_VECTOR_CST_ELTS (exp)); | |
2887 | /* Fall through. */ | |
2888 | ||
2889 | case CONSTRUCTOR: | |
2890 | if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE) | |
2891 | { | |
2892 | unsigned i; | |
2893 | tree val; | |
2894 | ||
2895 | op0 = gen_rtx_CONCATN | |
2896 | (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))); | |
2897 | ||
2898 | FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val) | |
2899 | { | |
2900 | op1 = expand_debug_expr (val); | |
2901 | if (!op1) | |
2902 | return NULL; | |
2903 | XVECEXP (op0, 0, i) = op1; | |
2904 | } | |
2905 | ||
2906 | if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))) | |
2907 | { | |
2908 | op1 = expand_debug_expr | |
2909 | (fold_convert (TREE_TYPE (TREE_TYPE (exp)), integer_zero_node)); | |
2910 | ||
2911 | if (!op1) | |
2912 | return NULL; | |
2913 | ||
2914 | for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++) | |
2915 | XVECEXP (op0, 0, i) = op1; | |
2916 | } | |
2917 | ||
2918 | return op0; | |
2919 | } | |
2920 | else | |
2921 | goto flag_unsupported; | |
2922 | ||
2923 | case CALL_EXPR: | |
2924 | /* ??? Maybe handle some builtins? */ | |
2925 | return NULL; | |
2926 | ||
2927 | case SSA_NAME: | |
2928 | { | |
2929 | int part = var_to_partition (SA.map, exp); | |
2930 | ||
2931 | if (part == NO_PARTITION) | |
2932 | return NULL; | |
2933 | ||
2934 | gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions); | |
2935 | ||
2936 | op0 = SA.partition_to_pseudo[part]; | |
2937 | goto adjust_mode; | |
2938 | } | |
2939 | ||
2940 | case ERROR_MARK: | |
2941 | return NULL; | |
2942 | ||
2943 | default: | |
2944 | flag_unsupported: | |
2945 | #ifdef ENABLE_CHECKING | |
2946 | debug_tree (exp); | |
2947 | gcc_unreachable (); | |
2948 | #else | |
2949 | return NULL; | |
2950 | #endif | |
2951 | } | |
2952 | } | |
2953 | ||
2954 | /* Expand the _LOCs in debug insns. We run this after expanding all | |
2955 | regular insns, so that any variables referenced in the function | |
2956 | will have their DECL_RTLs set. */ | |
2957 | ||
2958 | static void | |
2959 | expand_debug_locations (void) | |
2960 | { | |
2961 | rtx insn; | |
2962 | rtx last = get_last_insn (); | |
2963 | int save_strict_alias = flag_strict_aliasing; | |
2964 | ||
2965 | /* New alias sets while setting up memory attributes cause | |
2966 | -fcompare-debug failures, even though they don't bring about any | |
2967 | codegen changes. */ | |
2968 | flag_strict_aliasing = 0; | |
2969 | ||
2970 | for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) | |
2971 | if (DEBUG_INSN_P (insn)) | |
2972 | { | |
2973 | tree value = (tree)INSN_VAR_LOCATION_LOC (insn); | |
2974 | rtx val; | |
2975 | enum machine_mode mode; | |
2976 | ||
2977 | if (value == NULL_TREE) | |
2978 | val = NULL_RTX; | |
2979 | else | |
2980 | { | |
2981 | val = expand_debug_expr (value); | |
2982 | gcc_assert (last == get_last_insn ()); | |
2983 | } | |
2984 | ||
2985 | if (!val) | |
2986 | val = gen_rtx_UNKNOWN_VAR_LOC (); | |
2987 | else | |
2988 | { | |
2989 | mode = GET_MODE (INSN_VAR_LOCATION (insn)); | |
2990 | ||
2991 | gcc_assert (mode == GET_MODE (val) | |
2992 | || (GET_MODE (val) == VOIDmode | |
2993 | && (CONST_INT_P (val) | |
2994 | || GET_CODE (val) == CONST_FIXED | |
2995 | || GET_CODE (val) == CONST_DOUBLE | |
2996 | || GET_CODE (val) == LABEL_REF))); | |
2997 | } | |
2998 | ||
2999 | INSN_VAR_LOCATION_LOC (insn) = val; | |
3000 | } | |
3001 | ||
3002 | flag_strict_aliasing = save_strict_alias; | |
3003 | } | |
3004 | ||
242229bb JH |
3005 | /* Expand basic block BB from GIMPLE trees to RTL. */ |
3006 | ||
3007 | static basic_block | |
10d22567 | 3008 | expand_gimple_basic_block (basic_block bb) |
242229bb | 3009 | { |
726a989a RB |
3010 | gimple_stmt_iterator gsi; |
3011 | gimple_seq stmts; | |
3012 | gimple stmt = NULL; | |
242229bb JH |
3013 | rtx note, last; |
3014 | edge e; | |
628f6a4e | 3015 | edge_iterator ei; |
8b11009b | 3016 | void **elt; |
242229bb JH |
3017 | |
3018 | if (dump_file) | |
726a989a RB |
3019 | fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n", |
3020 | bb->index); | |
3021 | ||
3022 | /* Note that since we are now transitioning from GIMPLE to RTL, we | |
3023 | cannot use the gsi_*_bb() routines because they expect the basic | |
3024 | block to be in GIMPLE, instead of RTL. Therefore, we need to | |
3025 | access the BB sequence directly. */ | |
3026 | stmts = bb_seq (bb); | |
3027 | bb->il.gimple = NULL; | |
bf08ebeb | 3028 | rtl_profile_for_bb (bb); |
5e2d947c JH |
3029 | init_rtl_bb_info (bb); |
3030 | bb->flags |= BB_RTL; | |
3031 | ||
a9b77cd1 ZD |
3032 | /* Remove the GIMPLE_RETURN if we may fall through to the exit
3033 | instead. */ | |
726a989a RB |
3034 | gsi = gsi_last (stmts); |
3035 | if (!gsi_end_p (gsi) | |
3036 | && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN) | |
a9b77cd1 | 3037 | { |
726a989a | 3038 | gimple ret_stmt = gsi_stmt (gsi); |
a9b77cd1 ZD |
3039 | |
3040 | gcc_assert (single_succ_p (bb)); | |
3041 | gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR); | |
3042 | ||
3043 | if (bb->next_bb == EXIT_BLOCK_PTR | |
726a989a | 3044 | && !gimple_return_retval (ret_stmt)) |
a9b77cd1 | 3045 | { |
726a989a | 3046 | gsi_remove (&gsi, false); |
a9b77cd1 ZD |
3047 | single_succ_edge (bb)->flags |= EDGE_FALLTHRU; |
3048 | } | |
3049 | } | |
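/* Illustrative example (typical input assumed, not taken from this
   file): for

     void f (void) { g (); return; }

   the trailing GIMPLE_RETURN carries no return value and its block
   immediately precedes the exit block, so the return is deleted and
   the single successor edge is simply marked EDGE_FALLTHRU; the
   actual epilogue is emitted later by expand_function_end.  */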
3050 | ||
726a989a RB |
3051 | gsi = gsi_start (stmts); |
3052 | if (!gsi_end_p (gsi)) | |
8b11009b | 3053 | { |
726a989a RB |
3054 | stmt = gsi_stmt (gsi); |
3055 | if (gimple_code (stmt) != GIMPLE_LABEL) | |
3056 | stmt = NULL; | |
8b11009b | 3057 | } |
242229bb | 3058 | |
8b11009b ZD |
3059 | elt = pointer_map_contains (lab_rtx_for_bb, bb); |
3060 | ||
3061 | if (stmt || elt) | |
242229bb JH |
3062 | { |
3063 | last = get_last_insn (); | |
3064 | ||
8b11009b ZD |
3065 | if (stmt) |
3066 | { | |
28ed065e | 3067 | expand_gimple_stmt (stmt); |
726a989a | 3068 | gsi_next (&gsi); |
8b11009b ZD |
3069 | } |
3070 | ||
3071 | if (elt) | |
ae50c0cb | 3072 | emit_label ((rtx) *elt); |
242229bb | 3073 | |
caf93cb0 | 3074 | /* Java emits line number notes at the top of labels.
c22cacf3 | 3075 | ??? Make this go away once line number notes are obsoleted. */ |
242229bb | 3076 | BB_HEAD (bb) = NEXT_INSN (last); |
4b4bf941 | 3077 | if (NOTE_P (BB_HEAD (bb))) |
242229bb | 3078 | BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb)); |
242229bb | 3079 | note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb)); |
b7211528 | 3080 | |
726a989a | 3081 | maybe_dump_rtl_for_gimple_stmt (stmt, last); |
242229bb JH |
3082 | } |
3083 | else | |
3084 | note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK); | |
3085 | ||
3086 | NOTE_BASIC_BLOCK (note) = bb; | |
3087 | ||
726a989a | 3088 | for (; !gsi_end_p (gsi); gsi_next (&gsi)) |
242229bb | 3089 | { |
cea49550 | 3090 | basic_block new_bb; |
242229bb | 3091 | |
b5b8b0ac | 3092 | stmt = gsi_stmt (gsi); |
a5883ba0 | 3093 | currently_expanding_gimple_stmt = stmt; |
b5b8b0ac | 3094 | |
242229bb JH |
3095 | /* Expand this statement, then evaluate the resulting RTL and |
3096 | fix up the CFG accordingly. */
726a989a | 3097 | if (gimple_code (stmt) == GIMPLE_COND) |
cea49550 | 3098 | { |
726a989a | 3099 | new_bb = expand_gimple_cond (bb, stmt); |
cea49550 RH |
3100 | if (new_bb) |
3101 | return new_bb; | |
3102 | } | |
b5b8b0ac AO |
3103 | else if (gimple_debug_bind_p (stmt)) |
3104 | { | |
3105 | location_t sloc = get_curr_insn_source_location (); | |
3106 | tree sblock = get_curr_insn_block (); | |
3107 | gimple_stmt_iterator nsi = gsi; | |
3108 | ||
3109 | for (;;) | |
3110 | { | |
3111 | tree var = gimple_debug_bind_get_var (stmt); | |
3112 | tree value; | |
3113 | rtx val; | |
3114 | enum machine_mode mode; | |
3115 | ||
3116 | if (gimple_debug_bind_has_value_p (stmt)) | |
3117 | value = gimple_debug_bind_get_value (stmt); | |
3118 | else | |
3119 | value = NULL_TREE; | |
3120 | ||
3121 | last = get_last_insn (); | |
3122 | ||
3123 | set_curr_insn_source_location (gimple_location (stmt)); | |
3124 | set_curr_insn_block (gimple_block (stmt)); | |
3125 | ||
3126 | if (DECL_P (var)) | |
3127 | mode = DECL_MODE (var); | |
3128 | else | |
3129 | mode = TYPE_MODE (TREE_TYPE (var)); | |
3130 | ||
3131 | val = gen_rtx_VAR_LOCATION | |
3132 | (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED); | |
3133 | ||
3134 | val = emit_debug_insn (val); | |
3135 | ||
3136 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
3137 | { | |
3138 | /* We can't dump the insn with a TREE where an RTX | |
3139 | is expected. */ | |
3140 | INSN_VAR_LOCATION_LOC (val) = const0_rtx; | |
3141 | maybe_dump_rtl_for_gimple_stmt (stmt, last); | |
3142 | INSN_VAR_LOCATION_LOC (val) = (rtx)value; | |
3143 | } | |
3144 | ||
3145 | gsi = nsi; | |
3146 | gsi_next (&nsi); | |
3147 | if (gsi_end_p (nsi)) | |
3148 | break; | |
3149 | stmt = gsi_stmt (nsi); | |
3150 | if (!gimple_debug_bind_p (stmt)) | |
3151 | break; | |
3152 | } | |
3153 | ||
3154 | set_curr_insn_source_location (sloc); | |
3155 | set_curr_insn_block (sblock); | |
3156 | } | |
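/* A sketch of what the loop above emits (SSA names invented): a
   GIMPLE debug bind such as

     # DEBUG i => n_2 * 4

   becomes a DEBUG_INSN whose VAR_LOCATION still holds the tree value,
   cast to an rtx for the time being; expand_debug_locations later
   converts these stashed trees into real RTL, once every referenced
   variable has its DECL_RTL set.  */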
80c7a9eb | 3157 | else |
242229bb | 3158 | { |
726a989a | 3159 | if (is_gimple_call (stmt) && gimple_call_tail_p (stmt)) |
cea49550 RH |
3160 | { |
3161 | bool can_fallthru; | |
3162 | new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru); | |
3163 | if (new_bb) | |
3164 | { | |
3165 | if (can_fallthru) | |
3166 | bb = new_bb; | |
3167 | else | |
3168 | return new_bb; | |
3169 | } | |
3170 | } | |
4d7a65ea | 3171 | else |
b7211528 | 3172 | { |
4e3825db | 3173 | def_operand_p def_p; |
4e3825db MM |
3174 | def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF); |
3175 | ||
3176 | if (def_p != NULL) | |
3177 | { | |
3178 | /* Ignore this stmt if it is in the list of | |
3179 | replaceable expressions. */ | |
3180 | if (SA.values | |
b8698a0f | 3181 | && bitmap_bit_p (SA.values, |
e97809c6 | 3182 | SSA_NAME_VERSION (DEF_FROM_PTR (def_p)))) |
4e3825db MM |
3183 | continue; |
3184 | } | |
28ed065e | 3185 | last = expand_gimple_stmt (stmt); |
726a989a | 3186 | maybe_dump_rtl_for_gimple_stmt (stmt, last); |
b7211528 | 3187 | } |
242229bb JH |
3188 | } |
3189 | } | |
3190 | ||
a5883ba0 MM |
3191 | currently_expanding_gimple_stmt = NULL; |
3192 | ||
7241571e | 3193 | /* Expand implicit gotos and convert goto_locus. */
a9b77cd1 ZD |
3194 | FOR_EACH_EDGE (e, ei, bb->succs) |
3195 | { | |
7241571e JJ |
3196 | if (e->goto_locus && e->goto_block) |
3197 | { | |
3198 | set_curr_insn_source_location (e->goto_locus); | |
3199 | set_curr_insn_block (e->goto_block); | |
3200 | e->goto_locus = curr_insn_locator (); | |
3201 | } | |
3202 | e->goto_block = NULL; | |
3203 | if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb) | |
3204 | { | |
3205 | emit_jump (label_rtx_for_bb (e->dest)); | |
3206 | e->flags &= ~EDGE_FALLTHRU; | |
3207 | } | |
a9b77cd1 ZD |
3208 | } |
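/* Illustrative note (block numbers invented): a GIMPLE fallthru edge
   may lead to a block other than the one that follows in layout
   order, e.g. block 3 falling through to block 7 while block 4 is
   next in the chain.  In that case the explicit

     emit_jump (label_rtx_for_bb (e->dest));

   above materializes the control transfer and EDGE_FALLTHRU is
   dropped, keeping the RTL insn chain and the CFG consistent.  */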
3209 | ||
ae761c45 AH |
3210 | /* Expanded RTL can create a jump as the last instruction of a block.
3211 | This jump might later be assumed to be a jump to the successor and break edge insertion.
3212 | We need to insert a dummy move to prevent this. PR41440. */
3213 | if (single_succ_p (bb) | |
3214 | && (single_succ_edge (bb)->flags & EDGE_FALLTHRU) | |
3215 | && (last = get_last_insn ()) | |
3216 | && JUMP_P (last)) | |
3217 | { | |
3218 | rtx dummy = gen_reg_rtx (SImode); | |
3219 | emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL); | |
3220 | } | |
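/* A sketch of the workaround above (the pseudo number is invented and
   the exact RTL varies by target): after the jump we append a self-move
   such as

     (set (reg:SI 99) (reg:SI 99))

   so the jump is no longer the last insn of the block and edge
   insertion cannot mistake it for the block's only exit; the no-op
   move carries no semantics and later passes may delete it.  */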
3221 | ||
242229bb JH |
3222 | do_pending_stack_adjust (); |
3223 | ||
3f117656 | 3224 | /* Find the block tail. The last insn in the block is the insn |
242229bb JH |
3225 | before a barrier and/or table jump insn. */ |
3226 | last = get_last_insn (); | |
4b4bf941 | 3227 | if (BARRIER_P (last)) |
242229bb JH |
3228 | last = PREV_INSN (last); |
3229 | if (JUMP_TABLE_DATA_P (last)) | |
3230 | last = PREV_INSN (PREV_INSN (last)); | |
3231 | BB_END (bb) = last; | |
caf93cb0 | 3232 | |
242229bb | 3233 | update_bb_for_insn (bb); |
80c7a9eb | 3234 | |
242229bb JH |
3235 | return bb; |
3236 | } | |
3237 | ||
3238 | ||
3239 | /* Create a basic block for initialization code. */ | |
3240 | ||
3241 | static basic_block | |
3242 | construct_init_block (void) | |
3243 | { | |
3244 | basic_block init_block, first_block; | |
fd44f634 JH |
3245 | edge e = NULL; |
3246 | int flags; | |
275a4187 | 3247 | |
fd44f634 JH |
3248 | /* Multiple entry points not supported yet. */ |
3249 | gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1); | |
5e2d947c JH |
3250 | init_rtl_bb_info (ENTRY_BLOCK_PTR); |
3251 | init_rtl_bb_info (EXIT_BLOCK_PTR); | |
3252 | ENTRY_BLOCK_PTR->flags |= BB_RTL; | |
3253 | EXIT_BLOCK_PTR->flags |= BB_RTL; | |
242229bb | 3254 | |
fd44f634 | 3255 | e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0); |
275a4187 | 3256 | |
fd44f634 JH |
3257 | /* When the entry edge points to the first basic block, we don't need a jump;
3258 | otherwise we have to jump to the proper target. */
3259 | if (e && e->dest != ENTRY_BLOCK_PTR->next_bb) | |
3260 | { | |
726a989a | 3261 | tree label = gimple_block_label (e->dest); |
fd44f634 JH |
3262 | |
3263 | emit_jump (label_rtx (label)); | |
3264 | flags = 0; | |
275a4187 | 3265 | } |
fd44f634 JH |
3266 | else |
3267 | flags = EDGE_FALLTHRU; | |
242229bb JH |
3268 | |
3269 | init_block = create_basic_block (NEXT_INSN (get_insns ()), | |
3270 | get_last_insn (), | |
3271 | ENTRY_BLOCK_PTR); | |
3272 | init_block->frequency = ENTRY_BLOCK_PTR->frequency; | |
3273 | init_block->count = ENTRY_BLOCK_PTR->count; | |
3274 | if (e) | |
3275 | { | |
3276 | first_block = e->dest; | |
3277 | redirect_edge_succ (e, init_block); | |
fd44f634 | 3278 | e = make_edge (init_block, first_block, flags); |
242229bb JH |
3279 | } |
3280 | else | |
3281 | e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU); | |
3282 | e->probability = REG_BR_PROB_BASE; | |
3283 | e->count = ENTRY_BLOCK_PTR->count; | |
3284 | ||
3285 | update_bb_for_insn (init_block); | |
3286 | return init_block; | |
3287 | } | |
3288 | ||
55e092c4 JH |
3289 | /* For each lexical block, set BLOCK_NUMBER to the depth at which it is |
3290 | found in the block tree. */ | |
3291 | ||
3292 | static void | |
3293 | set_block_levels (tree block, int level) | |
3294 | { | |
3295 | while (block) | |
3296 | { | |
3297 | BLOCK_NUMBER (block) = level; | |
3298 | set_block_levels (BLOCK_SUBBLOCKS (block), level + 1); | |
3299 | block = BLOCK_CHAIN (block); | |
3300 | } | |
3301 | } | |
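/* Illustrative example (block names invented): for the block tree

     DECL_INITIAL                  -> level 0
       BLOCK (function body scope) -> level 1
         BLOCK (nested { } scope)  -> level 2
       BLOCK (sibling scope)       -> level 1

   BLOCK_CHAIN walks siblings at one level while BLOCK_SUBBLOCKS
   recurses one level deeper, matching the loop above.  */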
242229bb JH |
3302 | |
3303 | /* Create a block containing landing pads and similar stuff. */ | |
3304 | ||
3305 | static void | |
3306 | construct_exit_block (void) | |
3307 | { | |
3308 | rtx head = get_last_insn (); | |
3309 | rtx end; | |
3310 | basic_block exit_block; | |
628f6a4e BE |
3311 | edge e, e2; |
3312 | unsigned ix; | |
3313 | edge_iterator ei; | |
071a42f9 | 3314 | rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb); |
242229bb | 3315 | |
bf08ebeb JH |
3316 | rtl_profile_for_bb (EXIT_BLOCK_PTR); |
3317 | ||
caf93cb0 | 3318 | /* Make sure the locus is set to the end of the function, so that |
242229bb | 3319 | epilogue line numbers and warnings are set properly. */ |
6773e15f | 3320 | if (cfun->function_end_locus != UNKNOWN_LOCATION) |
242229bb JH |
3321 | input_location = cfun->function_end_locus; |
3322 | ||
3323 | /* The following insns belong to the top scope. */ | |
55e092c4 | 3324 | set_curr_insn_block (DECL_INITIAL (current_function_decl)); |
242229bb | 3325 | |
242229bb JH |
3326 | /* Generate rtl for function exit. */ |
3327 | expand_function_end (); | |
3328 | ||
3329 | end = get_last_insn (); | |
3330 | if (head == end) | |
3331 | return; | |
071a42f9 JH |
3332 | /* While emitting the function end we may have moved the end of the
3333 | last basic block; restore it. */
3334 | BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end; | |
4b4bf941 | 3335 | while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head))) |
242229bb | 3336 | head = NEXT_INSN (head); |
80c7a9eb RH |
3337 | exit_block = create_basic_block (NEXT_INSN (head), end, |
3338 | EXIT_BLOCK_PTR->prev_bb); | |
242229bb JH |
3339 | exit_block->frequency = EXIT_BLOCK_PTR->frequency; |
3340 | exit_block->count = EXIT_BLOCK_PTR->count; | |
628f6a4e BE |
3341 | |
3342 | ix = 0; | |
3343 | while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds)) | |
242229bb | 3344 | { |
8fb790fd | 3345 | e = EDGE_PRED (EXIT_BLOCK_PTR, ix); |
242229bb | 3346 | if (!(e->flags & EDGE_ABNORMAL)) |
628f6a4e BE |
3347 | redirect_edge_succ (e, exit_block); |
3348 | else | |
3349 | ix++; | |
242229bb | 3350 | } |
628f6a4e | 3351 | |
242229bb JH |
3352 | e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU); |
3353 | e->probability = REG_BR_PROB_BASE; | |
3354 | e->count = EXIT_BLOCK_PTR->count; | |
628f6a4e | 3355 | FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds) |
242229bb JH |
3356 | if (e2 != e) |
3357 | { | |
c22cacf3 | 3358 | e->count -= e2->count; |
242229bb JH |
3359 | exit_block->count -= e2->count; |
3360 | exit_block->frequency -= EDGE_FREQUENCY (e2); | |
3361 | } | |
3362 | if (e->count < 0) | |
3363 | e->count = 0; | |
3364 | if (exit_block->count < 0) | |
3365 | exit_block->count = 0; | |
3366 | if (exit_block->frequency < 0) | |
3367 | exit_block->frequency = 0; | |
3368 | update_bb_for_insn (exit_block); | |
3369 | } | |
3370 | ||
c22cacf3 | 3371 | /* Helper function for discover_nonconstant_array_refs. |
a1b23b2f UW |
3372 | Look for ARRAY_REF nodes with non-constant indexes and mark the
3373 | base decls of such references addressable. */
3374 | ||
3375 | static tree | |
3376 | discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees, | |
3377 | void *data ATTRIBUTE_UNUSED) | |
3378 | { | |
3379 | tree t = *tp; | |
3380 | ||
3381 | if (IS_TYPE_OR_DECL_P (t)) | |
3382 | *walk_subtrees = 0; | |
3383 | else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF) | |
3384 | { | |
3385 | while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF) | |
3386 | && is_gimple_min_invariant (TREE_OPERAND (t, 1)) | |
3387 | && (!TREE_OPERAND (t, 2) | |
3388 | || is_gimple_min_invariant (TREE_OPERAND (t, 2)))) | |
3389 | || (TREE_CODE (t) == COMPONENT_REF | |
3390 | && (!TREE_OPERAND (t,2) | |
3391 | || is_gimple_min_invariant (TREE_OPERAND (t, 2)))) | |
3392 | || TREE_CODE (t) == BIT_FIELD_REF | |
3393 | || TREE_CODE (t) == REALPART_EXPR | |
3394 | || TREE_CODE (t) == IMAGPART_EXPR | |
3395 | || TREE_CODE (t) == VIEW_CONVERT_EXPR | |
1043771b | 3396 | || CONVERT_EXPR_P (t)) |
a1b23b2f UW |
3397 | t = TREE_OPERAND (t, 0); |
3398 | ||
3399 | if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF) | |
3400 | { | |
3401 | t = get_base_address (t); | |
6f11d690 RG |
3402 | if (t && DECL_P (t) |
3403 | && DECL_MODE (t) != BLKmode) | |
a1b23b2f UW |
3404 | TREE_ADDRESSABLE (t) = 1; |
3405 | } | |
3406 | ||
3407 | *walk_subtrees = 0; | |
3408 | } | |
3409 | ||
3410 | return NULL_TREE; | |
3411 | } | |
3412 | ||
3413 | /* RTL expansion is not able to compile array references with variable
3414 | offsets for arrays stored in a single register. Discover such
3415 | expressions and mark the variables as addressable to avoid this
3416 | scenario. */
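/* Illustrative example (typical input assumed): in

     int f (int i)
     {
       int v[2] = { 1, 2 };
       return v[i];
     }

   V is small enough to be allocated to a single register, but V[I]
   with a run-time I cannot be expanded from a register.  Marking V
   addressable forces it into memory, where the variable offset becomes
   an ordinary address computation.  */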
3417 | ||
3418 | static void | |
3419 | discover_nonconstant_array_refs (void) | |
3420 | { | |
3421 | basic_block bb; | |
726a989a | 3422 | gimple_stmt_iterator gsi; |
a1b23b2f UW |
3423 | |
3424 | FOR_EACH_BB (bb) | |
726a989a RB |
3425 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
3426 | { | |
3427 | gimple stmt = gsi_stmt (gsi); | |
3428 | walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL); | |
3429 | } | |
a1b23b2f UW |
3430 | } |
3431 | ||
2e3f842f L |
3432 | /* This function sets crtl->args.internal_arg_pointer to a virtual |
3433 | register if DRAP is needed. The local register allocator will replace
3434 | virtual_incoming_args_rtx with the virtual register. */ | |
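/* Illustrative note (target specifics are assumptions, not from this
   file): on i386, a function such as

     void f (void) { int buf[8] __attribute__ ((aligned (32))); ... }

   may need more stack alignment than the incoming stack guarantees.
   The prologue then realigns the stack, and the dynamic realign
   argument pointer (DRAP) register preserves the original incoming
   argument area so parameter references remain valid afterwards.  */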
3435 | ||
3436 | static void | |
3437 | expand_stack_alignment (void) | |
3438 | { | |
3439 | rtx drap_rtx; | |
e939805b | 3440 | unsigned int preferred_stack_boundary; |
2e3f842f L |
3441 | |
3442 | if (! SUPPORTS_STACK_ALIGNMENT) | |
3443 | return; | |
b8698a0f | 3444 | |
2e3f842f L |
3445 | if (cfun->calls_alloca |
3446 | || cfun->has_nonlocal_label | |
3447 | || crtl->has_nonlocal_goto) | |
3448 | crtl->need_drap = true; | |
3449 | ||
890b9b96 L |
3450 | /* Call update_stack_boundary here again to update incoming stack |
3451 | boundary. It may set incoming stack alignment to a different | |
3452 | value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may | |
3453 | use the minimum incoming stack alignment to check if it is OK | |
3454 | to perform sibcall optimization since sibcall optimization will | |
3455 | only align the outgoing stack to incoming stack boundary. */ | |
3456 | if (targetm.calls.update_stack_boundary) | |
3457 | targetm.calls.update_stack_boundary (); | |
3458 | ||
3459 | /* The incoming stack frame has to be aligned at least at | |
3460 | parm_stack_boundary. */ | |
3461 | gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY); | |
2e3f842f | 3462 | |
2e3f842f L |
3463 | /* Update crtl->stack_alignment_estimated and use it later to align |
3464 | the stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
3465 | exceptions since callgraph doesn't collect incoming stack alignment | |
3466 | in this case. */ | |
3467 | if (flag_non_call_exceptions | |
3468 | && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary) | |
3469 | preferred_stack_boundary = PREFERRED_STACK_BOUNDARY; | |
3470 | else | |
3471 | preferred_stack_boundary = crtl->preferred_stack_boundary; | |
3472 | if (preferred_stack_boundary > crtl->stack_alignment_estimated) | |
3473 | crtl->stack_alignment_estimated = preferred_stack_boundary; | |
3474 | if (preferred_stack_boundary > crtl->stack_alignment_needed) | |
3475 | crtl->stack_alignment_needed = preferred_stack_boundary; | |
3476 | ||
890b9b96 L |
3477 | gcc_assert (crtl->stack_alignment_needed |
3478 | <= crtl->stack_alignment_estimated); | |
3479 | ||
2e3f842f | 3480 | crtl->stack_realign_needed |
e939805b | 3481 | = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated; |
d2d93c32 | 3482 | crtl->stack_realign_tried = crtl->stack_realign_needed; |
2e3f842f L |
3483 | |
3484 | crtl->stack_realign_processed = true; | |
3485 | ||
3486 | /* Target has to redefine TARGET_GET_DRAP_RTX to support stack | |
3487 | alignment. */ | |
3488 | gcc_assert (targetm.calls.get_drap_rtx != NULL); | |
b8698a0f | 3489 | drap_rtx = targetm.calls.get_drap_rtx (); |
2e3f842f | 3490 | |
d015f7cc L |
3491 | /* stack_realign_drap and drap_rtx must match. */ |
3492 | gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL)); | |
3493 | ||
2e3f842f L |
3494 | /* Do nothing if NULL is returned, which means DRAP is not needed. */ |
3495 | if (NULL != drap_rtx) | |
3496 | { | |
3497 | crtl->args.internal_arg_pointer = drap_rtx; | |
3498 | ||
3499 | /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is | |
3500 | needed. */ | |
3501 | fixup_tail_calls (); | |
3502 | } | |
3503 | } | |
3504 | ||
242229bb JH |
3505 | /* Translate the intermediate representation contained in the CFG |
3506 | from GIMPLE trees to RTL. | |
3507 | ||
3508 | We do conversion per basic block and preserve/update the tree CFG. | |
3509 | This implies we have to do some magic as the CFG can simultaneously | |
3510 | consist of basic blocks containing RTL and GIMPLE trees. This can | |
61ada8ae | 3511 | confuse the CFG hooks, so be careful not to manipulate the CFG during
242229bb JH |
3512 | the expansion. */ |
3513 | ||
c2924966 | 3514 | static unsigned int |
726a989a | 3515 | gimple_expand_cfg (void) |
242229bb JH |
3516 | { |
3517 | basic_block bb, init_block; | |
3518 | sbitmap blocks; | |
0ef90296 ZD |
3519 | edge_iterator ei; |
3520 | edge e; | |
4e3825db MM |
3521 | unsigned i; |
3522 | ||
3523 | rewrite_out_of_ssa (&SA); | |
3524 | SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions, | |
3525 | sizeof (rtx)); | |
242229bb | 3526 | |
4586b4ca SB |
3527 | /* Some backends want to know that we are expanding to RTL. */ |
3528 | currently_expanding_to_rtl = 1; | |
3529 | ||
bf08ebeb JH |
3530 | rtl_profile_for_bb (ENTRY_BLOCK_PTR); |
3531 | ||
55e092c4 | 3532 | insn_locators_alloc (); |
fe8a7779 | 3533 | if (!DECL_IS_BUILTIN (current_function_decl)) |
1751ecd6 AH |
3534 | { |
3535 | /* Eventually, all FEs should explicitly set function_start_locus. */ | |
3536 | if (cfun->function_start_locus == UNKNOWN_LOCATION) | |
3537 | set_curr_insn_source_location | |
3538 | (DECL_SOURCE_LOCATION (current_function_decl)); | |
3539 | else | |
3540 | set_curr_insn_source_location (cfun->function_start_locus); | |
3541 | } | |
55e092c4 JH |
3542 | set_curr_insn_block (DECL_INITIAL (current_function_decl)); |
3543 | prologue_locator = curr_insn_locator (); | |
3544 | ||
3545 | /* Make sure the first insn is a note even if we don't want linenums.
3546 | This makes sure the first insn will never be deleted. | |
3547 | Also, final expects a note to appear there. */ | |
3548 | emit_note (NOTE_INSN_DELETED); | |
6429e3be | 3549 | |
a1b23b2f UW |
3550 | /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */ |
3551 | discover_nonconstant_array_refs (); | |
3552 | ||
e41b2a33 | 3553 | targetm.expand_to_rtl_hook (); |
cb91fab0 | 3554 | crtl->stack_alignment_needed = STACK_BOUNDARY; |
2e3f842f | 3555 | crtl->max_used_stack_slot_alignment = STACK_BOUNDARY; |
890b9b96 | 3556 | crtl->stack_alignment_estimated = 0; |
cb91fab0 JH |
3557 | crtl->preferred_stack_boundary = STACK_BOUNDARY; |
3558 | cfun->cfg->max_jumptable_ents = 0; | |
3559 | ||
e41b2a33 | 3560 | |
727a31fa | 3561 | /* Expand the variables recorded during gimple lowering. */ |
242229bb JH |
3562 | expand_used_vars (); |
3563 | ||
7d69de61 RH |
3564 | /* Honor stack protection warnings. */ |
3565 | if (warn_stack_protect) | |
3566 | { | |
e3b5732b | 3567 | if (cfun->calls_alloca) |
b8698a0f | 3568 | warning (OPT_Wstack_protector, |
c5409249 | 3569 | "not protecting local variables: variable length buffer"); |
cb91fab0 | 3570 | if (has_short_buffer && !crtl->stack_protect_guard) |
b8698a0f | 3571 | warning (OPT_Wstack_protector, |
c5409249 | 3572 | "not protecting function: no buffer at least %d bytes long", |
7d69de61 RH |
3573 | (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE)); |
3574 | } | |
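/* Illustrative example (the parameter value is the historical default,
   assumed here): with -fstack-protector -Wstack-protector and
   --param ssp-buffer-size=8,

     void f (void) { char buf[4]; use (buf); }

   has no buffer of at least 8 bytes, so no guard is added and the
   "not protecting function" warning above is issued; any alloca or
   variable length array instead triggers the first warning.  */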
3575 | ||
242229bb | 3576 | /* Set up parameters and prepare for return, for the function. */ |
b79c5284 | 3577 | expand_function_start (current_function_decl); |
242229bb | 3578 | |
4e3825db MM |
3579 | /* Now that we also have the parameter RTXs, copy them over to our |
3580 | partitions. */ | |
3581 | for (i = 0; i < SA.map->num_partitions; i++) | |
3582 | { | |
3583 | tree var = SSA_NAME_VAR (partition_to_var (SA.map, i)); | |
3584 | ||
3585 | if (TREE_CODE (var) != VAR_DECL | |
3586 | && !SA.partition_to_pseudo[i]) | |
3587 | SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var); | |
3588 | gcc_assert (SA.partition_to_pseudo[i]); | |
eb7adebc MM |
3589 | |
3590 | /* If this decl was marked as living in multiple places, reset
3591 | its DECL_RTL to NULL now. */
3592 | if (DECL_RTL_IF_SET (var) == pc_rtx) | |
3593 | SET_DECL_RTL (var, NULL); | |
3594 | ||
4e3825db MM |
3595 | /* Some RTL parts really want to look at DECL_RTL(x) when x |
3596 | was a decl marked in REG_ATTR or MEM_ATTR. We could use | |
3597 | SET_DECL_RTL here, making this available, but that would mean
3598 | selecting one of the potentially many RTLs for one DECL. Instead
3599 | of doing that we simply reset the MEM_EXPR of the RTL in question,
3600 | so that nobody can get at it and hence nobody can call DECL_RTL on it. */
3601 | if (!DECL_RTL_SET_P (var)) | |
3602 | { | |
3603 | if (MEM_P (SA.partition_to_pseudo[i])) | |
3604 | set_mem_expr (SA.partition_to_pseudo[i], NULL); | |
3605 | } | |
3606 | } | |
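/* Illustrative example (SSA names invented): after out-of-SSA
   coalescing, names such as i_1, i_3 and i_7 may share one partition
   that maps to a single pseudo, say (reg:SI 61).  A partition backed
   by a PARM_DECL (or RESULT_DECL) got its RTL from
   expand_function_start instead, which is why it is copied over from
   DECL_RTL in the loop above.  */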
3607 | ||
242229bb JH |
3608 | /* If this function is `main', emit a call to `__main' |
3609 | to run global initializers, etc. */ | |
3610 | if (DECL_NAME (current_function_decl) | |
3611 | && MAIN_NAME_P (DECL_NAME (current_function_decl)) | |
3612 | && DECL_FILE_SCOPE_P (current_function_decl)) | |
3613 | expand_main_function (); | |
3614 | ||
7d69de61 RH |
3615 | /* Initialize the stack_protect_guard field. This must happen after the |
3616 | call to __main (if any) so that the external decl is initialized. */ | |
cb91fab0 | 3617 | if (crtl->stack_protect_guard) |
7d69de61 RH |
3618 | stack_protect_prologue (); |
3619 | ||
4e3825db MM |
3620 | expand_phi_nodes (&SA); |
3621 | ||
3fbd86b1 | 3622 | /* Register rtl specific functions for cfg. */ |
242229bb JH |
3623 | rtl_register_cfg_hooks (); |
3624 | ||
3625 | init_block = construct_init_block (); | |
3626 | ||
0ef90296 | 3627 | /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleared from the
4e3825db | 3628 | remaining edges later. */ |
0ef90296 ZD |
3629 | FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs) |
3630 | e->flags &= ~EDGE_EXECUTABLE; | |
3631 | ||
8b11009b | 3632 | lab_rtx_for_bb = pointer_map_create (); |
242229bb | 3633 | FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb) |
10d22567 | 3634 | bb = expand_gimple_basic_block (bb); |
bf08ebeb | 3635 | |
b5b8b0ac AO |
3636 | if (MAY_HAVE_DEBUG_INSNS) |
3637 | expand_debug_locations (); | |
3638 | ||
4e3825db MM |
3639 | execute_free_datastructures (); |
3640 | finish_out_of_ssa (&SA); | |
3641 | ||
bf08ebeb JH |
3642 | /* Expansion is used by optimization passes too; set maybe_hot_insn_p
3643 | conservatively to true until they are all profile aware. */ | |
8b11009b | 3644 | pointer_map_destroy (lab_rtx_for_bb); |
cb91fab0 | 3645 | free_histograms (); |
242229bb JH |
3646 | |
3647 | construct_exit_block (); | |
55e092c4 JH |
3648 | set_curr_insn_block (DECL_INITIAL (current_function_decl)); |
3649 | insn_locators_finalize (); | |
242229bb | 3650 | |
1d65f45c | 3651 | /* Zap the tree EH table. */ |
e8a2a782 | 3652 | set_eh_throw_stmt_table (cfun, NULL); |
242229bb JH |
3653 | |
3654 | rebuild_jump_labels (get_insns ()); | |
242229bb | 3655 | |
4e3825db MM |
3656 | FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb) |
3657 | { | |
3658 | edge e; | |
3659 | edge_iterator ei; | |
3660 | for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); ) | |
3661 | { | |
3662 | if (e->insns.r) | |
3663 | commit_one_edge_insertion (e); | |
3664 | else | |
3665 | ei_next (&ei); | |
3666 | } | |
3667 | } | |
3668 | ||
3669 | /* We're done expanding trees to RTL. */ | |
3670 | currently_expanding_to_rtl = 0; | |
3671 | ||
3672 | FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb) | |
3673 | { | |
3674 | edge e; | |
3675 | edge_iterator ei; | |
3676 | for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); ) | |
3677 | { | |
3678 | /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */ | |
3679 | e->flags &= ~EDGE_EXECUTABLE; | |
3680 | ||
3681 | /* At the moment not all abnormal edges match the RTL | |
3682 | representation. It is safe to remove them here as | |
3683 | find_many_sub_basic_blocks will rediscover them. | |
3684 | In the future we should get this fixed properly. */ | |
3685 | if ((e->flags & EDGE_ABNORMAL) | |
3686 | && !(e->flags & EDGE_SIBCALL)) | |
3687 | remove_edge (e); | |
3688 | else | |
3689 | ei_next (&ei); | |
3690 | } | |
3691 | } | |
3692 | ||
242229bb JH |
3693 | blocks = sbitmap_alloc (last_basic_block); |
3694 | sbitmap_ones (blocks); | |
3695 | find_many_sub_basic_blocks (blocks); | |
242229bb | 3696 | sbitmap_free (blocks); |
4e3825db | 3697 | purge_all_dead_edges (); |
242229bb JH |
3698 | |
3699 | compact_blocks (); | |
2e3f842f L |
3700 | |
3701 | expand_stack_alignment (); | |
3702 | ||
242229bb | 3703 | #ifdef ENABLE_CHECKING |
62e5bf5d | 3704 | verify_flow_info (); |
242229bb | 3705 | #endif |
9f8628ba PB |
3706 | |
3707 | /* There's no need to defer outputting this function any more; we | |
3708 | know we want to output it. */ | |
3709 | DECL_DEFER_OUTPUT (current_function_decl) = 0; | |
3710 | ||
3711 | /* Now that we're done expanding trees to RTL, we shouldn't have any | |
3712 | more CONCATs anywhere. */ | |
3713 | generating_concat_p = 0; | |
3714 | ||
b7211528 SB |
3715 | if (dump_file) |
3716 | { | |
3717 | fprintf (dump_file, | |
3718 | "\n\n;;\n;; Full RTL generated for this function:\n;;\n"); | |
3719 | /* And the pass manager will dump RTL for us. */ | |
3720 | } | |
ef330312 PB |
3721 | |
3722 | /* If we're emitting a nested function, make sure its parent gets | |
3723 | emitted as well. Doing otherwise confuses debug info. */ | |
c22cacf3 | 3724 | { |
ef330312 PB |
3725 | tree parent; |
3726 | for (parent = DECL_CONTEXT (current_function_decl); | |
c22cacf3 MS |
3727 | parent != NULL_TREE; |
3728 | parent = get_containing_scope (parent)) | |
ef330312 | 3729 | if (TREE_CODE (parent) == FUNCTION_DECL) |
c22cacf3 | 3730 | TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1; |
ef330312 | 3731 | } |
c22cacf3 | 3732 | |
ef330312 PB |
3733 | /* We are now committed to emitting code for this function. Do any |
3734 | preparation, such as emitting abstract debug info for the inline | |
3735 | before it gets mangled by optimization. */ | |
3736 | if (cgraph_function_possibly_inlined_p (current_function_decl)) | |
3737 | (*debug_hooks->outlining_inline_function) (current_function_decl); | |
3738 | ||
3739 | TREE_ASM_WRITTEN (current_function_decl) = 1; | |
4bb1e037 AP |
3740 | |
3741 | /* After expanding, the return labels are no longer needed. */ | |
3742 | return_label = NULL; | |
3743 | naked_return_label = NULL; | |
55e092c4 JH |
3744 | /* Tag the blocks with a depth number so that change_scope can find |
3745 | the common parent easily. */ | |
3746 | set_block_levels (DECL_INITIAL (cfun->decl), 0); | |
bf08ebeb | 3747 | default_rtl_profile (); |
c2924966 | 3748 | return 0; |
242229bb JH |
3749 | } |
3750 | ||
e3b5732b | 3751 | struct rtl_opt_pass pass_expand = |
242229bb | 3752 | { |
8ddbbcae | 3753 | { |
e3b5732b | 3754 | RTL_PASS, |
c22cacf3 | 3755 | "expand", /* name */ |
242229bb | 3756 | NULL, /* gate */ |
726a989a | 3757 | gimple_expand_cfg, /* execute */ |
242229bb JH |
3758 | NULL, /* sub */ |
3759 | NULL, /* next */ | |
3760 | 0, /* static_pass_number */ | |
c22cacf3 | 3761 | TV_EXPAND, /* tv_id */ |
688a482d RG |
3762 | PROP_ssa | PROP_gimple_leh | PROP_cfg |
3763 | | PROP_gimple_lcx, /* properties_required */ | |
242229bb | 3764 | PROP_rtl, /* properties_provided */ |
4e3825db MM |
3765 | PROP_ssa | PROP_trees, /* properties_destroyed */ |
3766 | TODO_verify_ssa | TODO_verify_flow | |
3767 | | TODO_verify_stmts, /* todo_flags_start */ | |
3768 | TODO_dump_func | |
3769 | | TODO_ggc_collect /* todo_flags_finish */ | |
8ddbbcae | 3770 | } |
242229bb | 3771 | }; |