/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "target.h"
#include "ssaexpand.h"
#include "bitmap.h"
#include "sbitmap.h"
/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;

/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)
              && gimple_block (stmt) != TREE_BLOCK (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));
  if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
    TREE_BLOCK (t) = gimple_block (stmt);

  return t;
}
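
/* For illustration: a GIMPLE_BINARY_RHS assignment such as a_1 = b_2 + c_3
   is turned by the function above into the GENERIC tree
   PLUS_EXPR <b_2, c_3>, given the type of the LHS.  */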


#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)

/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}
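
/* Note that pc_rtx does double duty as a sentinel: add_stack_var below
   stores it to mark a variable whose slot assignment has been deferred,
   and set_rtl above uses it as the "multiple places" marker when several
   partitions of the same base variable end up in different places.  */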

/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The variable.  */
  tree decl;

  /* The offset of the variable.  During partitioning, this is the
     offset relative to the partition.  After partitioning, this
     is relative to the stack frame.  */
  HOST_WIDE_INT offset;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or, as with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The indices of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset + frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;
/* Discover the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
get_decl_align_unit (tree decl)
{
  unsigned int align;

  align = LOCAL_DECL_ALIGNMENT (decl);

  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    align = MAX_SUPPORTED_STACK_ALIGNMENT;

  if (SUPPORTS_STACK_ALIGNMENT)
    {
      if (crtl->stack_alignment_estimated < align)
        {
          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;
        }
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  return align / BITS_PER_UNIT;
}

/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
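
/* A worked example (illustration only): with FRAME_GROWS_DOWNWARD,
   frame_phase == 0 and frame_offset == -16, a request for SIZE 12 at
   ALIGN 8 computes -16 - 12 == -28, rounds down to -32 via the
   "&= -align" step, and returns -32; the object occupies bytes
   -32..-21 and 4 bytes become padding.  */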

/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  stack_vars[stack_vars_num].decl = decl;
  stack_vars[stack_vars_num].offset = 0;
  stack_vars[stack_vars_num].size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  stack_vars[stack_vars_num].alignb = get_decl_align_unit (SSAVAR (decl));

  /* All variables are initially in their own partition.  */
  stack_vars[stack_vars_num].representative = stack_vars_num;
  stack_vars[stack_vars_num].next = EOC;

  /* All variables initially conflict with no other.  */
  stack_vars[stack_vars_num].conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}

/* Make the decls associated with luids X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (NULL);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (NULL);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}

/* Check whether the decls associated with luids X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}
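
/* add_stack_var_conflict records each conflict symmetrically (bit Y in
   X's bitmap and bit X in Y's), which is why stack_var_conflict_p only
   needs to test one direction.  */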

/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = TREE_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
        return true;

  return false;
}
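
/* For example, the predicate above is true for
   struct { union { short s; int i; } u; } and for an array of unions,
   but false for struct { int i; }.  */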

/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then do add a conflict for these variables
   in the interference graph.  We also need to make sure to add conflicts
   for unions and structures containing unions.  Else RTL alias analysis
   comes along and due to type based aliasing rules decides that for two
   overlapping union temporaries { short s; int i; } accesses to the same
   mem through different types may not alias and happily reorders stores
   across life-time boundaries of the temporaries (See PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
        {
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
          if (aggr_i != aggr_j
              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case.  */
              || contains_union)
            add_stack_var_conflict (i, j);
        }
    }
}

/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the size and type of the object.  */

static int
stack_var_size_cmp (const void *a, const void *b)
{
  HOST_WIDE_INT sa = stack_vars[*(const size_t *)a].size;
  HOST_WIDE_INT sb = stack_vars[*(const size_t *)b].size;
  tree decla, declb;
  unsigned int uida, uidb;

  if (sa < sb)
    return -1;
  if (sa > sb)
    return 1;
  decla = stack_vars[*(const size_t *)a].decl;
  declb = stack_vars[*(const size_t *)b].decl;
  /* For stack variables of the same size, use the uid of the decls
     to make the sort stable.  Two SSA names are compared by their
     version, SSA names come before non-SSA names, and two normal
     decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return -1;
  if (uida > uidb)
    return 1;
  return 0;
}
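
/* The uid comparison matters because qsort is not a stable sort:
   without a total order on equal-sized entries, the resulting stack
   layout (and thus code generation) could differ from host to host.  */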


/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert (visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}

/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with a single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          /* We should never end up partitioning SSA names (though they
             may end up on the stack).  Neither should we allocate stack
             space to something that is unused and thus unreferenced.  */
          gcc_assert (DECL_P (decl)
                      && referenced_var_lookup (DECL_UID (decl)));
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                         decl)) = name;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (NULL);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}
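
/* This rewriting is needed for correctness: if pointer P may point to
   variable A, and A later shares a stack slot with B, a store through
   P can clobber B as well, so any points-to set naming one partition
   member must name them all.  */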

/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.

   At the same time, add OFFSET to all variables in partition B.  At the end
   of the partitioning process we'll have a nice block that is easy to lay
   out within the stack frame.  */

static void
union_stack_vars (size_t a, size_t b, HOST_WIDE_INT offset)
{
  size_t i, last;
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  /* Update each element of partition B with the given offset,
     and merge them into partition A.  */
  for (last = i = b; i != EOC; last = i, i = stack_vars[i].next)
    {
      stack_vars[i].offset += offset;
      stack_vars[i].representative = a;
    }
  stack_vars[last].next = stack_vars[a].next;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}

/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
            offset(B) = O
            O += size(B)
            S -= size(B)
          }
        }
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_size_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      HOST_WIDE_INT isize = stack_vars[i].size;
      HOST_WIDE_INT offset = 0;

      for (sj = si; sj-- > 0; )
        {
          size_t j = stack_vars_sorted[sj];
          HOST_WIDE_INT jsize = stack_vars[j].size;
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore objects too large for the remaining space.  */
          if (isize < jsize)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Refine the remaining space check to include alignment.  */
          if (offset & (jalign - 1))
            {
              HOST_WIDE_INT toff = offset;
              toff += jalign - 1;
              toff &= -(HOST_WIDE_INT)jalign;
              if (isize - (toff - offset) < jsize)
                continue;

              isize -= toff - offset;
              offset = toff;
            }

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j, offset);

          isize -= jsize;
          if (isize == 0)
            break;
        }
    }

  if (optimize)
    update_alias_info_with_stack_vars ();
}
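
/* Because only non-conflicting (never simultaneously live) objects are
   merged, members placed inside a partition may reuse the very bytes of
   the representative's storage; this slot sharing is where the stack
   savings come from.  The pairwise scan is quadratic in the number of
   stack variables, which is why defer_stack_allocation below tries to
   keep the candidate list small.  */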

/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
          fprintf (dump_file, ", offset " HOST_WIDE_INT_PRINT_DEC "\n",
                   stack_vars[j].offset);
        }
    }
}

/* Assign rtl to DECL at frame offset OFFSET.  */

static void
expand_one_stack_var_at (tree decl, HOST_WIDE_INT offset)
{
  /* Alignment is unsigned.  */
  unsigned HOST_WIDE_INT align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (virtual_stack_vars_rtx, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0)
        align = STACK_BOUNDARY;
      else if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = MAX_SUPPORTED_STACK_ALIGNMENT;

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}
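
/* The "offset & -offset" computation above isolates the lowest set bit
   of the phase-corrected offset, i.e. the largest power of two dividing
   it, which is exactly the alignment the slot is guaranteed to have.
   For example, an offset of 24 gives 24 & -24 == 8, i.e. 64-bit
   alignment.  */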

/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      if ((TREE_CODE (stack_vars[i].decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, stack_vars[i].decl)]
           : DECL_RTL (stack_vars[i].decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (stack_vars[i].decl))
        continue;

      offset = alloc_stack_frame_space (stack_vars[i].size,
                                        stack_vars[i].alignb);

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          gcc_assert (stack_vars[j].offset <= stack_vars[i].size);
          expand_one_stack_var_at (stack_vars[j].decl,
                                   stack_vars[j].offset + offset);
        }
    }
}

/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}

/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset, align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  align = get_decl_align_unit (SSAVAR (var));
  offset = alloc_stack_frame_space (size, align);

  expand_one_stack_var_at (var, offset);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}

/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, TYPE_ALIGN (TREE_TYPE (type)));
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}

/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}
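
/* For example, at -O0 without -fstack-protector a block-scope int is
   allocated immediately (its typically 4 bytes fall under the 32-byte
   cutoff), while a block-scope 64-byte array is deferred and considered
   for coalescing.  */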

/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return the stack usage this variable is supposed to take.  */

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  tree origvar = var;
  var = SSAVAR (var);

  if (SUPPORTS_STACK_ALIGNMENT
      && TREE_TYPE (var) != error_mark_node
      && TREE_CODE (var) == VAR_DECL)
    {
      unsigned int align;

      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      if (crtl->stack_alignment_estimated < align)
        {
          /* stack_alignment_estimated shouldn't change after stack
             realign decision made.  */
          gcc_assert (!crtl->stack_realign_processed);
          crtl->stack_alignment_estimated = align;
        }
    }

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
    {
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t))
      expand_one_var (t, toplevel, true);

  this_sv_num = stack_vars_num;

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;

      for (i = old_sv_num; i < new_sv_num; ++i)
        for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;)
          add_stack_var_conflict (i, j);
    }
}

/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}

/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
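
/* For example, with the default --param ssp-buffer-size=8, char buf[64]
   classifies as SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY, char tag[4]
   as SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY, and a struct containing
   either of them additionally gets SPCT_HAS_AGGREGATE.  */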

/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}

/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}

/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}

/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}

/* A subroutine of estimated_stack_frame_size.  Walk down through the
   BLOCK tree and account for the stack space used by variables; nothing
   is actually expanded here, we only sum up the sizes.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static HOST_WIDE_INT
account_used_vars_for_block (tree block, bool toplevel)
{
  tree t;
  HOST_WIDE_INT size = 0;

  /* Account for all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = TREE_CHAIN (t))
    if (TREE_USED (t))
      size += expand_one_var (t, toplevel, false);

  /* Account for all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    size += account_used_vars_for_block (t, false);

  return size;
}

/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  tree t;
  /* Set TREE_USED on all variables in the local_decls.  */
  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    TREE_USED (TREE_VALUE (t)) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}

/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  size_t i, n = stack_vars_num;
  for (i = 0; i < n; i++)
    BITMAP_FREE (stack_vars[i].conflicts);
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_alloc = stack_vars_num = 0;
}

/* Make a fair guess for the size of the stack frame of the current
   function.  This doesn't have to be exact, the result is only used
   in the inline heuristics.  So we don't want to run the full stack
   var packing algorithm (which is quadratic in the number of stack
   vars).  Instead, we calculate the total size of all stack vars.
   This turns out to be a pretty fair estimate -- packing of stack
   vars doesn't happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (void)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree t, outer_block = DECL_INITIAL (current_function_decl);

  init_vars_expansion ();

  for (t = cfun->local_decls; t; t = TREE_CHAIN (t))
    {
      tree var = TREE_VALUE (t);

      if (TREE_USED (var))
        size += expand_one_var (var, true, false);
      TREE_USED (var) = 1;
    }
  size += account_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
      fini_vars_expansion ();
    }

  return size;
}

/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree t, next, outer_block = DECL_INITIAL (current_function_decl);
  tree maybe_local_decls = NULL_TREE;
  unsigned i;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }

  init_vars_expansion ();

  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = partition_to_var (SA.map, i);

      gcc_assert (is_gimple_reg (var));
      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        expand_one_var (var, true, true);
      else
        {
          /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
             contain the default def (representing the parm or result itself)
             we don't do anything here.  But those which don't contain the
             default def (representing a temporary based on the parm/result)
             we need to allocate space just like for normal VAR_DECLs.  */
          if (!bitmap_bit_p (SA.partition_has_default_def, i))
            {
              expand_one_var (var, true, true);
              gcc_assert (SA.partition_to_pseudo[i]);
            }
        }
    }

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */
  t = cfun->local_decls;
  cfun->local_decls = NULL_TREE;
  for (; t; t = next)
    {
      tree var = TREE_VALUE (t);
      bool expand_now = false;

      next = TREE_CHAIN (t);

      /* Expanded above already.  */
      if (is_gimple_reg (var))
        {
          TREE_USED (var) = 0;
          goto next;
        }
      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* If the variable is not associated with any block, then it
         was created by the optimizers, and could be live anywhere
         in the function.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
        {
          rtx rtl = DECL_RTL_IF_SET (var);

          /* Keep artificial non-ignored vars in cfun->local_decls
             chain until instantiate_decls.  */
          if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
            {
              TREE_CHAIN (t) = cfun->local_decls;
              cfun->local_decls = t;
              continue;
            }
          else if (rtl == NULL_RTX)
            {
              /* If rtl isn't set yet, which can happen e.g. with
                 -fstack-protector, retry before returning from this
                 function.  */
              TREE_CHAIN (t) = maybe_local_decls;
              maybe_local_decls = t;
              continue;
            }
        }

      ggc_free (t);
    }

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Due to the way alias sets work, no variables with non-conflicting
         alias sets may be assigned the same address.  Add conflicts to
         reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
          && (cfun->calls_alloca || has_protected_decls)))
    create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == 2)
            expand_stack_vars (stack_protect_decl_phase_2);
        }

      expand_stack_vars (NULL);

      fini_vars_expansion ();
    }

  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation hasn't assigned
     rtl to them.  */
  for (t = maybe_local_decls; t; t = next)
    {
      tree var = TREE_VALUE (t);
      rtx rtl = DECL_RTL_IF_SET (var);

      next = TREE_CHAIN (t);

      /* Keep artificial non-ignored vars in cfun->local_decls
         chain until instantiate_decls.  */
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
        {
          TREE_CHAIN (t) = cfun->local_decls;
          cfun->local_decls = t;
          continue;
        }

      ggc_free (t);
    }

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
    }
}
1476 | ||
1477 | ||
b7211528 SB |
1478 | /* If we need to produce a detailed dump, print the tree representation |
1479 | for STMT to the dump file. SINCE is the last RTX after which the RTL | |
1480 | generated for STMT should have been appended. */ | |
1481 | ||
1482 | static void | |
726a989a | 1483 | maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since) |
b7211528 SB |
1484 | { |
1485 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
1486 | { | |
1487 | fprintf (dump_file, "\n;; "); | |
b5b8b0ac AO |
1488 | print_gimple_stmt (dump_file, stmt, 0, |
1489 | TDF_SLIM | (dump_flags & TDF_LINENO)); | |
b7211528 SB |
1490 | fprintf (dump_file, "\n"); |
1491 | ||
1492 | print_rtl (dump_file, since ? NEXT_INSN (since) : since); | |
1493 | } | |
1494 | } | |
1495 | ||
8b11009b ZD |
1496 | /* Maps the blocks that do not contain tree labels to rtx labels. */ |
1497 | ||
1498 | static struct pointer_map_t *lab_rtx_for_bb; | |
1499 | ||
a9b77cd1 ZD |
1500 | /* Returns the label_rtx expression for a label starting basic block BB. */ |
1501 | ||
1502 | static rtx | |
726a989a | 1503 | label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED) |
a9b77cd1 | 1504 | { |
726a989a RB |
1505 | gimple_stmt_iterator gsi; |
1506 | tree lab; | |
1507 | gimple lab_stmt; | |
8b11009b | 1508 | void **elt; |
a9b77cd1 ZD |
1509 | |
1510 | if (bb->flags & BB_RTL) | |
1511 | return block_label (bb); | |
1512 | ||
8b11009b ZD |
1513 | elt = pointer_map_contains (lab_rtx_for_bb, bb); |
1514 | if (elt) | |
ae50c0cb | 1515 | return (rtx) *elt; |
8b11009b ZD |
1516 | |
1517 | /* Find the tree label if it is present. */ | |
b8698a0f | 1518 | |
726a989a | 1519 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
a9b77cd1 | 1520 | { |
726a989a RB |
1521 | lab_stmt = gsi_stmt (gsi); |
1522 | if (gimple_code (lab_stmt) != GIMPLE_LABEL) | |
a9b77cd1 ZD |
1523 | break; |
1524 | ||
726a989a | 1525 | lab = gimple_label_label (lab_stmt); |
a9b77cd1 ZD |
1526 | if (DECL_NONLOCAL (lab)) |
1527 | break; | |
1528 | ||
1529 | return label_rtx (lab); | |
1530 | } | |
1531 | ||
8b11009b ZD |
1532 | elt = pointer_map_insert (lab_rtx_for_bb, bb); |
1533 | *elt = gen_label_rtx (); | |
ae50c0cb | 1534 | return (rtx) *elt; |
a9b77cd1 ZD |
1535 | } |
1536 | ||
726a989a | 1537 | |
529ff441 MM |
1538 | /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge |
1539 | of a basic block where we just expanded the conditional at the end, | |
315adeda MM |
1540 | possibly clean up the CFG and instruction sequence. LAST is the |
1541 | last instruction before the just emitted jump sequence. */ | |
529ff441 MM |
1542 | |
1543 | static void | |
315adeda | 1544 | maybe_cleanup_end_of_block (edge e, rtx last) |
529ff441 MM |
1545 | { |
1546 | /* Special case: when jumpif decides that the condition is | |
1547 | trivial it emits an unconditional jump (and the necessary | |
1548 | barrier). But we still have two edges, the fallthru one is | |
1549 | wrong. purge_dead_edges would clean this up later. Unfortunately | |
1550 | we have to insert insns (and split edges) before | |
1551 | find_many_sub_basic_blocks and hence before purge_dead_edges. | |
1552 | But splitting edges might create new blocks which depend on the | |
1553 | fact that if there are two edges there's no barrier. So the | |
1554 | barrier would get lost and verify_flow_info would ICE. Instead | |
1555 | of auditing all edge splitters to care for the barrier (which | |
1556 | normally isn't there in a cleaned CFG), fix it here. */ | |
1557 | if (BARRIER_P (get_last_insn ())) | |
1558 | { | |
529ff441 MM |
1559 | rtx insn; |
1560 | remove_edge (e); | |
1561 | /* Now, we have a single successor block, if we have insns to | |
1562 | insert on the remaining edge we potentially will insert | |
1563 | it at the end of this block (if the dest block isn't feasible) | |
1564 | in order to avoid splitting the edge. This insertion will take | |
1565 | place in front of the last jump. But we might have emitted | |
1566 | multiple jumps (conditional and one unconditional) to the | |
1567 | same destination. Inserting in front of the last one then | |
1568 | is a problem. See PR 40021. We fix this by deleting all | |
1569 | jumps except the last unconditional one. */ | |
1570 | insn = PREV_INSN (get_last_insn ()); | |
1571 | /* Make sure we have an unconditional jump. Otherwise we're | |
1572 | confused. */ | |
1573 | gcc_assert (JUMP_P (insn) && !any_condjump_p (insn)); | |
315adeda | 1574 | for (insn = PREV_INSN (insn); insn != last;) |
529ff441 MM |
1575 | { |
1576 | insn = PREV_INSN (insn); | |
1577 | if (JUMP_P (NEXT_INSN (insn))) | |
1578 | delete_insn (NEXT_INSN (insn)); | |
1579 | } | |
1580 | } | |
1581 | } | |
1582 | ||
726a989a | 1583 | /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND. |
80c7a9eb RH |
1584 | Returns a new basic block if we've terminated the current basic |
1585 | block and created a new one. */ | |
1586 | ||
1587 | static basic_block | |
726a989a | 1588 | expand_gimple_cond (basic_block bb, gimple stmt) |
80c7a9eb RH |
1589 | { |
1590 | basic_block new_bb, dest; | |
1591 | edge new_edge; | |
1592 | edge true_edge; | |
1593 | edge false_edge; | |
b7211528 | 1594 | rtx last2, last; |
28ed065e MM |
1595 | enum tree_code code; |
1596 | tree op0, op1; | |
1597 | ||
1598 | code = gimple_cond_code (stmt); | |
1599 | op0 = gimple_cond_lhs (stmt); | |
1600 | op1 = gimple_cond_rhs (stmt); | |
1601 | /* We're sometimes presented with such code: | |
1602 | D.123_1 = x < y; | |
1603 | if (D.123_1 != 0) | |
1604 | ... | |
1605 | This would expand to two comparisons which then later might | |
1606 | be cleaned up by combine. But some pattern matchers like if-conversion | |
1607 | work better when there's only one compare, so make up for this | |
1608 | here as a special exception if TER would have made the same change. */ | |
1609 | if (gimple_cond_single_var_p (stmt) | |
1610 | && SA.values | |
1611 | && TREE_CODE (op0) == SSA_NAME | |
1612 | && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0))) | |
1613 | { | |
1614 | gimple second = SSA_NAME_DEF_STMT (op0); | |
e83f4b68 | 1615 | if (gimple_code (second) == GIMPLE_ASSIGN) |
28ed065e | 1616 | { |
e83f4b68 MM |
1617 | enum tree_code code2 = gimple_assign_rhs_code (second); |
1618 | if (TREE_CODE_CLASS (code2) == tcc_comparison) | |
1619 | { | |
1620 | code = code2; | |
1621 | op0 = gimple_assign_rhs1 (second); | |
1622 | op1 = gimple_assign_rhs2 (second); | |
1623 | } | |
1624 | /* If jumps are cheap, turn some more codes into | |
1625 | jumpy sequences. */ | |
1626 | else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4) | |
1627 | { | |
1628 | if ((code2 == BIT_AND_EXPR | |
1629 | && TYPE_PRECISION (TREE_TYPE (op0)) == 1 | |
1630 | && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST) | |
1631 | || code2 == TRUTH_AND_EXPR) | |
1632 | { | |
1633 | code = TRUTH_ANDIF_EXPR; | |
1634 | op0 = gimple_assign_rhs1 (second); | |
1635 | op1 = gimple_assign_rhs2 (second); | |
1636 | } | |
1637 | else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR) | |
1638 | { | |
1639 | code = TRUTH_ORIF_EXPR; | |
1640 | op0 = gimple_assign_rhs1 (second); | |
1641 | op1 = gimple_assign_rhs2 (second); | |
1642 | } | |
1643 | } | |
28ed065e MM |
1644 | } |
1645 | } | |
b7211528 SB |
1646 | |
1647 | last2 = last = get_last_insn (); | |
80c7a9eb RH |
1648 | |
1649 | extract_true_false_edges_from_block (bb, &true_edge, &false_edge); | |
726a989a | 1650 | if (gimple_has_location (stmt)) |
80c7a9eb | 1651 | { |
726a989a RB |
1652 | set_curr_insn_source_location (gimple_location (stmt)); |
1653 | set_curr_insn_block (gimple_block (stmt)); | |
80c7a9eb RH |
1654 | } |
1655 | ||
1656 | /* These flags have no purpose in RTL land. */ | |
1657 | true_edge->flags &= ~EDGE_TRUE_VALUE; | |
1658 | false_edge->flags &= ~EDGE_FALSE_VALUE; | |
1659 | ||
1660 | /* We can either have a pure conditional jump with one fallthru edge or | |
1661 | a two-way jump that needs to be decomposed into two basic blocks. */ | |
a9b77cd1 | 1662 | if (false_edge->dest == bb->next_bb) |
80c7a9eb | 1663 | { |
40e90eac JJ |
1664 | jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest), |
1665 | true_edge->probability); | |
726a989a | 1666 | maybe_dump_rtl_for_gimple_stmt (stmt, last); |
a9b77cd1 | 1667 | if (true_edge->goto_locus) |
7241571e JJ |
1668 | { |
1669 | set_curr_insn_source_location (true_edge->goto_locus); | |
1670 | set_curr_insn_block (true_edge->goto_block); | |
1671 | true_edge->goto_locus = curr_insn_locator (); | |
1672 | } | |
1673 | true_edge->goto_block = NULL; | |
a9b77cd1 | 1674 | false_edge->flags |= EDGE_FALLTHRU; |
315adeda | 1675 | maybe_cleanup_end_of_block (false_edge, last); |
80c7a9eb RH |
1676 | return NULL; |
1677 | } | |
a9b77cd1 | 1678 | if (true_edge->dest == bb->next_bb) |
80c7a9eb | 1679 | { |
40e90eac JJ |
1680 | jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest), |
1681 | false_edge->probability); | |
726a989a | 1682 | maybe_dump_rtl_for_gimple_stmt (stmt, last); |
a9b77cd1 | 1683 | if (false_edge->goto_locus) |
7241571e JJ |
1684 | { |
1685 | set_curr_insn_source_location (false_edge->goto_locus); | |
1686 | set_curr_insn_block (false_edge->goto_block); | |
1687 | false_edge->goto_locus = curr_insn_locator (); | |
1688 | } | |
1689 | false_edge->goto_block = NULL; | |
a9b77cd1 | 1690 | true_edge->flags |= EDGE_FALLTHRU; |
315adeda | 1691 | maybe_cleanup_end_of_block (true_edge, last); |
80c7a9eb RH |
1692 | return NULL; |
1693 | } | |
80c7a9eb | 1694 | |
40e90eac JJ |
1695 | jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest), |
1696 | true_edge->probability); | |
80c7a9eb | 1697 | last = get_last_insn (); |
7241571e JJ |
1698 | if (false_edge->goto_locus) |
1699 | { | |
1700 | set_curr_insn_source_location (false_edge->goto_locus); | |
1701 | set_curr_insn_block (false_edge->goto_block); | |
1702 | false_edge->goto_locus = curr_insn_locator (); | |
1703 | } | |
1704 | false_edge->goto_block = NULL; | |
a9b77cd1 | 1705 | emit_jump (label_rtx_for_bb (false_edge->dest)); |
80c7a9eb RH |
1706 | |
1707 | BB_END (bb) = last; | |
1708 | if (BARRIER_P (BB_END (bb))) | |
1709 | BB_END (bb) = PREV_INSN (BB_END (bb)); | |
1710 | update_bb_for_insn (bb); | |
1711 | ||
1712 | new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb); | |
1713 | dest = false_edge->dest; | |
1714 | redirect_edge_succ (false_edge, new_bb); | |
1715 | false_edge->flags |= EDGE_FALLTHRU; | |
1716 | new_bb->count = false_edge->count; | |
1717 | new_bb->frequency = EDGE_FREQUENCY (false_edge); | |
1718 | new_edge = make_edge (new_bb, dest, 0); | |
1719 | new_edge->probability = REG_BR_PROB_BASE; | |
1720 | new_edge->count = new_bb->count; | |
1721 | if (BARRIER_P (BB_END (new_bb))) | |
1722 | BB_END (new_bb) = PREV_INSN (BB_END (new_bb)); | |
1723 | update_bb_for_insn (new_bb); | |
1724 | ||
726a989a | 1725 | maybe_dump_rtl_for_gimple_stmt (stmt, last2); |
c22cacf3 | 1726 | |
7787b4aa JJ |
1727 | if (true_edge->goto_locus) |
1728 | { | |
1729 | set_curr_insn_source_location (true_edge->goto_locus); | |
1730 | set_curr_insn_block (true_edge->goto_block); | |
1731 | true_edge->goto_locus = curr_insn_locator (); | |
1732 | } | |
1733 | true_edge->goto_block = NULL; | |
1734 | ||
80c7a9eb RH |
1735 | return new_bb; |
1736 | } | |
1737 | ||
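
To make the BRANCH_COST special case above concrete, here is a minimal, hypothetical C example (not taken from this file) of source that produces a single-bit BIT_AND_EXPR feeding a GIMPLE_COND; when jumps are cheap it is re-expanded as a short-circuit TRUTH_ANDIF_EXPR jump sequence instead of materializing the boolean:

extern void f (void);

void
g (_Bool a, _Bool b)
{
  /* Gimplifies to roughly: _1 = a & b; if (_1 != 0) goto <then>;
     expand_gimple_cond may turn that back into two cheap branches,
     as if the source had been "if (a && b)".  */
  if (a & b)
    f ();
}
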
28ed065e MM |
1738 | /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL |
1739 | statement STMT. */ | |
1740 | ||
1741 | static void | |
1742 | expand_call_stmt (gimple stmt) | |
1743 | { | |
1744 | tree exp; | |
1745 | tree lhs = gimple_call_lhs (stmt); | |
28ed065e | 1746 | size_t i; |
e23817b3 RG |
1747 | bool builtin_p; |
1748 | tree decl; | |
28ed065e MM |
1749 | |
1750 | exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3); | |
1751 | ||
1752 | CALL_EXPR_FN (exp) = gimple_call_fn (stmt); | |
e23817b3 RG |
1753 | decl = gimple_call_fndecl (stmt); |
1754 | builtin_p = decl && DECL_BUILT_IN (decl); | |
1755 | ||
28ed065e MM |
1756 | TREE_TYPE (exp) = gimple_call_return_type (stmt); |
1757 | CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt); | |
1758 | ||
1759 | for (i = 0; i < gimple_call_num_args (stmt); i++) | |
e23817b3 RG |
1760 | { |
1761 | tree arg = gimple_call_arg (stmt, i); | |
1762 | gimple def; | |
1763 | /* TER substitutes addresses into arguments of builtin functions so | |
1764 | we have a chance to infer more correct alignment information. See PR39954. */ | |
1765 | if (builtin_p | |
1766 | && TREE_CODE (arg) == SSA_NAME | |
1767 | && (def = get_gimple_for_ssa_name (arg)) | |
1768 | && gimple_assign_rhs_code (def) == ADDR_EXPR) | |
1769 | arg = gimple_assign_rhs1 (def); | |
1770 | CALL_EXPR_ARG (exp, i) = arg; | |
1771 | } | |
28ed065e | 1772 | |
93f28ca7 | 1773 | if (gimple_has_side_effects (stmt)) |
28ed065e MM |
1774 | TREE_SIDE_EFFECTS (exp) = 1; |
1775 | ||
93f28ca7 | 1776 | if (gimple_call_nothrow_p (stmt)) |
28ed065e MM |
1777 | TREE_NOTHROW (exp) = 1; |
1778 | ||
1779 | CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt); | |
1780 | CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt); | |
1781 | CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt); | |
1782 | CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt); | |
1783 | CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt); | |
1784 | SET_EXPR_LOCATION (exp, gimple_location (stmt)); | |
1785 | TREE_BLOCK (exp) = gimple_block (stmt); | |
1786 | ||
28ed065e MM |
1787 | if (lhs) |
1788 | expand_assignment (lhs, exp, false); | |
1789 | else | |
1790 | expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL); | |
1791 | } | |
1792 | ||
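
As a hypothetical source-level illustration of the TER case above: after gimplification the address &buf[4] is held in an SSA name, and substituting the defining ADDR_EXPR back into the call lets the memcpy expander see buf's declared alignment (cf. PR39954):

#include <string.h>

static char buf[16] __attribute__ ((aligned (16)));

void
copy4 (char *dst)
{
  /* GIMPLE is roughly: _1 = &buf[4]; memcpy (dst, _1, 4);
     with the ADDR_EXPR forwarded, the expander knows the source
     is 16-byte-aligned data at offset 4.  */
  memcpy (dst, &buf[4], 4);
}
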
1793 | /* A subroutine of expand_gimple_stmt, expanding one gimple statement | |
1794 | STMT that doesn't require special handling for outgoing edges. That | |
1795 | is, no tailcalls and no GIMPLE_COND. */ | |
1796 | ||
1797 | static void | |
1798 | expand_gimple_stmt_1 (gimple stmt) | |
1799 | { | |
1800 | tree op0; | |
1801 | switch (gimple_code (stmt)) | |
1802 | { | |
1803 | case GIMPLE_GOTO: | |
1804 | op0 = gimple_goto_dest (stmt); | |
1805 | if (TREE_CODE (op0) == LABEL_DECL) | |
1806 | expand_goto (op0); | |
1807 | else | |
1808 | expand_computed_goto (op0); | |
1809 | break; | |
1810 | case GIMPLE_LABEL: | |
1811 | expand_label (gimple_label_label (stmt)); | |
1812 | break; | |
1813 | case GIMPLE_NOP: | |
1814 | case GIMPLE_PREDICT: | |
1815 | break; | |
28ed065e MM |
1816 | case GIMPLE_SWITCH: |
1817 | expand_case (stmt); | |
1818 | break; | |
1819 | case GIMPLE_ASM: | |
1820 | expand_asm_stmt (stmt); | |
1821 | break; | |
1822 | case GIMPLE_CALL: | |
1823 | expand_call_stmt (stmt); | |
1824 | break; | |
1825 | ||
1826 | case GIMPLE_RETURN: | |
1827 | op0 = gimple_return_retval (stmt); | |
1828 | ||
1829 | if (op0 && op0 != error_mark_node) | |
1830 | { | |
1831 | tree result = DECL_RESULT (current_function_decl); | |
1832 | ||
1833 | /* If we are not returning the current function's RESULT_DECL, | |
1834 | build an assignment to it. */ | |
1835 | if (op0 != result) | |
1836 | { | |
1837 | /* I believe that a function's RESULT_DECL is unique. */ | |
1838 | gcc_assert (TREE_CODE (op0) != RESULT_DECL); | |
1839 | ||
1840 | /* ??? We'd like to use simply expand_assignment here, | |
1841 | but this fails if the value is of BLKmode but the return | |
1842 | decl is a register. expand_return has special handling | |
1843 | for this combination, which eventually should move | |
1844 | to common code. See comments there. Until then, let's | |
1845 | build a modify expression :-/ */ | |
1846 | op0 = build2 (MODIFY_EXPR, TREE_TYPE (result), | |
1847 | result, op0); | |
1848 | } | |
1849 | } | |
1850 | if (!op0) | |
1851 | expand_null_return (); | |
1852 | else | |
1853 | expand_return (op0); | |
1854 | break; | |
1855 | ||
1856 | case GIMPLE_ASSIGN: | |
1857 | { | |
1858 | tree lhs = gimple_assign_lhs (stmt); | |
1859 | ||
1860 | /* Tree expand used to fiddle with |= and &= of two bitfield | |
1861 | COMPONENT_REFs here. This can't happen with gimple; the LHS | |
1862 | of binary assigns must be a gimple reg. */ | |
1863 | ||
1864 | if (TREE_CODE (lhs) != SSA_NAME | |
1865 | || get_gimple_rhs_class (gimple_expr_code (stmt)) | |
1866 | == GIMPLE_SINGLE_RHS) | |
1867 | { | |
1868 | tree rhs = gimple_assign_rhs1 (stmt); | |
1869 | gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt)) | |
1870 | == GIMPLE_SINGLE_RHS); | |
1871 | if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs)) | |
1872 | SET_EXPR_LOCATION (rhs, gimple_location (stmt)); | |
1873 | expand_assignment (lhs, rhs, | |
1874 | gimple_assign_nontemporal_move_p (stmt)); | |
1875 | } | |
1876 | else | |
1877 | { | |
1878 | rtx target, temp; | |
1879 | bool nontemporal = gimple_assign_nontemporal_move_p (stmt); | |
1880 | struct separate_ops ops; | |
1881 | bool promoted = false; | |
1882 | ||
1883 | target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE); | |
1884 | if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target)) | |
1885 | promoted = true; | |
1886 | ||
1887 | ops.code = gimple_assign_rhs_code (stmt); | |
1888 | ops.type = TREE_TYPE (lhs); | |
1889 | switch (get_gimple_rhs_class (gimple_expr_code (stmt))) | |
1890 | { | |
1891 | case GIMPLE_BINARY_RHS: | |
1892 | ops.op1 = gimple_assign_rhs2 (stmt); | |
1893 | /* Fallthru */ | |
1894 | case GIMPLE_UNARY_RHS: | |
1895 | ops.op0 = gimple_assign_rhs1 (stmt); | |
1896 | break; | |
1897 | default: | |
1898 | gcc_unreachable (); | |
1899 | } | |
1900 | ops.location = gimple_location (stmt); | |
1901 | ||
1902 | /* If we want to use a nontemporal store, force the value to a | |
1903 | register first. If we store into a promoted register, | |
1904 | don't directly expand to target. */ | |
1905 | temp = nontemporal || promoted ? NULL_RTX : target; | |
1906 | temp = expand_expr_real_2 (&ops, temp, GET_MODE (target), | |
1907 | EXPAND_NORMAL); | |
1908 | ||
1909 | if (temp == target) | |
1910 | ; | |
1911 | else if (promoted) | |
1912 | { | |
4e18a7d4 | 1913 | int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target); |
28ed065e MM |
1914 | /* If TEMP is a VOIDmode constant, use convert_modes to make |
1915 | sure that we properly convert it. */ | |
1916 | if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode) | |
1917 | { | |
1918 | temp = convert_modes (GET_MODE (target), | |
1919 | TYPE_MODE (ops.type), | |
4e18a7d4 | 1920 | temp, unsignedp); |
28ed065e | 1921 | temp = convert_modes (GET_MODE (SUBREG_REG (target)), |
4e18a7d4 | 1922 | GET_MODE (target), temp, unsignedp); |
28ed065e MM |
1923 | } |
1924 | ||
4e18a7d4 | 1925 | convert_move (SUBREG_REG (target), temp, unsignedp); |
28ed065e MM |
1926 | } |
1927 | else if (nontemporal && emit_storent_insn (target, temp)) | |
1928 | ; | |
1929 | else | |
1930 | { | |
1931 | temp = force_operand (temp, target); | |
1932 | if (temp != target) | |
1933 | emit_move_insn (target, temp); | |
1934 | } | |
1935 | } | |
1936 | } | |
1937 | break; | |
1938 | ||
1939 | default: | |
1940 | gcc_unreachable (); | |
1941 | } | |
1942 | } | |
1943 | ||
1944 | /* Expand one gimple statement STMT and return the last RTL instruction | |
1945 | before any of the newly generated ones. | |
1946 | ||
1947 | In addition to generating the necessary RTL instructions this also | |
1948 | sets REG_EH_REGION notes if necessary and sets the current source | |
1949 | location for diagnostics. */ | |
1950 | ||
1951 | static rtx | |
1952 | expand_gimple_stmt (gimple stmt) | |
1953 | { | |
1d65f45c | 1954 | int lp_nr = 0; |
28ed065e MM |
1955 | rtx last = NULL; |
1956 | location_t saved_location = input_location; | |
1957 | ||
1958 | last = get_last_insn (); | |
1959 | ||
1960 | /* If this is an expression of some kind and it has an associated line | |
1961 | number, then emit the line number before expanding the expression. | |
1962 | ||
1963 | We need to save and restore the file and line information so that | |
1964 | errors discovered during expansion are emitted with the right | |
1965 | information. It would be better if the diagnostic routines | |
1966 | used the file/line information embedded in the tree nodes rather | |
1967 | than globals. */ | |
1968 | gcc_assert (cfun); | |
1969 | ||
1970 | if (gimple_has_location (stmt)) | |
1971 | { | |
1972 | input_location = gimple_location (stmt); | |
1973 | set_curr_insn_source_location (input_location); | |
1974 | ||
1975 | /* Record where the insns produced belong. */ | |
1976 | set_curr_insn_block (gimple_block (stmt)); | |
1977 | } | |
1978 | ||
1979 | expand_gimple_stmt_1 (stmt); | |
1980 | /* Free any temporaries used to evaluate this statement. */ | |
1981 | free_temp_slots (); | |
1982 | ||
1983 | input_location = saved_location; | |
1984 | ||
1985 | /* Mark all insns that may trap. */ | |
1d65f45c RH |
1986 | lp_nr = lookup_stmt_eh_lp (stmt); |
1987 | if (lp_nr) | |
28ed065e MM |
1988 | { |
1989 | rtx insn; | |
1990 | for (insn = next_real_insn (last); insn; | |
1991 | insn = next_real_insn (insn)) | |
1992 | { | |
1993 | if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX) | |
1994 | /* If we want exceptions for non-call insns, any | |
1995 | may_trap_p instruction may throw. */ | |
1996 | && GET_CODE (PATTERN (insn)) != CLOBBER | |
1997 | && GET_CODE (PATTERN (insn)) != USE | |
1d65f45c RH |
1998 | && insn_could_throw_p (insn)) |
1999 | make_reg_eh_region_note (insn, 0, lp_nr); | |
28ed065e MM |
2000 | } |
2001 | } | |
2002 | ||
2003 | return last; | |
2004 | } | |
2005 | ||
726a989a | 2006 | /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL |
224e770b RH |
2007 | that has CALL_EXPR_TAILCALL set. Returns non-null if we actually |
2008 | generated a tail call (something that might be denied by the ABI | |
cea49550 RH |
2009 | rules governing the call; see calls.c). |
2010 | ||
2011 | Sets CAN_FALLTHRU if we generated a *conditional* tail call, and | |
2012 | can still reach the rest of BB. The case here is __builtin_sqrt, | |
2013 | where the NaN result goes through the external function (with a | |
2014 | tailcall) and the normal result happens via a sqrt instruction. */ | |
80c7a9eb RH |
2015 | |
2016 | static basic_block | |
726a989a | 2017 | expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru) |
80c7a9eb | 2018 | { |
b7211528 | 2019 | rtx last2, last; |
224e770b | 2020 | edge e; |
628f6a4e | 2021 | edge_iterator ei; |
224e770b RH |
2022 | int probability; |
2023 | gcov_type count; | |
80c7a9eb | 2024 | |
28ed065e | 2025 | last2 = last = expand_gimple_stmt (stmt); |
80c7a9eb RH |
2026 | |
2027 | for (last = NEXT_INSN (last); last; last = NEXT_INSN (last)) | |
224e770b RH |
2028 | if (CALL_P (last) && SIBLING_CALL_P (last)) |
2029 | goto found; | |
80c7a9eb | 2030 | |
726a989a | 2031 | maybe_dump_rtl_for_gimple_stmt (stmt, last2); |
b7211528 | 2032 | |
cea49550 | 2033 | *can_fallthru = true; |
224e770b | 2034 | return NULL; |
80c7a9eb | 2035 | |
224e770b RH |
2036 | found: |
2037 | /* ??? Wouldn't it be better to just reset any pending stack adjust? | |
2038 | Any instructions emitted here are about to be deleted. */ | |
2039 | do_pending_stack_adjust (); | |
2040 | ||
2041 | /* Remove any non-eh, non-abnormal edges that don't go to exit. */ | |
2042 | /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be | |
2043 | EH or abnormal edges, we shouldn't have created a tail call in | |
2044 | the first place. So it seems to me we should just be removing | |
2045 | all edges here, or redirecting the existing fallthru edge to | |
2046 | the exit block. */ | |
2047 | ||
224e770b RH |
2048 | probability = 0; |
2049 | count = 0; | |
224e770b | 2050 | |
628f6a4e BE |
2051 | for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); ) |
2052 | { | |
224e770b RH |
2053 | if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH))) |
2054 | { | |
2055 | if (e->dest != EXIT_BLOCK_PTR) | |
80c7a9eb | 2056 | { |
224e770b RH |
2057 | e->dest->count -= e->count; |
2058 | e->dest->frequency -= EDGE_FREQUENCY (e); | |
2059 | if (e->dest->count < 0) | |
c22cacf3 | 2060 | e->dest->count = 0; |
224e770b | 2061 | if (e->dest->frequency < 0) |
c22cacf3 | 2062 | e->dest->frequency = 0; |
80c7a9eb | 2063 | } |
224e770b RH |
2064 | count += e->count; |
2065 | probability += e->probability; | |
2066 | remove_edge (e); | |
80c7a9eb | 2067 | } |
628f6a4e BE |
2068 | else |
2069 | ei_next (&ei); | |
80c7a9eb RH |
2070 | } |
2071 | ||
224e770b RH |
2072 | /* This is somewhat ugly: the call_expr expander often emits instructions |
2073 | after the sibcall (to perform the function return). These confuse the | |
12eff7b7 | 2074 | find_many_sub_basic_blocks code, so we need to get rid of these. */ |
224e770b | 2075 | last = NEXT_INSN (last); |
341c100f | 2076 | gcc_assert (BARRIER_P (last)); |
cea49550 RH |
2077 | |
2078 | *can_fallthru = false; | |
224e770b RH |
2079 | while (NEXT_INSN (last)) |
2080 | { | |
2081 | /* For instance the sqrt builtin expander expands an if with a | |
2082 | sibcall in the then-arm and a label for the else-arm. */ | |
2083 | if (LABEL_P (NEXT_INSN (last))) | |
cea49550 RH |
2084 | { |
2085 | *can_fallthru = true; | |
2086 | break; | |
2087 | } | |
224e770b RH |
2088 | delete_insn (NEXT_INSN (last)); |
2089 | } | |
2090 | ||
2091 | e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL); | |
2092 | e->probability += probability; | |
2093 | e->count += count; | |
2094 | BB_END (bb) = last; | |
2095 | update_bb_for_insn (bb); | |
2096 | ||
2097 | if (NEXT_INSN (last)) | |
2098 | { | |
2099 | bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb); | |
2100 | ||
2101 | last = BB_END (bb); | |
2102 | if (BARRIER_P (last)) | |
2103 | BB_END (bb) = PREV_INSN (last); | |
2104 | } | |
2105 | ||
726a989a | 2106 | maybe_dump_rtl_for_gimple_stmt (stmt, last2); |
b7211528 | 2107 | |
224e770b | 2108 | return bb; |
80c7a9eb RH |
2109 | } |
2110 | ||
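
A minimal, hypothetical example of the __builtin_sqrt case mentioned above: on a target with a hardware sqrt instruction, with errno-setting math in effect, the normal result is expanded inline while the error path may become a sibcall into libm, leaving a conditional tail call from which the rest of the block is still reachable:

#include <math.h>

double
wrap_sqrt (double x)
{
  /* Normal results come from the sqrt instruction; a negative
     input may take a (conditional) tail call into the library.  */
  return sqrt (x);
}
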
b5b8b0ac AO |
2111 | /* Return the difference between the floor and the truncated result of |
2112 | a signed division by OP1 with remainder MOD. */ | |
2113 | static rtx | |
2114 | floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1) | |
2115 | { | |
2116 | /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */ | |
2117 | return gen_rtx_IF_THEN_ELSE | |
2118 | (mode, gen_rtx_NE (BImode, mod, const0_rtx), | |
2119 | gen_rtx_IF_THEN_ELSE | |
2120 | (mode, gen_rtx_LT (BImode, | |
2121 | gen_rtx_DIV (mode, op1, mod), | |
2122 | const0_rtx), | |
2123 | constm1_rtx, const0_rtx), | |
2124 | const0_rtx); | |
2125 | } | |
2126 | ||
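
A standalone sketch in plain C (hypothetical host arithmetic, not RTL, and ignoring the INT_MIN corner case) of the adjustment floor_sdiv_adjust encodes: floor division differs from C's truncated division exactly when the remainder is nonzero and the quotient is negative.

#include <assert.h>

static int
floor_sdiv_adjust_sketch (int op0, int op1)
{
  int mod = op0 % op1;		/* truncated remainder */
  /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
  return (mod != 0 && op1 / mod < 0) ? -1 : 0;
}

int
main (void)
{
  /* -7 / 2 truncates to -3 with remainder -1; the floor is -4.  */
  assert (-7 / 2 + floor_sdiv_adjust_sketch (-7, 2) == -4);
  /* 7 / 2 truncates to 3; positive quotients need no adjustment.  */
  assert (7 / 2 + floor_sdiv_adjust_sketch (7, 2) == 3);
  return 0;
}
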
2127 | /* Return the difference between the ceil and the truncated result of | |
2128 | a signed division by OP1 with remainder MOD. */ | |
2129 | static rtx | |
2130 | ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1) | |
2131 | { | |
2132 | /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */ | |
2133 | return gen_rtx_IF_THEN_ELSE | |
2134 | (mode, gen_rtx_NE (BImode, mod, const0_rtx), | |
2135 | gen_rtx_IF_THEN_ELSE | |
2136 | (mode, gen_rtx_GT (BImode, | |
2137 | gen_rtx_DIV (mode, op1, mod), | |
2138 | const0_rtx), | |
2139 | const1_rtx, const0_rtx), | |
2140 | const0_rtx); | |
2141 | } | |
2142 | ||
2143 | /* Return the difference between the ceil and the truncated result of | |
2144 | an unsigned division by OP1 with remainder MOD. */ | |
2145 | static rtx | |
2146 | ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED) | |
2147 | { | |
2148 | /* (mod != 0 ? 1 : 0) */ | |
2149 | return gen_rtx_IF_THEN_ELSE | |
2150 | (mode, gen_rtx_NE (BImode, mod, const0_rtx), | |
2151 | const1_rtx, const0_rtx); | |
2152 | } | |
2153 | ||
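
In the same hypothetical plain-C style, the unsigned ceiling adjustment: the truncated quotient is bumped toward +infinity whenever the remainder is nonzero.

#include <assert.h>

static unsigned
ceil_udiv_adjust_sketch (unsigned op0, unsigned op1)
{
  /* (mod != 0 ? 1 : 0) */
  return op0 % op1 != 0 ? 1 : 0;
}

int
main (void)
{
  /* 7 / 2 truncates to 3; ceil (7/2) is 4.  */
  assert (7 / 2 + ceil_udiv_adjust_sketch (7, 2) == 4);
  /* 6 / 2 is exact, so no adjustment.  */
  assert (6 / 2 + ceil_udiv_adjust_sketch (6, 2) == 3);
  return 0;
}
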
2154 | /* Return the difference between the rounded and the truncated result | |
2155 | of a signed division by OP1 with remainder MOD. Halfway cases are | |
2156 | rounded away from zero, rather than to the nearest even number. */ | |
2157 | static rtx | |
2158 | round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1) | |
2159 | { | |
2160 | /* (abs (mod) >= abs (op1) - abs (mod) | |
2161 | ? (op1 / mod > 0 ? 1 : -1) | |
2162 | : 0) */ | |
2163 | return gen_rtx_IF_THEN_ELSE | |
2164 | (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod), | |
2165 | gen_rtx_MINUS (mode, | |
2166 | gen_rtx_ABS (mode, op1), | |
2167 | gen_rtx_ABS (mode, mod))), | |
2168 | gen_rtx_IF_THEN_ELSE | |
2169 | (mode, gen_rtx_GT (BImode, | |
2170 | gen_rtx_DIV (mode, op1, mod), | |
2171 | const0_rtx), | |
2172 | const1_rtx, constm1_rtx), | |
2173 | const0_rtx); | |
2174 | } | |
2175 | ||
2176 | /* Return the difference between the rounded and the truncated result | |
2177 | of an unsigned division by OP1 with remainder MOD. Halfway cases | |
2178 | are rounded away from zero, rather than to the nearest even | |
2179 | number. */ | |
2180 | static rtx | |
2181 | round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1) | |
2182 | { | |
2183 | /* (mod >= op1 - mod ? 1 : 0) */ | |
2184 | return gen_rtx_IF_THEN_ELSE | |
2185 | (mode, gen_rtx_GE (BImode, mod, | |
2186 | gen_rtx_MINUS (mode, op1, mod)), | |
2187 | const1_rtx, const0_rtx); | |
2188 | } | |
2189 | ||
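
Similarly, a hypothetical plain-C sketch of round_udiv_adjust: unsigned rounded division bumps the truncated quotient by one exactly when the remainder is at least half the divisor (mod >= op1 - mod).

#include <assert.h>

static unsigned
round_udiv_adjust_sketch (unsigned op0, unsigned op1)
{
  unsigned mod = op0 % op1;
  /* (mod >= op1 - mod ? 1 : 0) */
  return mod >= op1 - mod ? 1 : 0;
}

int
main (void)
{
  /* 7 / 2 truncates to 3; remainder 1 >= 2 - 1, so round up to 4.  */
  assert (7 / 2 + round_udiv_adjust_sketch (7, 2) == 4);
  /* 7 / 3 truncates to 2; remainder 1 < 3 - 1, so it stays 2.  */
  assert (7 / 3 + round_udiv_adjust_sketch (7, 3) == 2);
  return 0;
}
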
dda2da58 AO |
2190 | /* Convert X to MODE, which must be Pmode or ptr_mode, without emitting | |
2191 | any rtl. */ | |
2192 | ||
2193 | static rtx | |
2194 | convert_debug_memory_address (enum machine_mode mode, rtx x) | |
2195 | { | |
2196 | enum machine_mode xmode = GET_MODE (x); | |
2197 | ||
2198 | #ifndef POINTERS_EXTEND_UNSIGNED | |
2199 | gcc_assert (mode == Pmode); | |
2200 | gcc_assert (xmode == mode || xmode == VOIDmode); | |
2201 | #else | |
2202 | gcc_assert (mode == Pmode || mode == ptr_mode); | |
2203 | ||
2204 | if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode) | |
2205 | return x; | |
2206 | ||
2207 | if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (xmode)) | |
2208 | x = simplify_gen_subreg (mode, x, xmode, | |
2209 | subreg_lowpart_offset | |
2210 | (mode, xmode)); | |
2211 | else if (POINTERS_EXTEND_UNSIGNED > 0) | |
2212 | x = gen_rtx_ZERO_EXTEND (mode, x); | |
2213 | else if (!POINTERS_EXTEND_UNSIGNED) | |
2214 | x = gen_rtx_SIGN_EXTEND (mode, x); | |
2215 | else | |
2216 | gcc_unreachable (); | |
2217 | #endif /* POINTERS_EXTEND_UNSIGNED */ | |
2218 | ||
2219 | return x; | |
2220 | } | |
2221 | ||
b5b8b0ac AO |
2222 | /* Return an RTX equivalent to the value of the tree expression |
2223 | EXP. */ | |
2224 | ||
2225 | static rtx | |
2226 | expand_debug_expr (tree exp) | |
2227 | { | |
2228 | rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX; | |
2229 | enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp)); | |
2230 | int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp)); | |
09e881c9 | 2231 | addr_space_t as; |
d4ebfa65 | 2232 | enum machine_mode address_mode; |
b5b8b0ac AO |
2233 | |
2234 | switch (TREE_CODE_CLASS (TREE_CODE (exp))) | |
2235 | { | |
2236 | case tcc_expression: | |
2237 | switch (TREE_CODE (exp)) | |
2238 | { | |
2239 | case COND_EXPR: | |
7ece48b1 | 2240 | case DOT_PROD_EXPR: |
b5b8b0ac AO |
2241 | goto ternary; |
2242 | ||
2243 | case TRUTH_ANDIF_EXPR: | |
2244 | case TRUTH_ORIF_EXPR: | |
2245 | case TRUTH_AND_EXPR: | |
2246 | case TRUTH_OR_EXPR: | |
2247 | case TRUTH_XOR_EXPR: | |
2248 | goto binary; | |
2249 | ||
2250 | case TRUTH_NOT_EXPR: | |
2251 | goto unary; | |
2252 | ||
2253 | default: | |
2254 | break; | |
2255 | } | |
2256 | break; | |
2257 | ||
2258 | ternary: | |
2259 | op2 = expand_debug_expr (TREE_OPERAND (exp, 2)); | |
2260 | if (!op2) | |
2261 | return NULL_RTX; | |
2262 | /* Fall through. */ | |
2263 | ||
2264 | binary: | |
2265 | case tcc_binary: | |
2266 | case tcc_comparison: | |
2267 | op1 = expand_debug_expr (TREE_OPERAND (exp, 1)); | |
2268 | if (!op1) | |
2269 | return NULL_RTX; | |
2270 | /* Fall through. */ | |
2271 | ||
2272 | unary: | |
2273 | case tcc_unary: | |
2274 | op0 = expand_debug_expr (TREE_OPERAND (exp, 0)); | |
2275 | if (!op0) | |
2276 | return NULL_RTX; | |
2277 | break; | |
2278 | ||
2279 | case tcc_type: | |
2280 | case tcc_statement: | |
2281 | gcc_unreachable (); | |
2282 | ||
2283 | case tcc_constant: | |
2284 | case tcc_exceptional: | |
2285 | case tcc_declaration: | |
2286 | case tcc_reference: | |
2287 | case tcc_vl_exp: | |
2288 | break; | |
2289 | } | |
2290 | ||
2291 | switch (TREE_CODE (exp)) | |
2292 | { | |
2293 | case STRING_CST: | |
2294 | if (!lookup_constant_def (exp)) | |
2295 | { | |
e1b243a8 JJ |
2296 | if (strlen (TREE_STRING_POINTER (exp)) + 1 |
2297 | != (size_t) TREE_STRING_LENGTH (exp)) | |
2298 | return NULL_RTX; | |
b5b8b0ac AO |
2299 | op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp)); |
2300 | op0 = gen_rtx_MEM (BLKmode, op0); | |
2301 | set_mem_attributes (op0, exp, 0); | |
2302 | return op0; | |
2303 | } | |
2304 | /* Fall through... */ | |
2305 | ||
2306 | case INTEGER_CST: | |
2307 | case REAL_CST: | |
2308 | case FIXED_CST: | |
2309 | op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER); | |
2310 | return op0; | |
2311 | ||
2312 | case COMPLEX_CST: | |
2313 | gcc_assert (COMPLEX_MODE_P (mode)); | |
2314 | op0 = expand_debug_expr (TREE_REALPART (exp)); | |
b5b8b0ac | 2315 | op1 = expand_debug_expr (TREE_IMAGPART (exp)); |
b5b8b0ac AO |
2316 | return gen_rtx_CONCAT (mode, op0, op1); |
2317 | ||
0ca5af51 AO |
2318 | case DEBUG_EXPR_DECL: |
2319 | op0 = DECL_RTL_IF_SET (exp); | |
2320 | ||
2321 | if (op0) | |
2322 | return op0; | |
2323 | ||
2324 | op0 = gen_rtx_DEBUG_EXPR (mode); | |
e4fb38bd | 2325 | DEBUG_EXPR_TREE_DECL (op0) = exp; |
0ca5af51 AO |
2326 | SET_DECL_RTL (exp, op0); |
2327 | ||
2328 | return op0; | |
2329 | ||
b5b8b0ac AO |
2330 | case VAR_DECL: |
2331 | case PARM_DECL: | |
2332 | case FUNCTION_DECL: | |
2333 | case LABEL_DECL: | |
2334 | case CONST_DECL: | |
2335 | case RESULT_DECL: | |
2336 | op0 = DECL_RTL_IF_SET (exp); | |
2337 | ||
2338 | /* This decl was probably optimized away. */ | |
2339 | if (!op0) | |
e1b243a8 JJ |
2340 | { |
2341 | if (TREE_CODE (exp) != VAR_DECL | |
2342 | || DECL_EXTERNAL (exp) | |
2343 | || !TREE_STATIC (exp) | |
2344 | || !DECL_NAME (exp) | |
0fba566c JJ |
2345 | || DECL_HARD_REGISTER (exp) |
2346 | || mode == VOIDmode) | |
e1b243a8 JJ |
2347 | return NULL; |
2348 | ||
b1aa0655 | 2349 | op0 = make_decl_rtl_for_debug (exp); |
e1b243a8 JJ |
2350 | if (!MEM_P (op0) |
2351 | || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF | |
2352 | || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp) | |
2353 | return NULL; | |
2354 | } | |
2355 | else | |
2356 | op0 = copy_rtx (op0); | |
b5b8b0ac | 2357 | |
06796564 JJ |
2358 | if (GET_MODE (op0) == BLKmode |
2359 | /* If op0 is not BLKmode, but BLKmode is, adjust_mode | |
2360 | below would ICE. While it is likely a FE bug, | |
2361 | try to be robust here. See PR43166. */ | |
132b4e82 JJ |
2362 | || mode == BLKmode |
2363 | || (mode == VOIDmode && GET_MODE (op0) != VOIDmode)) | |
b5b8b0ac AO |
2364 | { |
2365 | gcc_assert (MEM_P (op0)); | |
2366 | op0 = adjust_address_nv (op0, mode, 0); | |
2367 | return op0; | |
2368 | } | |
2369 | ||
2370 | /* Fall through. */ | |
2371 | ||
2372 | adjust_mode: | |
2373 | case PAREN_EXPR: | |
2374 | case NOP_EXPR: | |
2375 | case CONVERT_EXPR: | |
2376 | { | |
2377 | enum machine_mode inner_mode = GET_MODE (op0); | |
2378 | ||
2379 | if (mode == inner_mode) | |
2380 | return op0; | |
2381 | ||
2382 | if (inner_mode == VOIDmode) | |
2383 | { | |
2a8e30fb MM |
2384 | if (TREE_CODE (exp) == SSA_NAME) |
2385 | inner_mode = TYPE_MODE (TREE_TYPE (exp)); | |
2386 | else | |
2387 | inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0))); | |
b5b8b0ac AO |
2388 | if (mode == inner_mode) |
2389 | return op0; | |
2390 | } | |
2391 | ||
2392 | if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode)) | |
2393 | { | |
2394 | if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode)) | |
2395 | op0 = simplify_gen_subreg (mode, op0, inner_mode, 0); | |
2396 | else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode)) | |
2397 | op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode); | |
2398 | else | |
2399 | op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode); | |
2400 | } | |
2401 | else if (FLOAT_MODE_P (mode)) | |
2402 | { | |
2a8e30fb | 2403 | gcc_assert (TREE_CODE (exp) != SSA_NAME); |
b5b8b0ac AO |
2404 | if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))) |
2405 | op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode); | |
2406 | else | |
2407 | op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode); | |
2408 | } | |
2409 | else if (FLOAT_MODE_P (inner_mode)) | |
2410 | { | |
2411 | if (unsignedp) | |
2412 | op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode); | |
2413 | else | |
2414 | op0 = simplify_gen_unary (FIX, mode, op0, inner_mode); | |
2415 | } | |
2416 | else if (CONSTANT_P (op0) | |
2417 | || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode)) | |
2418 | op0 = simplify_gen_subreg (mode, op0, inner_mode, | |
2419 | subreg_lowpart_offset (mode, | |
2420 | inner_mode)); | |
2421 | else if (unsignedp) | |
2422 | op0 = gen_rtx_ZERO_EXTEND (mode, op0); | |
2423 | else | |
2424 | op0 = gen_rtx_SIGN_EXTEND (mode, op0); | |
2425 | ||
2426 | return op0; | |
2427 | } | |
2428 | ||
2429 | case INDIRECT_REF: | |
2430 | case ALIGN_INDIRECT_REF: | |
2431 | case MISALIGNED_INDIRECT_REF: | |
2432 | op0 = expand_debug_expr (TREE_OPERAND (exp, 0)); | |
2433 | if (!op0) | |
2434 | return NULL; | |
2435 | ||
09e881c9 | 2436 | if (POINTER_TYPE_P (TREE_TYPE (exp))) |
4e25ca6b EB |
2437 | { |
2438 | as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp))); | |
2439 | address_mode = targetm.addr_space.address_mode (as); | |
2440 | } | |
09e881c9 | 2441 | else |
4e25ca6b EB |
2442 | { |
2443 | as = ADDR_SPACE_GENERIC; | |
2444 | address_mode = Pmode; | |
2445 | } | |
b5b8b0ac AO |
2446 | |
2447 | if (TREE_CODE (exp) == ALIGN_INDIRECT_REF) | |
2448 | { | |
2449 | int align = TYPE_ALIGN_UNIT (TREE_TYPE (exp)); | |
d4ebfa65 | 2450 | op0 = gen_rtx_AND (address_mode, op0, GEN_INT (-align)); |
b5b8b0ac AO |
2451 | } |
2452 | ||
2453 | op0 = gen_rtx_MEM (mode, op0); | |
2454 | ||
2455 | set_mem_attributes (op0, exp, 0); | |
09e881c9 | 2456 | set_mem_addr_space (op0, as); |
b5b8b0ac AO |
2457 | |
2458 | return op0; | |
2459 | ||
2460 | case TARGET_MEM_REF: | |
2461 | if (TMR_SYMBOL (exp) && !DECL_RTL_SET_P (TMR_SYMBOL (exp))) | |
2462 | return NULL; | |
2463 | ||
2464 | op0 = expand_debug_expr | |
4e25ca6b | 2465 | (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp)); |
b5b8b0ac AO |
2466 | if (!op0) |
2467 | return NULL; | |
2468 | ||
09e881c9 | 2469 | as = TYPE_ADDR_SPACE (TREE_TYPE (exp)); |
b5b8b0ac AO |
2470 | |
2471 | op0 = gen_rtx_MEM (mode, op0); | |
2472 | ||
2473 | set_mem_attributes (op0, exp, 0); | |
09e881c9 | 2474 | set_mem_addr_space (op0, as); |
b5b8b0ac AO |
2475 | |
2476 | return op0; | |
2477 | ||
2478 | case ARRAY_REF: | |
2479 | case ARRAY_RANGE_REF: | |
2480 | case COMPONENT_REF: | |
2481 | case BIT_FIELD_REF: | |
2482 | case REALPART_EXPR: | |
2483 | case IMAGPART_EXPR: | |
2484 | case VIEW_CONVERT_EXPR: | |
2485 | { | |
2486 | enum machine_mode mode1; | |
2487 | HOST_WIDE_INT bitsize, bitpos; | |
2488 | tree offset; | |
2489 | int volatilep = 0; | |
2490 | tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset, | |
2491 | &mode1, &unsignedp, &volatilep, false); | |
2492 | rtx orig_op0; | |
2493 | ||
4f2a9af8 JJ |
2494 | if (bitsize == 0) |
2495 | return NULL; | |
2496 | ||
b5b8b0ac AO |
2497 | orig_op0 = op0 = expand_debug_expr (tem); |
2498 | ||
2499 | if (!op0) | |
2500 | return NULL; | |
2501 | ||
2502 | if (offset) | |
2503 | { | |
dda2da58 AO |
2504 | enum machine_mode addrmode, offmode; |
2505 | ||
aa847cc8 JJ |
2506 | if (!MEM_P (op0)) |
2507 | return NULL; | |
b5b8b0ac | 2508 | |
dda2da58 AO |
2509 | op0 = XEXP (op0, 0); |
2510 | addrmode = GET_MODE (op0); | |
2511 | if (addrmode == VOIDmode) | |
2512 | addrmode = Pmode; | |
2513 | ||
b5b8b0ac AO |
2514 | op1 = expand_debug_expr (offset); |
2515 | if (!op1) | |
2516 | return NULL; | |
2517 | ||
dda2da58 AO |
2518 | offmode = GET_MODE (op1); |
2519 | if (offmode == VOIDmode) | |
2520 | offmode = TYPE_MODE (TREE_TYPE (offset)); | |
2521 | ||
2522 | if (addrmode != offmode) | |
2523 | op1 = simplify_gen_subreg (addrmode, op1, offmode, | |
2524 | subreg_lowpart_offset (addrmode, | |
2525 | offmode)); | |
2526 | ||
2527 | /* Don't use offset_address here, we don't need a | |
2528 | recognizable address, and we don't want to generate | |
2529 | code. */ | |
2530 | op0 = gen_rtx_MEM (mode, gen_rtx_PLUS (addrmode, op0, op1)); | |
b5b8b0ac AO |
2531 | } |
2532 | ||
2533 | if (MEM_P (op0)) | |
2534 | { | |
4f2a9af8 JJ |
2535 | if (mode1 == VOIDmode) |
2536 | /* Bitfield. */ | |
2537 | mode1 = smallest_mode_for_size (bitsize, MODE_INT); | |
b5b8b0ac AO |
2538 | if (bitpos >= BITS_PER_UNIT) |
2539 | { | |
2540 | op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT); | |
2541 | bitpos %= BITS_PER_UNIT; | |
2542 | } | |
2543 | else if (bitpos < 0) | |
2544 | { | |
4f2a9af8 JJ |
2545 | HOST_WIDE_INT units |
2546 | = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT; | |
b5b8b0ac AO |
2547 | op0 = adjust_address_nv (op0, mode1, units); |
2548 | bitpos += units * BITS_PER_UNIT; | |
2549 | } | |
2550 | else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode)) | |
2551 | op0 = adjust_address_nv (op0, mode, 0); | |
2552 | else if (GET_MODE (op0) != mode1) | |
2553 | op0 = adjust_address_nv (op0, mode1, 0); | |
2554 | else | |
2555 | op0 = copy_rtx (op0); | |
2556 | if (op0 == orig_op0) | |
2557 | op0 = shallow_copy_rtx (op0); | |
2558 | set_mem_attributes (op0, exp, 0); | |
2559 | } | |
2560 | ||
2561 | if (bitpos == 0 && mode == GET_MODE (op0)) | |
2562 | return op0; | |
2563 | ||
2d3fc6aa JJ |
2564 | if (bitpos < 0) |
2565 | return NULL; | |
2566 | ||
88c04a5d JJ |
2567 | if (GET_MODE (op0) == BLKmode) |
2568 | return NULL; | |
2569 | ||
b5b8b0ac AO |
2570 | if ((bitpos % BITS_PER_UNIT) == 0 |
2571 | && bitsize == GET_MODE_BITSIZE (mode1)) | |
2572 | { | |
2573 | enum machine_mode opmode = GET_MODE (op0); | |
2574 | ||
b5b8b0ac AO |
2575 | if (opmode == VOIDmode) |
2576 | opmode = mode1; | |
2577 | ||
2578 | /* This condition may hold if we're expanding the address | |
2579 | right past the end of an array that turned out not to | |
2580 | be addressable (i.e., the address was only computed in | |
2581 | debug stmts). The gen_subreg below would rightfully | |
2582 | crash, and the address doesn't really exist, so just | |
2583 | drop it. */ | |
2584 | if (bitpos >= GET_MODE_BITSIZE (opmode)) | |
2585 | return NULL; | |
2586 | ||
7d5d39bb JJ |
2587 | if ((bitpos % GET_MODE_BITSIZE (mode)) == 0) |
2588 | return simplify_gen_subreg (mode, op0, opmode, | |
2589 | bitpos / BITS_PER_UNIT); | |
b5b8b0ac AO |
2590 | } |
2591 | ||
2592 | return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0)) | |
2593 | && TYPE_UNSIGNED (TREE_TYPE (exp)) | |
2594 | ? SIGN_EXTRACT | |
2595 | : ZERO_EXTRACT, mode, | |
2596 | GET_MODE (op0) != VOIDmode | |
2597 | ? GET_MODE (op0) : mode1, | |
2598 | op0, GEN_INT (bitsize), GEN_INT (bitpos)); | |
2599 | } | |
2600 | ||
b5b8b0ac AO |
2601 | case ABS_EXPR: |
2602 | return gen_rtx_ABS (mode, op0); | |
2603 | ||
2604 | case NEGATE_EXPR: | |
2605 | return gen_rtx_NEG (mode, op0); | |
2606 | ||
2607 | case BIT_NOT_EXPR: | |
2608 | return gen_rtx_NOT (mode, op0); | |
2609 | ||
2610 | case FLOAT_EXPR: | |
2611 | if (unsignedp) | |
2612 | return gen_rtx_UNSIGNED_FLOAT (mode, op0); | |
2613 | else | |
2614 | return gen_rtx_FLOAT (mode, op0); | |
2615 | ||
2616 | case FIX_TRUNC_EXPR: | |
2617 | if (unsignedp) | |
2618 | return gen_rtx_UNSIGNED_FIX (mode, op0); | |
2619 | else | |
2620 | return gen_rtx_FIX (mode, op0); | |
2621 | ||
2622 | case POINTER_PLUS_EXPR: | |
576319a7 DD |
2623 | /* For the rare target where pointers are not the same size as |
2624 | size_t, we need to check for mis-matched modes and correct | |
2625 | the addend. */ | |
2626 | if (op0 && op1 | |
2627 | && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode | |
2628 | && GET_MODE (op0) != GET_MODE (op1)) | |
2629 | { | |
2630 | if (GET_MODE_BITSIZE (GET_MODE (op0)) < GET_MODE_BITSIZE (GET_MODE (op1))) | |
2631 | op1 = gen_rtx_TRUNCATE (GET_MODE (op0), op1); | |
2632 | else | |
2633 | /* We always sign-extend, regardless of the signedness of | |
2634 | the operand, because the operand is always unsigned | |
2635 | here even if the original C expression is signed. */ | |
2636 | op1 = gen_rtx_SIGN_EXTEND (GET_MODE (op0), op1); | |
2637 | } | |
2638 | /* Fall through. */ | |
b5b8b0ac AO |
2639 | case PLUS_EXPR: |
2640 | return gen_rtx_PLUS (mode, op0, op1); | |
2641 | ||
2642 | case MINUS_EXPR: | |
2643 | return gen_rtx_MINUS (mode, op0, op1); | |
2644 | ||
2645 | case MULT_EXPR: | |
2646 | return gen_rtx_MULT (mode, op0, op1); | |
2647 | ||
2648 | case RDIV_EXPR: | |
2649 | case TRUNC_DIV_EXPR: | |
2650 | case EXACT_DIV_EXPR: | |
2651 | if (unsignedp) | |
2652 | return gen_rtx_UDIV (mode, op0, op1); | |
2653 | else | |
2654 | return gen_rtx_DIV (mode, op0, op1); | |
2655 | ||
2656 | case TRUNC_MOD_EXPR: | |
2657 | if (unsignedp) | |
2658 | return gen_rtx_UMOD (mode, op0, op1); | |
2659 | else | |
2660 | return gen_rtx_MOD (mode, op0, op1); | |
2661 | ||
2662 | case FLOOR_DIV_EXPR: | |
2663 | if (unsignedp) | |
2664 | return gen_rtx_UDIV (mode, op0, op1); | |
2665 | else | |
2666 | { | |
2667 | rtx div = gen_rtx_DIV (mode, op0, op1); | |
2668 | rtx mod = gen_rtx_MOD (mode, op0, op1); | |
2669 | rtx adj = floor_sdiv_adjust (mode, mod, op1); | |
2670 | return gen_rtx_PLUS (mode, div, adj); | |
2671 | } | |
2672 | ||
2673 | case FLOOR_MOD_EXPR: | |
2674 | if (unsignedp) | |
2675 | return gen_rtx_UMOD (mode, op0, op1); | |
2676 | else | |
2677 | { | |
2678 | rtx mod = gen_rtx_MOD (mode, op0, op1); | |
2679 | rtx adj = floor_sdiv_adjust (mode, mod, op1); | |
2680 | adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1)); | |
2681 | return gen_rtx_PLUS (mode, mod, adj); | |
2682 | } | |
2683 | ||
2684 | case CEIL_DIV_EXPR: | |
2685 | if (unsignedp) | |
2686 | { | |
2687 | rtx div = gen_rtx_UDIV (mode, op0, op1); | |
2688 | rtx mod = gen_rtx_UMOD (mode, op0, op1); | |
2689 | rtx adj = ceil_udiv_adjust (mode, mod, op1); | |
2690 | return gen_rtx_PLUS (mode, div, adj); | |
2691 | } | |
2692 | else | |
2693 | { | |
2694 | rtx div = gen_rtx_DIV (mode, op0, op1); | |
2695 | rtx mod = gen_rtx_MOD (mode, op0, op1); | |
2696 | rtx adj = ceil_sdiv_adjust (mode, mod, op1); | |
2697 | return gen_rtx_PLUS (mode, div, adj); | |
2698 | } | |
2699 | ||
2700 | case CEIL_MOD_EXPR: | |
2701 | if (unsignedp) | |
2702 | { | |
2703 | rtx mod = gen_rtx_UMOD (mode, op0, op1); | |
2704 | rtx adj = ceil_udiv_adjust (mode, mod, op1); | |
2705 | adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1)); | |
2706 | return gen_rtx_PLUS (mode, mod, adj); | |
2707 | } | |
2708 | else | |
2709 | { | |
2710 | rtx mod = gen_rtx_MOD (mode, op0, op1); | |
2711 | rtx adj = ceil_sdiv_adjust (mode, mod, op1); | |
2712 | adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1)); | |
2713 | return gen_rtx_PLUS (mode, mod, adj); | |
2714 | } | |
2715 | ||
2716 | case ROUND_DIV_EXPR: | |
2717 | if (unsignedp) | |
2718 | { | |
2719 | rtx div = gen_rtx_UDIV (mode, op0, op1); | |
2720 | rtx mod = gen_rtx_UMOD (mode, op0, op1); | |
2721 | rtx adj = round_udiv_adjust (mode, mod, op1); | |
2722 | return gen_rtx_PLUS (mode, div, adj); | |
2723 | } | |
2724 | else | |
2725 | { | |
2726 | rtx div = gen_rtx_DIV (mode, op0, op1); | |
2727 | rtx mod = gen_rtx_MOD (mode, op0, op1); | |
2728 | rtx adj = round_sdiv_adjust (mode, mod, op1); | |
2729 | return gen_rtx_PLUS (mode, div, adj); | |
2730 | } | |
2731 | ||
2732 | case ROUND_MOD_EXPR: | |
2733 | if (unsignedp) | |
2734 | { | |
2735 | rtx mod = gen_rtx_UMOD (mode, op0, op1); | |
2736 | rtx adj = round_udiv_adjust (mode, mod, op1); | |
2737 | adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1)); | |
2738 | return gen_rtx_PLUS (mode, mod, adj); | |
2739 | } | |
2740 | else | |
2741 | { | |
2742 | rtx mod = gen_rtx_MOD (mode, op0, op1); | |
2743 | rtx adj = round_sdiv_adjust (mode, mod, op1); | |
2744 | adj = gen_rtx_NEG (mode, gen_rtx_MULT (mode, adj, op1)); | |
2745 | return gen_rtx_PLUS (mode, mod, adj); | |
2746 | } | |
2747 | ||
2748 | case LSHIFT_EXPR: | |
2749 | return gen_rtx_ASHIFT (mode, op0, op1); | |
2750 | ||
2751 | case RSHIFT_EXPR: | |
2752 | if (unsignedp) | |
2753 | return gen_rtx_LSHIFTRT (mode, op0, op1); | |
2754 | else | |
2755 | return gen_rtx_ASHIFTRT (mode, op0, op1); | |
2756 | ||
2757 | case LROTATE_EXPR: | |
2758 | return gen_rtx_ROTATE (mode, op0, op1); | |
2759 | ||
2760 | case RROTATE_EXPR: | |
2761 | return gen_rtx_ROTATERT (mode, op0, op1); | |
2762 | ||
2763 | case MIN_EXPR: | |
2764 | if (unsignedp) | |
2765 | return gen_rtx_UMIN (mode, op0, op1); | |
2766 | else | |
2767 | return gen_rtx_SMIN (mode, op0, op1); | |
2768 | ||
2769 | case MAX_EXPR: | |
2770 | if (unsignedp) | |
2771 | return gen_rtx_UMAX (mode, op0, op1); | |
2772 | else | |
2773 | return gen_rtx_SMAX (mode, op0, op1); | |
2774 | ||
2775 | case BIT_AND_EXPR: | |
2776 | case TRUTH_AND_EXPR: | |
2777 | return gen_rtx_AND (mode, op0, op1); | |
2778 | ||
2779 | case BIT_IOR_EXPR: | |
2780 | case TRUTH_OR_EXPR: | |
2781 | return gen_rtx_IOR (mode, op0, op1); | |
2782 | ||
2783 | case BIT_XOR_EXPR: | |
2784 | case TRUTH_XOR_EXPR: | |
2785 | return gen_rtx_XOR (mode, op0, op1); | |
2786 | ||
2787 | case TRUTH_ANDIF_EXPR: | |
2788 | return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx); | |
2789 | ||
2790 | case TRUTH_ORIF_EXPR: | |
2791 | return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1); | |
2792 | ||
2793 | case TRUTH_NOT_EXPR: | |
2794 | return gen_rtx_EQ (mode, op0, const0_rtx); | |
2795 | ||
2796 | case LT_EXPR: | |
2797 | if (unsignedp) | |
2798 | return gen_rtx_LTU (mode, op0, op1); | |
2799 | else | |
2800 | return gen_rtx_LT (mode, op0, op1); | |
2801 | ||
2802 | case LE_EXPR: | |
2803 | if (unsignedp) | |
2804 | return gen_rtx_LEU (mode, op0, op1); | |
2805 | else | |
2806 | return gen_rtx_LE (mode, op0, op1); | |
2807 | ||
2808 | case GT_EXPR: | |
2809 | if (unsignedp) | |
2810 | return gen_rtx_GTU (mode, op0, op1); | |
2811 | else | |
2812 | return gen_rtx_GT (mode, op0, op1); | |
2813 | ||
2814 | case GE_EXPR: | |
2815 | if (unsignedp) | |
2816 | return gen_rtx_GEU (mode, op0, op1); | |
2817 | else | |
2818 | return gen_rtx_GE (mode, op0, op1); | |
2819 | ||
2820 | case EQ_EXPR: | |
2821 | return gen_rtx_EQ (mode, op0, op1); | |
2822 | ||
2823 | case NE_EXPR: | |
2824 | return gen_rtx_NE (mode, op0, op1); | |
2825 | ||
2826 | case UNORDERED_EXPR: | |
2827 | return gen_rtx_UNORDERED (mode, op0, op1); | |
2828 | ||
2829 | case ORDERED_EXPR: | |
2830 | return gen_rtx_ORDERED (mode, op0, op1); | |
2831 | ||
2832 | case UNLT_EXPR: | |
2833 | return gen_rtx_UNLT (mode, op0, op1); | |
2834 | ||
2835 | case UNLE_EXPR: | |
2836 | return gen_rtx_UNLE (mode, op0, op1); | |
2837 | ||
2838 | case UNGT_EXPR: | |
2839 | return gen_rtx_UNGT (mode, op0, op1); | |
2840 | ||
2841 | case UNGE_EXPR: | |
2842 | return gen_rtx_UNGE (mode, op0, op1); | |
2843 | ||
2844 | case UNEQ_EXPR: | |
2845 | return gen_rtx_UNEQ (mode, op0, op1); | |
2846 | ||
2847 | case LTGT_EXPR: | |
2848 | return gen_rtx_LTGT (mode, op0, op1); | |
2849 | ||
2850 | case COND_EXPR: | |
2851 | return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2); | |
2852 | ||
2853 | case COMPLEX_EXPR: | |
2854 | gcc_assert (COMPLEX_MODE_P (mode)); | |
2855 | if (GET_MODE (op0) == VOIDmode) | |
2856 | op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0); | |
2857 | if (GET_MODE (op1) == VOIDmode) | |
2858 | op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1); | |
2859 | return gen_rtx_CONCAT (mode, op0, op1); | |
2860 | ||
d02a5a4b JJ |
2861 | case CONJ_EXPR: |
2862 | if (GET_CODE (op0) == CONCAT) | |
2863 | return gen_rtx_CONCAT (mode, XEXP (op0, 0), | |
2864 | gen_rtx_NEG (GET_MODE_INNER (mode), | |
2865 | XEXP (op0, 1))); | |
2866 | else | |
2867 | { | |
2868 | enum machine_mode imode = GET_MODE_INNER (mode); | |
2869 | rtx re, im; | |
2870 | ||
2871 | if (MEM_P (op0)) | |
2872 | { | |
2873 | re = adjust_address_nv (op0, imode, 0); | |
2874 | im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode)); | |
2875 | } | |
2876 | else | |
2877 | { | |
2878 | enum machine_mode ifmode = int_mode_for_mode (mode); | |
2879 | enum machine_mode ihmode = int_mode_for_mode (imode); | |
2880 | rtx halfsize; | |
2881 | if (ifmode == BLKmode || ihmode == BLKmode) | |
2882 | return NULL; | |
2883 | halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode)); | |
2884 | re = op0; | |
2885 | if (mode != ifmode) | |
2886 | re = gen_rtx_SUBREG (ifmode, re, 0); | |
2887 | re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx); | |
2888 | if (imode != ihmode) | |
2889 | re = gen_rtx_SUBREG (imode, re, 0); | |
2890 | im = copy_rtx (op0); | |
2891 | if (mode != ifmode) | |
2892 | im = gen_rtx_SUBREG (ifmode, im, 0); | |
2893 | im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize); | |
2894 | if (imode != ihmode) | |
2895 | im = gen_rtx_SUBREG (imode, im, 0); | |
2896 | } | |
2897 | im = gen_rtx_NEG (imode, im); | |
2898 | return gen_rtx_CONCAT (mode, re, im); | |
2899 | } | |
2900 | ||
b5b8b0ac AO |
2901 | case ADDR_EXPR: |
2902 | op0 = expand_debug_expr (TREE_OPERAND (exp, 0)); | |
2903 | if (!op0 || !MEM_P (op0)) | |
2904 | return NULL; | |
2905 | ||
dda2da58 AO |
2906 | op0 = convert_debug_memory_address (mode, XEXP (op0, 0)); |
2907 | ||
2908 | return op0; | |
b5b8b0ac AO |
2909 | |
2910 | case VECTOR_CST: | |
2911 | exp = build_constructor_from_list (TREE_TYPE (exp), | |
2912 | TREE_VECTOR_CST_ELTS (exp)); | |
2913 | /* Fall through. */ | |
2914 | ||
2915 | case CONSTRUCTOR: | |
2916 | if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE) | |
2917 | { | |
2918 | unsigned i; | |
2919 | tree val; | |
2920 | ||
2921 | op0 = gen_rtx_CONCATN | |
2922 | (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))); | |
2923 | ||
2924 | FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val) | |
2925 | { | |
2926 | op1 = expand_debug_expr (val); | |
2927 | if (!op1) | |
2928 | return NULL; | |
2929 | XVECEXP (op0, 0, i) = op1; | |
2930 | } | |
2931 | ||
2932 | if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))) | |
2933 | { | |
2934 | op1 = expand_debug_expr | |
2935 | (fold_convert (TREE_TYPE (TREE_TYPE (exp)), integer_zero_node)); | |
2936 | ||
2937 | if (!op1) | |
2938 | return NULL; | |
2939 | ||
2940 | for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++) | |
2941 | XVECEXP (op0, 0, i) = op1; | |
2942 | } | |
2943 | ||
2944 | return op0; | |
2945 | } | |
2946 | else | |
2947 | goto flag_unsupported; | |
2948 | ||
2949 | case CALL_EXPR: | |
2950 | /* ??? Maybe handle some builtins? */ | |
2951 | return NULL; | |
2952 | ||
2953 | case SSA_NAME: | |
2954 | { | |
2a8e30fb MM |
2955 | gimple g = get_gimple_for_ssa_name (exp); |
2956 | if (g) | |
2957 | { | |
2958 | op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g)); | |
2959 | if (!op0) | |
2960 | return NULL; | |
2961 | } | |
2962 | else | |
2963 | { | |
2964 | int part = var_to_partition (SA.map, exp); | |
b5b8b0ac | 2965 | |
2a8e30fb MM |
2966 | if (part == NO_PARTITION) |
2967 | return NULL; | |
b5b8b0ac | 2968 | |
2a8e30fb | 2969 | gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions); |
b5b8b0ac | 2970 | |
2a8e30fb MM |
2971 | op0 = SA.partition_to_pseudo[part]; |
2972 | } | |
b5b8b0ac AO |
2973 | goto adjust_mode; |
2974 | } | |
2975 | ||
2976 | case ERROR_MARK: | |
2977 | return NULL; | |
2978 | ||
7ece48b1 JJ |
2979 | /* Vector stuff. For most of these tree codes we have no RTL codes. */ | |
2980 | case REALIGN_LOAD_EXPR: | |
2981 | case REDUC_MAX_EXPR: | |
2982 | case REDUC_MIN_EXPR: | |
2983 | case REDUC_PLUS_EXPR: | |
2984 | case VEC_COND_EXPR: | |
2985 | case VEC_EXTRACT_EVEN_EXPR: | |
2986 | case VEC_EXTRACT_ODD_EXPR: | |
2987 | case VEC_INTERLEAVE_HIGH_EXPR: | |
2988 | case VEC_INTERLEAVE_LOW_EXPR: | |
2989 | case VEC_LSHIFT_EXPR: | |
2990 | case VEC_PACK_FIX_TRUNC_EXPR: | |
2991 | case VEC_PACK_SAT_EXPR: | |
2992 | case VEC_PACK_TRUNC_EXPR: | |
2993 | case VEC_RSHIFT_EXPR: | |
2994 | case VEC_UNPACK_FLOAT_HI_EXPR: | |
2995 | case VEC_UNPACK_FLOAT_LO_EXPR: | |
2996 | case VEC_UNPACK_HI_EXPR: | |
2997 | case VEC_UNPACK_LO_EXPR: | |
2998 | case VEC_WIDEN_MULT_HI_EXPR: | |
2999 | case VEC_WIDEN_MULT_LO_EXPR: | |
3000 | return NULL; | |
3001 | ||
3002 | /* Misc codes. */ | |
3003 | case ADDR_SPACE_CONVERT_EXPR: | |
3004 | case FIXED_CONVERT_EXPR: | |
3005 | case OBJ_TYPE_REF: | |
3006 | case WITH_SIZE_EXPR: | |
3007 | return NULL; | |
3008 | ||
3009 | case DOT_PROD_EXPR: | |
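| /* DOT_PROD_EXPR <x, y, acc> accumulates the products of X and Y into | |
| ACC. Only the degenerate scalar-int case is handled here: widen | |
| both multiplicands, multiply, and add OP2. */ | |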
3010 | if (SCALAR_INT_MODE_P (GET_MODE (op0)) | |
3011 | && SCALAR_INT_MODE_P (mode)) | |
3012 | { | |
3013 | if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))) | |
3014 | op0 = gen_rtx_ZERO_EXTEND (mode, op0); | |
3015 | else | |
3016 | op0 = gen_rtx_SIGN_EXTEND (mode, op0); | |
3017 | if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))) | |
3018 | op1 = gen_rtx_ZERO_EXTEND (mode, op1); | |
3019 | else | |
3020 | op1 = gen_rtx_SIGN_EXTEND (mode, op1); | |
3021 | op0 = gen_rtx_MULT (mode, op0, op1); | |
3022 | return gen_rtx_PLUS (mode, op0, op2); | |
3023 | } | |
3024 | return NULL; | |
3025 | ||
3026 | case WIDEN_MULT_EXPR: | |
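| /* WIDEN_MULT_EXPR multiplies two narrower operands into a wider | |
| result, as in the illustrative (not from this file) source | |
| long long p = (long long) i * j; with int i, j. | |
| The debug expansion below extends both operands to MODE, choosing | |
| sign- or zero-extension from the operand types, and multiplies. */ | |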
3027 | if (SCALAR_INT_MODE_P (GET_MODE (op0)) | |
3028 | && SCALAR_INT_MODE_P (mode)) | |
3029 | { | |
5b58b39b | 3030 | enum machine_mode inner_mode = GET_MODE (op0); |
7ece48b1 | 3031 | if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))) |
5b58b39b | 3032 | op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode); |
7ece48b1 | 3033 | else |
5b58b39b | 3034 | op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode); |
7ece48b1 | 3035 | if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1)))) |
5b58b39b | 3036 | op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode); |
7ece48b1 | 3037 | else |
5b58b39b | 3038 | op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode); |
7ece48b1 JJ |
3039 | return gen_rtx_MULT (mode, op0, op1); |
3040 | } | |
3041 | return NULL; | |
3042 | ||
3043 | case WIDEN_SUM_EXPR: | |
3044 | if (SCALAR_INT_MODE_P (GET_MODE (op0)) | |
3045 | && SCALAR_INT_MODE_P (mode)) | |
3046 | { | |
3047 | if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))) | |
3048 | op0 = gen_rtx_ZERO_EXTEND (mode, op0); | |
3049 | else | |
3050 | op0 = gen_rtx_SIGN_EXTEND (mode, op0); | |
3051 | return gen_rtx_PLUS (mode, op0, op1); | |
3052 | } | |
3053 | return NULL; | |
3054 | ||
b5b8b0ac AO |
3055 | default: |
3056 | flag_unsupported: | |
3057 | #ifdef ENABLE_CHECKING | |
3058 | debug_tree (exp); | |
3059 | gcc_unreachable (); | |
3060 | #else | |
3061 | return NULL; | |
3062 | #endif | |
3063 | } | |
3064 | } | |
3065 | ||
3066 | /* Expand the _LOCs in debug insns. We run this after expanding all | |
3067 | regular insns, so that any variables referenced in the function | |
3068 | will have their DECL_RTLs set. */ | |
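| /* E.g. a debug insn whose location still holds the tree Y + 1 gets | |
| that tree expanded here into an rtx such as | |
| (plus:SI (reg:SI 59) (const_int 1)) | |
| (register number illustrative); if expansion fails, the location | |
| becomes the placeholder from gen_rtx_UNKNOWN_VAR_LOC. */ | |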
3069 | ||
3070 | static void | |
3071 | expand_debug_locations (void) | |
3072 | { | |
3073 | rtx insn; | |
3074 | rtx last = get_last_insn (); | |
3075 | int save_strict_alias = flag_strict_aliasing; | |
3076 | ||
3077 | /* New alias sets while setting up memory attributes cause | |
3078 | -fcompare-debug failures, even though they don't bring about any | |
3079 | codegen changes. */ | |
3080 | flag_strict_aliasing = 0; | |
3081 | ||
3082 | for (insn = get_insns (); insn; insn = NEXT_INSN (insn)) | |
3083 | if (DEBUG_INSN_P (insn)) | |
3084 | { | |
3085 | tree value = (tree)INSN_VAR_LOCATION_LOC (insn); | |
3086 | rtx val; | |
3087 | enum machine_mode mode; | |
3088 | ||
3089 | if (value == NULL_TREE) | |
3090 | val = NULL_RTX; | |
3091 | else | |
3092 | { | |
3093 | val = expand_debug_expr (value); | |
3094 | gcc_assert (last == get_last_insn ()); | |
3095 | } | |
3096 | ||
3097 | if (!val) | |
3098 | val = gen_rtx_UNKNOWN_VAR_LOC (); | |
3099 | else | |
3100 | { | |
3101 | mode = GET_MODE (INSN_VAR_LOCATION (insn)); | |
3102 | ||
3103 | gcc_assert (mode == GET_MODE (val) | |
3104 | || (GET_MODE (val) == VOIDmode | |
3105 | && (CONST_INT_P (val) | |
3106 | || GET_CODE (val) == CONST_FIXED | |
3107 | || GET_CODE (val) == CONST_DOUBLE | |
3108 | || GET_CODE (val) == LABEL_REF))); | |
3109 | } | |
3110 | ||
3111 | INSN_VAR_LOCATION_LOC (insn) = val; | |
3112 | } | |
3113 | ||
3114 | flag_strict_aliasing = save_strict_alias; | |
3115 | } | |
3116 | ||
242229bb JH |
3117 | /* Expand basic block BB from GIMPLE trees to RTL. */ |
3118 | ||
3119 | static basic_block | |
10d22567 | 3120 | expand_gimple_basic_block (basic_block bb) |
242229bb | 3121 | { |
726a989a RB |
3122 | gimple_stmt_iterator gsi; |
3123 | gimple_seq stmts; | |
3124 | gimple stmt = NULL; | |
242229bb JH |
3125 | rtx note, last; |
3126 | edge e; | |
628f6a4e | 3127 | edge_iterator ei; |
8b11009b | 3128 | void **elt; |
242229bb JH |
3129 | |
3130 | if (dump_file) | |
726a989a RB |
3131 | fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n", |
3132 | bb->index); | |
3133 | ||
3134 | /* Note that since we are now transitioning from GIMPLE to RTL, we | |
3135 | cannot use the gsi_*_bb() routines because they expect the basic | |
3136 | block to be in GIMPLE, instead of RTL. Therefore, we need to | |
3137 | access the BB sequence directly. */ | |
3138 | stmts = bb_seq (bb); | |
3139 | bb->il.gimple = NULL; | |
bf08ebeb | 3140 | rtl_profile_for_bb (bb); |
5e2d947c JH |
3141 | init_rtl_bb_info (bb); |
3142 | bb->flags |= BB_RTL; | |
3143 | ||
a9b77cd1 ZD |
3144 | /* Remove the RETURN_EXPR if we may fall through to the exit
3145 | instead. */ | |
726a989a RB |
3146 | gsi = gsi_last (stmts); |
3147 | if (!gsi_end_p (gsi) | |
3148 | && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN) | |
a9b77cd1 | 3149 | { |
726a989a | 3150 | gimple ret_stmt = gsi_stmt (gsi); |
a9b77cd1 ZD |
3151 | |
3152 | gcc_assert (single_succ_p (bb)); | |
3153 | gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR); | |
3154 | ||
3155 | if (bb->next_bb == EXIT_BLOCK_PTR | |
726a989a | 3156 | && !gimple_return_retval (ret_stmt)) |
a9b77cd1 | 3157 | { |
726a989a | 3158 | gsi_remove (&gsi, false); |
a9b77cd1 ZD |
3159 | single_succ_edge (bb)->flags |= EDGE_FALLTHRU; |
3160 | } | |
3161 | } | |
3162 | ||
726a989a RB |
3163 | gsi = gsi_start (stmts); |
3164 | if (!gsi_end_p (gsi)) | |
8b11009b | 3165 | { |
726a989a RB |
3166 | stmt = gsi_stmt (gsi); |
3167 | if (gimple_code (stmt) != GIMPLE_LABEL) | |
3168 | stmt = NULL; | |
8b11009b | 3169 | } |
242229bb | 3170 | |
8b11009b ZD |
3171 | elt = pointer_map_contains (lab_rtx_for_bb, bb); |
3172 | ||
3173 | if (stmt || elt) | |
242229bb JH |
3174 | { |
3175 | last = get_last_insn (); | |
3176 | ||
8b11009b ZD |
3177 | if (stmt) |
3178 | { | |
28ed065e | 3179 | expand_gimple_stmt (stmt); |
726a989a | 3180 | gsi_next (&gsi); |
8b11009b ZD |
3181 | } |
3182 | ||
3183 | if (elt) | |
ae50c0cb | 3184 | emit_label ((rtx) *elt); |
242229bb | 3185 | |
caf93cb0 | 3186 | /* Java emits line number notes at the top of labels.
c22cacf3 | 3187 | ??? Make this go away once line number notes are obsoleted. */ |
242229bb | 3188 | BB_HEAD (bb) = NEXT_INSN (last); |
4b4bf941 | 3189 | if (NOTE_P (BB_HEAD (bb))) |
242229bb | 3190 | BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb)); |
242229bb | 3191 | note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb)); |
b7211528 | 3192 | |
726a989a | 3193 | maybe_dump_rtl_for_gimple_stmt (stmt, last); |
242229bb JH |
3194 | } |
3195 | else | |
3196 | note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK); | |
3197 | ||
3198 | NOTE_BASIC_BLOCK (note) = bb; | |
3199 | ||
726a989a | 3200 | for (; !gsi_end_p (gsi); gsi_next (&gsi)) |
242229bb | 3201 | { |
cea49550 | 3202 | basic_block new_bb; |
242229bb | 3203 | |
b5b8b0ac | 3204 | stmt = gsi_stmt (gsi); |
2a8e30fb MM |
3205 | |
3206 | /* If this statement is a non-debug one, and we generate debug | |
3207 | insns, then this one might be the last real use of a TERed | |
3208 | SSA_NAME, but where there are still some debug uses further | |
3209 | down. Expanding the current SSA name in such further debug | |
3210 | uses by their RHS might lead to wrong debug info, as coalescing | |
3211 | might make the operands of such RHS be placed into the same | |
3212 | pseudo as something else. Like so: | |
3213 | a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead | |
3214 | use(a_1); | |
3215 | a_2 = ... | |
3216 | #DEBUG ... => a_1 | |
3217 | As a_0 and a_2 don't overlap in lifetime, assume they are coalesced. | |
3218 | If we would now expand a_1 by its RHS (a_0 + 1) in the debug use,
3219 | the write to a_2 would actually have clobbered the place which | |
3220 | formerly held a_0. | |
3221 | ||
3222 | So, instead of that, we recognize the situation, and generate | |
3223 | debug temporaries at the last real use of TERed SSA names: | |
3224 | a_1 = a_0 + 1; | |
3225 | #DEBUG #D1 => a_1 | |
3226 | use(a_1); | |
3227 | a_2 = ... | |
3228 | #DEBUG ... => #D1 | |
3229 | */ | |
3230 | if (MAY_HAVE_DEBUG_INSNS | |
3231 | && SA.values | |
3232 | && !is_gimple_debug (stmt)) | |
3233 | { | |
3234 | ssa_op_iter iter; | |
3235 | tree op; | |
3236 | gimple def; | |
3237 | ||
3238 | location_t sloc = get_curr_insn_source_location (); | |
3239 | tree sblock = get_curr_insn_block (); | |
3240 | ||
3241 | /* Look for SSA names that have their last use here (TERed | |
3242 | names always have only one real use). */ | |
3243 | FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE) | |
3244 | if ((def = get_gimple_for_ssa_name (op))) | |
3245 | { | |
3246 | imm_use_iterator imm_iter; | |
3247 | use_operand_p use_p; | |
3248 | bool have_debug_uses = false; | |
3249 | ||
3250 | FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op) | |
3251 | { | |
3252 | if (gimple_debug_bind_p (USE_STMT (use_p))) | |
3253 | { | |
3254 | have_debug_uses = true; | |
3255 | break; | |
3256 | } | |
3257 | } | |
3258 | ||
3259 | if (have_debug_uses) | |
3260 | { | |
3261 | /* OP is a TERed SSA name, with DEF its defining | |
3262 | statement, and OP is used in further debug | |
3263 | instructions. Generate a debug temporary, and | |
3264 | replace all uses of OP in debug insns with that | |
3265 | temporary. */ | |
3266 | gimple debugstmt; | |
3267 | tree value = gimple_assign_rhs_to_tree (def); | |
3268 | tree vexpr = make_node (DEBUG_EXPR_DECL); | |
3269 | rtx val; | |
3270 | enum machine_mode mode; | |
3271 | ||
3272 | set_curr_insn_source_location (gimple_location (def)); | |
3273 | set_curr_insn_block (gimple_block (def)); | |
3274 | ||
3275 | DECL_ARTIFICIAL (vexpr) = 1; | |
3276 | TREE_TYPE (vexpr) = TREE_TYPE (value); | |
3277 | if (DECL_P (value)) | |
3278 | mode = DECL_MODE (value); | |
3279 | else | |
3280 | mode = TYPE_MODE (TREE_TYPE (value)); | |
3281 | DECL_MODE (vexpr) = mode; | |
3282 | ||
3283 | val = gen_rtx_VAR_LOCATION | |
3284 | (mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED); | |
3285 | ||
3286 | val = emit_debug_insn (val); | |
3287 | ||
3288 | FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op) | |
3289 | { | |
3290 | if (!gimple_debug_bind_p (debugstmt)) | |
3291 | continue; | |
3292 | ||
3293 | FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter) | |
3294 | SET_USE (use_p, vexpr); | |
3295 | ||
3296 | update_stmt (debugstmt); | |
3297 | } | |
3298 | } | |
3299 | } | |
3300 | set_curr_insn_source_location (sloc); | |
3301 | set_curr_insn_block (sblock); | |
3302 | } | |
3303 | ||
a5883ba0 | 3304 | currently_expanding_gimple_stmt = stmt; |
b5b8b0ac | 3305 | |
242229bb JH |
3306 | /* Expand this statement, then evaluate the resulting RTL and |
3307 | fixup the CFG accordingly. */ | |
726a989a | 3308 | if (gimple_code (stmt) == GIMPLE_COND) |
cea49550 | 3309 | { |
726a989a | 3310 | new_bb = expand_gimple_cond (bb, stmt); |
cea49550 RH |
3311 | if (new_bb) |
3312 | return new_bb; | |
3313 | } | |
b5b8b0ac AO |
3314 | else if (gimple_debug_bind_p (stmt)) |
3315 | { | |
3316 | location_t sloc = get_curr_insn_source_location (); | |
3317 | tree sblock = get_curr_insn_block (); | |
3318 | gimple_stmt_iterator nsi = gsi; | |
3319 | ||
3320 | for (;;) | |
3321 | { | |
3322 | tree var = gimple_debug_bind_get_var (stmt); | |
3323 | tree value; | |
3324 | rtx val; | |
3325 | enum machine_mode mode; | |
3326 | ||
3327 | if (gimple_debug_bind_has_value_p (stmt)) | |
3328 | value = gimple_debug_bind_get_value (stmt); | |
3329 | else | |
3330 | value = NULL_TREE; | |
3331 | ||
3332 | last = get_last_insn (); | |
3333 | ||
3334 | set_curr_insn_source_location (gimple_location (stmt)); | |
3335 | set_curr_insn_block (gimple_block (stmt)); | |
3336 | ||
3337 | if (DECL_P (var)) | |
3338 | mode = DECL_MODE (var); | |
3339 | else | |
3340 | mode = TYPE_MODE (TREE_TYPE (var)); | |
3341 | ||
3342 | val = gen_rtx_VAR_LOCATION | |
3343 | (mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED); | |
3344 | ||
3345 | val = emit_debug_insn (val); | |
3346 | ||
3347 | if (dump_file && (dump_flags & TDF_DETAILS)) | |
3348 | { | |
3349 | /* We can't dump the insn with a TREE where an RTX | |
3350 | is expected. */ | |
3351 | INSN_VAR_LOCATION_LOC (val) = const0_rtx; | |
3352 | maybe_dump_rtl_for_gimple_stmt (stmt, last); | |
3353 | INSN_VAR_LOCATION_LOC (val) = (rtx)value; | |
3354 | } | |
3355 | ||
2a8e30fb MM |
3356 | /* In order not to generate too many debug temporaries, |
3357 | we delink all uses of debug statements we already expanded. | |
3358 | Therefore debug statements between definition and real | |
3359 | use of TERed SSA names will continue to use the SSA name, | |
3360 | and not be replaced with debug temps. */ | |
3361 | delink_stmt_imm_use (stmt); | |
3362 | ||
b5b8b0ac AO |
3363 | gsi = nsi; |
3364 | gsi_next (&nsi); | |
3365 | if (gsi_end_p (nsi)) | |
3366 | break; | |
3367 | stmt = gsi_stmt (nsi); | |
3368 | if (!gimple_debug_bind_p (stmt)) | |
3369 | break; | |
3370 | } | |
3371 | ||
3372 | set_curr_insn_source_location (sloc); | |
3373 | set_curr_insn_block (sblock); | |
3374 | } | |
80c7a9eb | 3375 | else |
242229bb | 3376 | { |
726a989a | 3377 | if (is_gimple_call (stmt) && gimple_call_tail_p (stmt)) |
cea49550 RH |
3378 | { |
3379 | bool can_fallthru; | |
3380 | new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru); | |
3381 | if (new_bb) | |
3382 | { | |
3383 | if (can_fallthru) | |
3384 | bb = new_bb; | |
3385 | else | |
3386 | return new_bb; | |
3387 | } | |
3388 | } | |
4d7a65ea | 3389 | else |
b7211528 | 3390 | { |
4e3825db | 3391 | def_operand_p def_p; |
4e3825db MM |
3392 | def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF); |
3393 | ||
3394 | if (def_p != NULL) | |
3395 | { | |
3396 | /* Ignore this stmt if it is in the list of | |
3397 | replaceable expressions. */ | |
3398 | if (SA.values | |
b8698a0f | 3399 | && bitmap_bit_p (SA.values, |
e97809c6 | 3400 | SSA_NAME_VERSION (DEF_FROM_PTR (def_p)))) |
4e3825db MM |
3401 | continue; |
3402 | } | |
28ed065e | 3403 | last = expand_gimple_stmt (stmt); |
726a989a | 3404 | maybe_dump_rtl_for_gimple_stmt (stmt, last); |
b7211528 | 3405 | } |
242229bb JH |
3406 | } |
3407 | } | |
3408 | ||
a5883ba0 MM |
3409 | currently_expanding_gimple_stmt = NULL; |
3410 | ||
7241571e | 3411 | /* Expand implicit goto and convert goto_locus. */ |
a9b77cd1 ZD |
3412 | FOR_EACH_EDGE (e, ei, bb->succs) |
3413 | { | |
7241571e JJ |
3414 | if (e->goto_locus && e->goto_block) |
3415 | { | |
3416 | set_curr_insn_source_location (e->goto_locus); | |
3417 | set_curr_insn_block (e->goto_block); | |
3418 | e->goto_locus = curr_insn_locator (); | |
3419 | } | |
3420 | e->goto_block = NULL; | |
3421 | if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb) | |
3422 | { | |
3423 | emit_jump (label_rtx_for_bb (e->dest)); | |
3424 | e->flags &= ~EDGE_FALLTHRU; | |
3425 | } | |
a9b77cd1 ZD |
3426 | } |
3427 | ||
ae761c45 AH |
3428 | /* Expanded RTL can create a jump in the last instruction of a block. | |
3429 | This jump might later be assumed to be a jump to the successor and break | |
3430 | edge insertion. We need to insert a dummy move to prevent this. PR41440. */ | |
3431 | if (single_succ_p (bb) | |
3432 | && (single_succ_edge (bb)->flags & EDGE_FALLTHRU) | |
3433 | && (last = get_last_insn ()) | |
3434 | && JUMP_P (last)) | |
3435 | { | |
3436 | rtx dummy = gen_reg_rtx (SImode); | |
3437 | emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL); | |
3438 | } | |
3439 | ||
242229bb JH |
3440 | do_pending_stack_adjust (); |
3441 | ||
3f117656 | 3442 | /* Find the block tail. The last insn in the block is the insn |
242229bb JH |
3443 | before a barrier and/or table jump insn. */ |
3444 | last = get_last_insn (); | |
4b4bf941 | 3445 | if (BARRIER_P (last)) |
242229bb JH |
3446 | last = PREV_INSN (last); |
3447 | if (JUMP_TABLE_DATA_P (last)) | |
3448 | last = PREV_INSN (PREV_INSN (last)); | |
3449 | BB_END (bb) = last; | |
caf93cb0 | 3450 | |
242229bb | 3451 | update_bb_for_insn (bb); |
80c7a9eb | 3452 | |
242229bb JH |
3453 | return bb; |
3454 | } | |
3455 | ||
3456 | ||
3457 | /* Create a basic block for initialization code. */ | |
3458 | ||
3459 | static basic_block | |
3460 | construct_init_block (void) | |
3461 | { | |
3462 | basic_block init_block, first_block; | |
fd44f634 JH |
3463 | edge e = NULL; |
3464 | int flags; | |
275a4187 | 3465 | |
fd44f634 JH |
3466 | /* Multiple entry points not supported yet. */ |
3467 | gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1); | |
5e2d947c JH |
3468 | init_rtl_bb_info (ENTRY_BLOCK_PTR); |
3469 | init_rtl_bb_info (EXIT_BLOCK_PTR); | |
3470 | ENTRY_BLOCK_PTR->flags |= BB_RTL; | |
3471 | EXIT_BLOCK_PTR->flags |= BB_RTL; | |
242229bb | 3472 | |
fd44f634 | 3473 | e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0); |
275a4187 | 3474 | |
fd44f634 JH |
3475 | /* When the entry edge points to the first basic block, we don't need | |
3476 | a jump; otherwise we have to jump to the proper target. */ | |
3477 | if (e && e->dest != ENTRY_BLOCK_PTR->next_bb) | |
3478 | { | |
726a989a | 3479 | tree label = gimple_block_label (e->dest); |
fd44f634 JH |
3480 | |
3481 | emit_jump (label_rtx (label)); | |
3482 | flags = 0; | |
275a4187 | 3483 | } |
fd44f634 JH |
3484 | else |
3485 | flags = EDGE_FALLTHRU; | |
242229bb JH |
3486 | |
3487 | init_block = create_basic_block (NEXT_INSN (get_insns ()), | |
3488 | get_last_insn (), | |
3489 | ENTRY_BLOCK_PTR); | |
3490 | init_block->frequency = ENTRY_BLOCK_PTR->frequency; | |
3491 | init_block->count = ENTRY_BLOCK_PTR->count; | |
3492 | if (e) | |
3493 | { | |
3494 | first_block = e->dest; | |
3495 | redirect_edge_succ (e, init_block); | |
fd44f634 | 3496 | e = make_edge (init_block, first_block, flags); |
242229bb JH |
3497 | } |
3498 | else | |
3499 | e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU); | |
3500 | e->probability = REG_BR_PROB_BASE; | |
3501 | e->count = ENTRY_BLOCK_PTR->count; | |
3502 | ||
3503 | update_bb_for_insn (init_block); | |
3504 | return init_block; | |
3505 | } | |
3506 | ||
55e092c4 JH |
3507 | /* For each lexical block, set BLOCK_NUMBER to the depth at which it is |
3508 | found in the block tree. */ | |
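| /* E.g. after set_block_levels (DECL_INITIAL (...), 0) the outermost | |
| block has depth 0, its BLOCK_SUBBLOCKS depth 1, their subblocks | |
| depth 2, and so on; BLOCK_CHAIN siblings share a depth. change_scope | |
| uses these numbers to find the common parent of two scopes. */ | |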
3509 | ||
3510 | static void | |
3511 | set_block_levels (tree block, int level) | |
3512 | { | |
3513 | while (block) | |
3514 | { | |
3515 | BLOCK_NUMBER (block) = level; | |
3516 | set_block_levels (BLOCK_SUBBLOCKS (block), level + 1); | |
3517 | block = BLOCK_CHAIN (block); | |
3518 | } | |
3519 | } | |
242229bb JH |
3520 | |
3521 | /* Create a block containing landing pads and similar stuff. */ | |
3522 | ||
3523 | static void | |
3524 | construct_exit_block (void) | |
3525 | { | |
3526 | rtx head = get_last_insn (); | |
3527 | rtx end; | |
3528 | basic_block exit_block; | |
628f6a4e BE |
3529 | edge e, e2; |
3530 | unsigned ix; | |
3531 | edge_iterator ei; | |
071a42f9 | 3532 | rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb); |
242229bb | 3533 | |
bf08ebeb JH |
3534 | rtl_profile_for_bb (EXIT_BLOCK_PTR); |
3535 | ||
caf93cb0 | 3536 | /* Make sure the locus is set to the end of the function, so that |
242229bb | 3537 | epilogue line numbers and warnings are set properly. */ |
6773e15f | 3538 | if (cfun->function_end_locus != UNKNOWN_LOCATION) |
242229bb JH |
3539 | input_location = cfun->function_end_locus; |
3540 | ||
3541 | /* The following insns belong to the top scope. */ | |
55e092c4 | 3542 | set_curr_insn_block (DECL_INITIAL (current_function_decl)); |
242229bb | 3543 | |
242229bb JH |
3544 | /* Generate rtl for function exit. */ |
3545 | expand_function_end (); | |
3546 | ||
3547 | end = get_last_insn (); | |
3548 | if (head == end) | |
3549 | return; | |
071a42f9 JH |
3550 | /* While emitting the function end we could have moved the end of the | |
3551 | last basic block. */ | |
3552 | BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end; | |
4b4bf941 | 3553 | while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head))) |
242229bb | 3554 | head = NEXT_INSN (head); |
80c7a9eb RH |
3555 | exit_block = create_basic_block (NEXT_INSN (head), end, |
3556 | EXIT_BLOCK_PTR->prev_bb); | |
242229bb JH |
3557 | exit_block->frequency = EXIT_BLOCK_PTR->frequency; |
3558 | exit_block->count = EXIT_BLOCK_PTR->count; | |
628f6a4e BE |
3559 | |
3560 | ix = 0; | |
3561 | while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds)) | |
242229bb | 3562 | { |
8fb790fd | 3563 | e = EDGE_PRED (EXIT_BLOCK_PTR, ix); |
242229bb | 3564 | if (!(e->flags & EDGE_ABNORMAL)) |
628f6a4e BE |
3565 | redirect_edge_succ (e, exit_block); |
3566 | else | |
3567 | ix++; | |
242229bb | 3568 | } |
628f6a4e | 3569 | |
242229bb JH |
3570 | e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU); |
3571 | e->probability = REG_BR_PROB_BASE; | |
3572 | e->count = EXIT_BLOCK_PTR->count; | |
628f6a4e | 3573 | FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds) |
242229bb JH |
3574 | if (e2 != e) |
3575 | { | |
c22cacf3 | 3576 | e->count -= e2->count; |
242229bb JH |
3577 | exit_block->count -= e2->count; |
3578 | exit_block->frequency -= EDGE_FREQUENCY (e2); | |
3579 | } | |
3580 | if (e->count < 0) | |
3581 | e->count = 0; | |
3582 | if (exit_block->count < 0) | |
3583 | exit_block->count = 0; | |
3584 | if (exit_block->frequency < 0) | |
3585 | exit_block->frequency = 0; | |
3586 | update_bb_for_insn (exit_block); | |
3587 | } | |
3588 | ||
c22cacf3 | 3589 | /* Helper function for discover_nonconstant_array_refs. |
a1b23b2f UW |
3590 | Look for ARRAY_REF nodes with non-constant indexes and mark their | |
3591 | base decls addressable. */ | |
3592 | ||
3593 | static tree | |
3594 | discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees, | |
3595 | void *data ATTRIBUTE_UNUSED) | |
3596 | { | |
3597 | tree t = *tp; | |
3598 | ||
3599 | if (IS_TYPE_OR_DECL_P (t)) | |
3600 | *walk_subtrees = 0; | |
3601 | else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF) | |
3602 | { | |
3603 | while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF) | |
3604 | && is_gimple_min_invariant (TREE_OPERAND (t, 1)) | |
3605 | && (!TREE_OPERAND (t, 2) | |
3606 | || is_gimple_min_invariant (TREE_OPERAND (t, 2)))) | |
3607 | || (TREE_CODE (t) == COMPONENT_REF | |
3608 | && (!TREE_OPERAND (t,2) | |
3609 | || is_gimple_min_invariant (TREE_OPERAND (t, 2)))) | |
3610 | || TREE_CODE (t) == BIT_FIELD_REF | |
3611 | || TREE_CODE (t) == REALPART_EXPR | |
3612 | || TREE_CODE (t) == IMAGPART_EXPR | |
3613 | || TREE_CODE (t) == VIEW_CONVERT_EXPR | |
1043771b | 3614 | || CONVERT_EXPR_P (t)) |
a1b23b2f UW |
3615 | t = TREE_OPERAND (t, 0); |
3616 | ||
3617 | if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF) | |
3618 | { | |
3619 | t = get_base_address (t); | |
6f11d690 RG |
3620 | if (t && DECL_P (t) |
3621 | && DECL_MODE (t) != BLKmode) | |
a1b23b2f UW |
3622 | TREE_ADDRESSABLE (t) = 1; |
3623 | } | |
3624 | ||
3625 | *walk_subtrees = 0; | |
3626 | } | |
3627 | ||
3628 | return NULL_TREE; | |
3629 | } | |
3630 | ||
3631 | /* RTL expansion is not able to compile array references with variable | |
3632 | offsets for arrays stored in a single register. Discover such | |
3633 | expressions and mark variables as addressable to avoid this | |
3634 | scenario. */ | |
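| /* An illustrative source-level example (not from this file): in | |
| int f (int i) { int v[2] = { 1, 2 }; return v[i]; } | |
| V is small enough to be allocated to a single register, which | |
| leaves no way to apply the variable index I; marking V addressable | |
| forces it into memory instead. */ | |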
3635 | ||
3636 | static void | |
3637 | discover_nonconstant_array_refs (void) | |
3638 | { | |
3639 | basic_block bb; | |
726a989a | 3640 | gimple_stmt_iterator gsi; |
a1b23b2f UW |
3641 | |
3642 | FOR_EACH_BB (bb) | |
726a989a RB |
3643 | for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi)) |
3644 | { | |
3645 | gimple stmt = gsi_stmt (gsi); | |
aa847cc8 JJ |
3646 | if (!is_gimple_debug (stmt)) |
3647 | walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL); | |
726a989a | 3648 | } |
a1b23b2f UW |
3649 | } |
3650 | ||
2e3f842f L |
3651 | /* This function sets crtl->args.internal_arg_pointer to a virtual |
3652 | register if DRAP is needed. The local register allocator will replace | |
3653 | virtual_incoming_args_rtx with the virtual register. */ | |
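| /* An illustrative case (assumed, not spelled out here): a function | |
| whose locals need more alignment than the incoming stack | |
| guarantees must realign its stack pointer; incoming arguments must | |
| then still be addressed relative to the original incoming stack, | |
| which is what the DRAP (dynamic realign argument pointer) register | |
| provides. */ | |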
3654 | ||
3655 | static void | |
3656 | expand_stack_alignment (void) | |
3657 | { | |
3658 | rtx drap_rtx; | |
e939805b | 3659 | unsigned int preferred_stack_boundary; |
2e3f842f L |
3660 | |
3661 | if (! SUPPORTS_STACK_ALIGNMENT) | |
3662 | return; | |
b8698a0f | 3663 | |
2e3f842f L |
3664 | if (cfun->calls_alloca |
3665 | || cfun->has_nonlocal_label | |
3666 | || crtl->has_nonlocal_goto) | |
3667 | crtl->need_drap = true; | |
3668 | ||
890b9b96 L |
3669 | /* Call update_stack_boundary here again to update incoming stack |
3670 | boundary. It may set incoming stack alignment to a different | |
3671 | value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may | |
3672 | use the minimum incoming stack alignment to check if it is OK | |
3673 | to perform sibcall optimization since sibcall optimization will | |
3674 | only align the outgoing stack to the incoming stack boundary. */ | |
3675 | if (targetm.calls.update_stack_boundary) | |
3676 | targetm.calls.update_stack_boundary (); | |
3677 | ||
3678 | /* The incoming stack frame has to be aligned at least at | |
3679 | parm_stack_boundary. */ | |
3680 | gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY); | |
2e3f842f | 3681 | |
2e3f842f L |
3682 | /* Update crtl->stack_alignment_estimated and use it later to align |
3683 | stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call | |
3684 | exceptions since callgraph doesn't collect incoming stack alignment | |
3685 | in this case. */ | |
3686 | if (flag_non_call_exceptions | |
3687 | && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary) | |
3688 | preferred_stack_boundary = PREFERRED_STACK_BOUNDARY; | |
3689 | else | |
3690 | preferred_stack_boundary = crtl->preferred_stack_boundary; | |
3691 | if (preferred_stack_boundary > crtl->stack_alignment_estimated) | |
3692 | crtl->stack_alignment_estimated = preferred_stack_boundary; | |
3693 | if (preferred_stack_boundary > crtl->stack_alignment_needed) | |
3694 | crtl->stack_alignment_needed = preferred_stack_boundary; | |
3695 | ||
890b9b96 L |
3696 | gcc_assert (crtl->stack_alignment_needed |
3697 | <= crtl->stack_alignment_estimated); | |
3698 | ||
2e3f842f | 3699 | crtl->stack_realign_needed |
e939805b | 3700 | = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated; |
d2d93c32 | 3701 | crtl->stack_realign_tried = crtl->stack_realign_needed; |
2e3f842f L |
3702 | |
3703 | crtl->stack_realign_processed = true; | |
3704 | ||
3705 | /* Target has to redefine TARGET_GET_DRAP_RTX to support stack | |
3706 | alignment. */ | |
3707 | gcc_assert (targetm.calls.get_drap_rtx != NULL); | |
b8698a0f | 3708 | drap_rtx = targetm.calls.get_drap_rtx (); |
2e3f842f | 3709 | |
d015f7cc L |
3710 | /* stack_realign_drap and drap_rtx must match. */ |
3711 | gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL)); | |
3712 | ||
2e3f842f L |
3713 | /* Do nothing if NULL is returned, which means DRAP is not needed. */ |
3714 | if (NULL != drap_rtx) | |
3715 | { | |
3716 | crtl->args.internal_arg_pointer = drap_rtx; | |
3717 | ||
3718 | /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is | |
3719 | needed. */ | |
3720 | fixup_tail_calls (); | |
3721 | } | |
3722 | } | |
3723 | ||
242229bb JH |
3724 | /* Translate the intermediate representation contained in the CFG |
3725 | from GIMPLE trees to RTL. | |
3726 | ||
3727 | We do conversion per basic block and preserve/update the tree CFG. | |
3728 | This implies we have to do some magic as the CFG can simultaneously | |
3729 | consist of basic blocks containing RTL and GIMPLE trees. This can | |
61ada8ae | 3730 | confuse the CFG hooks, so be careful not to manipulate the CFG during
242229bb JH |
3731 | the expansion. */ |
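| /* As a rough illustration (not verbatim output of this pass), a | |
| GIMPLE assignment such as | |
| a_2 = b_1 + c_3; | |
| becomes an RTL insn along the lines of | |
| (set (reg:SI 62) (plus:SI (reg:SI 60) (reg:SI 61))) | |
| with SSA names mapped to pseudos via SA.partition_to_pseudo. */ | |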
3732 | ||
c2924966 | 3733 | static unsigned int |
726a989a | 3734 | gimple_expand_cfg (void) |
242229bb JH |
3735 | { |
3736 | basic_block bb, init_block; | |
3737 | sbitmap blocks; | |
0ef90296 ZD |
3738 | edge_iterator ei; |
3739 | edge e; | |
4e3825db MM |
3740 | unsigned i; |
3741 | ||
3742 | rewrite_out_of_ssa (&SA); | |
3743 | SA.partition_to_pseudo = (rtx *)xcalloc (SA.map->num_partitions, | |
3744 | sizeof (rtx)); | |
242229bb | 3745 | |
4586b4ca SB |
3746 | /* Some backends want to know that we are expanding to RTL. */ |
3747 | currently_expanding_to_rtl = 1; | |
3748 | ||
bf08ebeb JH |
3749 | rtl_profile_for_bb (ENTRY_BLOCK_PTR); |
3750 | ||
55e092c4 | 3751 | insn_locators_alloc (); |
fe8a7779 | 3752 | if (!DECL_IS_BUILTIN (current_function_decl)) |
1751ecd6 AH |
3753 | { |
3754 | /* Eventually, all FEs should explicitly set function_start_locus. */ | |
3755 | if (cfun->function_start_locus == UNKNOWN_LOCATION) | |
3756 | set_curr_insn_source_location | |
3757 | (DECL_SOURCE_LOCATION (current_function_decl)); | |
3758 | else | |
3759 | set_curr_insn_source_location (cfun->function_start_locus); | |
3760 | } | |
55e092c4 JH |
3761 | set_curr_insn_block (DECL_INITIAL (current_function_decl)); |
3762 | prologue_locator = curr_insn_locator (); | |
3763 | ||
3764 | /* Make sure the first insn is a note even if we don't want linenums. | |
3765 | This makes sure the first insn will never be deleted. | |
3766 | Also, final expects a note to appear there. */ | |
3767 | emit_note (NOTE_INSN_DELETED); | |
6429e3be | 3768 | |
a1b23b2f UW |
3769 | /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */ |
3770 | discover_nonconstant_array_refs (); | |
3771 | ||
e41b2a33 | 3772 | targetm.expand_to_rtl_hook (); |
cb91fab0 | 3773 | crtl->stack_alignment_needed = STACK_BOUNDARY; |
2e3f842f | 3774 | crtl->max_used_stack_slot_alignment = STACK_BOUNDARY; |
890b9b96 | 3775 | crtl->stack_alignment_estimated = 0; |
cb91fab0 JH |
3776 | crtl->preferred_stack_boundary = STACK_BOUNDARY; |
3777 | cfun->cfg->max_jumptable_ents = 0; | |
3778 | ||
e41b2a33 | 3779 | |
727a31fa | 3780 | /* Expand the variables recorded during gimple lowering. */ |
242229bb JH |
3781 | expand_used_vars (); |
3782 | ||
7d69de61 RH |
3783 | /* Honor stack protection warnings. */ |
3784 | if (warn_stack_protect) | |
3785 | { | |
e3b5732b | 3786 | if (cfun->calls_alloca) |
b8698a0f | 3787 | warning (OPT_Wstack_protector, |
3b123595 SB |
3788 | "stack protector not protecting local variables: " |
3789 | "variable length buffer"); | |
cb91fab0 | 3790 | if (has_short_buffer && !crtl->stack_protect_guard) |
b8698a0f | 3791 | warning (OPT_Wstack_protector, |
3b123595 SB |
3792 | "stack protector not protecting function: " |
3793 | "all local arrays are less than %d bytes long", | |
7d69de61 RH |
3794 | (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE)); |
3795 | } | |
3796 | ||
242229bb | 3797 | /* Set up parameters and prepare for return, for the function. */ |
b79c5284 | 3798 | expand_function_start (current_function_decl); |
242229bb | 3799 | |
4e3825db MM |
3800 | /* Now that we also have the parameter RTXs, copy them over to our |
3801 | partitions. */ | |
3802 | for (i = 0; i < SA.map->num_partitions; i++) | |
3803 | { | |
3804 | tree var = SSA_NAME_VAR (partition_to_var (SA.map, i)); | |
3805 | ||
3806 | if (TREE_CODE (var) != VAR_DECL | |
3807 | && !SA.partition_to_pseudo[i]) | |
3808 | SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var); | |
3809 | gcc_assert (SA.partition_to_pseudo[i]); | |
eb7adebc MM |
3810 | |
3811 | /* If this decl was marked as living in multiple places, reset | |
3812 | this now to NULL. */ | |
3813 | if (DECL_RTL_IF_SET (var) == pc_rtx) | |
3814 | SET_DECL_RTL (var, NULL); | |
3815 | ||
4e3825db MM |
3816 | /* Some RTL parts really want to look at DECL_RTL(x) when x |
3817 | was a decl marked in REG_ATTR or MEM_ATTR. We could use | |
3818 | SET_DECL_RTL here making this available, but that would mean | |
3819 | to select one of the potentially many RTLs for one DECL. Instead | |
3820 | of doing that we simply reset the MEM_EXPR of the RTL in question, | |
3821 | then nobody can get at it and hence nobody can call DECL_RTL on it. */ | |
3822 | if (!DECL_RTL_SET_P (var)) | |
3823 | { | |
3824 | if (MEM_P (SA.partition_to_pseudo[i])) | |
3825 | set_mem_expr (SA.partition_to_pseudo[i], NULL); | |
3826 | } | |
3827 | } | |
3828 | ||
242229bb JH |
3829 | /* If this function is `main', emit a call to `__main' |
3830 | to run global initializers, etc. */ | |
3831 | if (DECL_NAME (current_function_decl) | |
3832 | && MAIN_NAME_P (DECL_NAME (current_function_decl)) | |
3833 | && DECL_FILE_SCOPE_P (current_function_decl)) | |
3834 | expand_main_function (); | |
3835 | ||
7d69de61 RH |
3836 | /* Initialize the stack_protect_guard field. This must happen after the |
3837 | call to __main (if any) so that the external decl is initialized. */ | |
cb91fab0 | 3838 | if (crtl->stack_protect_guard) |
7d69de61 RH |
3839 | stack_protect_prologue (); |
3840 | ||
4e3825db MM |
3841 | expand_phi_nodes (&SA); |
3842 | ||
3fbd86b1 | 3843 | /* Register rtl specific functions for cfg. */ |
242229bb JH |
3844 | rtl_register_cfg_hooks (); |
3845 | ||
3846 | init_block = construct_init_block (); | |
3847 | ||
0ef90296 | 3848 | /* Clear EDGE_EXECUTABLE on the entry edge(s). It is cleaned from the |
4e3825db | 3849 | remaining edges later. */ |
0ef90296 ZD |
3850 | FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs) |
3851 | e->flags &= ~EDGE_EXECUTABLE; | |
3852 | ||
8b11009b | 3853 | lab_rtx_for_bb = pointer_map_create (); |
242229bb | 3854 | FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb) |
10d22567 | 3855 | bb = expand_gimple_basic_block (bb); |
bf08ebeb | 3856 | |
b5b8b0ac AO |
3857 | if (MAY_HAVE_DEBUG_INSNS) |
3858 | expand_debug_locations (); | |
3859 | ||
4e3825db MM |
3860 | execute_free_datastructures (); |
3861 | finish_out_of_ssa (&SA); | |
3862 | ||
91753e21 RG |
3863 | /* We are no longer in SSA form. */ |
3864 | cfun->gimple_df->in_ssa_p = false; | |
3865 | ||
bf08ebeb JH |
3866 | /* Expansion is used by optimization passes too; set maybe_hot_insn_p
3867 | conservatively to true until they are all profile aware. */ | |
8b11009b | 3868 | pointer_map_destroy (lab_rtx_for_bb); |
cb91fab0 | 3869 | free_histograms (); |
242229bb JH |
3870 | |
3871 | construct_exit_block (); | |
55e092c4 JH |
3872 | set_curr_insn_block (DECL_INITIAL (current_function_decl)); |
3873 | insn_locators_finalize (); | |
242229bb | 3874 | |
1d65f45c | 3875 | /* Zap the tree EH table. */ |
e8a2a782 | 3876 | set_eh_throw_stmt_table (cfun, NULL); |
242229bb JH |
3877 | |
3878 | rebuild_jump_labels (get_insns ()); | |
242229bb | 3879 | |
4e3825db MM |
3880 | FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb) |
3881 | { | |
3882 | edge e; | |
3883 | edge_iterator ei; | |
3884 | for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); ) | |
3885 | { | |
3886 | if (e->insns.r) | |
3887 | commit_one_edge_insertion (e); | |
3888 | else | |
3889 | ei_next (&ei); | |
3890 | } | |
3891 | } | |
3892 | ||
3893 | /* We're done expanding trees to RTL. */ | |
3894 | currently_expanding_to_rtl = 0; | |
3895 | ||
3896 | FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb) | |
3897 | { | |
3898 | edge e; | |
3899 | edge_iterator ei; | |
3900 | for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); ) | |
3901 | { | |
3902 | /* Clear EDGE_EXECUTABLE. This flag is never used in the backend. */ | |
3903 | e->flags &= ~EDGE_EXECUTABLE; | |
3904 | ||
3905 | /* At the moment not all abnormal edges match the RTL | |
3906 | representation. It is safe to remove them here as | |
3907 | find_many_sub_basic_blocks will rediscover them. | |
3908 | In the future we should get this fixed properly. */ | |
3909 | if ((e->flags & EDGE_ABNORMAL) | |
3910 | && !(e->flags & EDGE_SIBCALL)) | |
3911 | remove_edge (e); | |
3912 | else | |
3913 | ei_next (&ei); | |
3914 | } | |
3915 | } | |
3916 | ||
242229bb JH |
3917 | blocks = sbitmap_alloc (last_basic_block); |
3918 | sbitmap_ones (blocks); | |
3919 | find_many_sub_basic_blocks (blocks); | |
242229bb | 3920 | sbitmap_free (blocks); |
4e3825db | 3921 | purge_all_dead_edges (); |
242229bb JH |
3922 | |
3923 | compact_blocks (); | |
2e3f842f L |
3924 | |
3925 | expand_stack_alignment (); | |
3926 | ||
242229bb | 3927 | #ifdef ENABLE_CHECKING |
62e5bf5d | 3928 | verify_flow_info (); |
242229bb | 3929 | #endif |
9f8628ba PB |
3930 | |
3931 | /* There's no need to defer outputting this function any more; we | |
3932 | know we want to output it. */ | |
3933 | DECL_DEFER_OUTPUT (current_function_decl) = 0; | |
3934 | ||
3935 | /* Now that we're done expanding trees to RTL, we shouldn't have any | |
3936 | more CONCATs anywhere. */ | |
3937 | generating_concat_p = 0; | |
3938 | ||
b7211528 SB |
3939 | if (dump_file) |
3940 | { | |
3941 | fprintf (dump_file, | |
3942 | "\n\n;;\n;; Full RTL generated for this function:\n;;\n"); | |
3943 | /* And the pass manager will dump RTL for us. */ | |
3944 | } | |
ef330312 PB |
3945 | |
3946 | /* If we're emitting a nested function, make sure its parent gets | |
3947 | emitted as well. Doing otherwise confuses debug info. */ | |
c22cacf3 | 3948 | { |
ef330312 PB |
3949 | tree parent; |
3950 | for (parent = DECL_CONTEXT (current_function_decl); | |
c22cacf3 MS |
3951 | parent != NULL_TREE; |
3952 | parent = get_containing_scope (parent)) | |
ef330312 | 3953 | if (TREE_CODE (parent) == FUNCTION_DECL) |
c22cacf3 | 3954 | TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1; |
ef330312 | 3955 | } |
c22cacf3 | 3956 | |
ef330312 PB |
3957 | /* We are now committed to emitting code for this function. Do any |
3958 | preparation, such as emitting abstract debug info for the inline | |
3959 | before it gets mangled by optimization. */ | |
3960 | if (cgraph_function_possibly_inlined_p (current_function_decl)) | |
3961 | (*debug_hooks->outlining_inline_function) (current_function_decl); | |
3962 | ||
3963 | TREE_ASM_WRITTEN (current_function_decl) = 1; | |
4bb1e037 AP |
3964 | |
3965 | /* After expanding, the return labels are no longer needed. */ | |
3966 | return_label = NULL; | |
3967 | naked_return_label = NULL; | |
55e092c4 JH |
3968 | /* Tag the blocks with a depth number so that change_scope can find |
3969 | the common parent easily. */ | |
3970 | set_block_levels (DECL_INITIAL (cfun->decl), 0); | |
bf08ebeb | 3971 | default_rtl_profile (); |
c2924966 | 3972 | return 0; |
242229bb JH |
3973 | } |
3974 | ||
e3b5732b | 3975 | struct rtl_opt_pass pass_expand = |
242229bb | 3976 | { |
8ddbbcae | 3977 | { |
e3b5732b | 3978 | RTL_PASS, |
c22cacf3 | 3979 | "expand", /* name */ |
242229bb | 3980 | NULL, /* gate */ |
726a989a | 3981 | gimple_expand_cfg, /* execute */ |
242229bb JH |
3982 | NULL, /* sub */ |
3983 | NULL, /* next */ | |
3984 | 0, /* static_pass_number */ | |
c22cacf3 | 3985 | TV_EXPAND, /* tv_id */ |
688a482d RG |
3986 | PROP_ssa | PROP_gimple_leh | PROP_cfg |
3987 | | PROP_gimple_lcx, /* properties_required */ | |
242229bb | 3988 | PROP_rtl, /* properties_provided */ |
4e3825db MM |
3989 | PROP_ssa | PROP_trees, /* properties_destroyed */ |
3990 | TODO_verify_ssa | TODO_verify_flow | |
3991 | | TODO_verify_stmts, /* todo_flags_start */ | |
3992 | TODO_dump_func | |
3993 | | TODO_ggc_collect /* todo_flags_finish */ | |
8ddbbcae | 3994 | } |
242229bb | 3995 | }; |