/* A pass for lowering trees to RTL.
   Copyright (C) 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011
   Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "tree.h"
#include "rtl.h"
#include "tm_p.h"
#include "basic-block.h"
#include "function.h"
#include "expr.h"
#include "langhooks.h"
#include "tree-flow.h"
#include "timevar.h"
#include "tree-dump.h"
#include "tree-pass.h"
#include "except.h"
#include "flags.h"
#include "diagnostic.h"
#include "tree-pretty-print.h"
#include "gimple-pretty-print.h"
#include "toplev.h"
#include "debug.h"
#include "params.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "target.h"
#include "ssaexpand.h"
#include "bitmap.h"
#include "cfgloop.h"
#include "regs.h"
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
/* This variable holds information helping the rewriting of SSA trees
   to RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple currently_expanding_gimple_stmt;
static rtx expand_debug_expr (tree);
/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple stmt)
{
  tree t;
  enum gimple_rhs_class grhs_class;

  grhs_class = get_gimple_rhs_class (gimple_expr_code (stmt));

  if (grhs_class == GIMPLE_TERNARY_RHS)
    t = build3 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt),
                gimple_assign_rhs3 (stmt));
  else if (grhs_class == GIMPLE_BINARY_RHS)
    t = build2 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt),
                gimple_assign_rhs2 (stmt));
  else if (grhs_class == GIMPLE_UNARY_RHS)
    t = build1 (gimple_assign_rhs_code (stmt),
                TREE_TYPE (gimple_assign_lhs (stmt)),
                gimple_assign_rhs1 (stmt));
  else if (grhs_class == GIMPLE_SINGLE_RHS)
    {
      t = gimple_assign_rhs1 (stmt);
      /* Avoid modifying this tree in place below.  */
      if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
           && gimple_location (stmt) != EXPR_LOCATION (t))
          || (gimple_block (stmt)
              && currently_expanding_to_rtl
              && EXPR_P (t)
              && gimple_block (stmt) != TREE_BLOCK (t)))
        t = copy_node (t);
    }
  else
    gcc_unreachable ();

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));
  if (gimple_block (stmt) && currently_expanding_to_rtl && EXPR_P (t))
    TREE_BLOCK (t) = gimple_block (stmt);

  return t;
}
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
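
/* For example, given an SSA name i_3 whose SSA_NAME_VAR is the
   VAR_DECL for "i", SSAVAR (i_3) yields that VAR_DECL; applied to a
   plain decl, SSAVAR is the identity.  */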
/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */

static inline void
set_rtl (tree t, rtx x)
{
  if (TREE_CODE (t) == SSA_NAME)
    {
      SA.partition_to_pseudo[var_to_partition (SA.map, t)] = x;
      if (x && !MEM_P (x))
        set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (t), x);
      /* For the benefit of debug information at -O0 (where vartracking
         doesn't run) record the place also in the base DECL if it's
         a normal variable (not a parameter).  */
      if (x && x != pc_rtx && TREE_CODE (SSA_NAME_VAR (t)) == VAR_DECL)
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}
/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
struct stack_var
{
  /* The Variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  HOST_WIDE_INT size;

  /* The *byte* alignment required for this variable.  Or as, with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)

/* We have an array of such objects while deciding allocation.  */
static struct stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-increasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;
/* Compute the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl)
{
  unsigned int align = LOCAL_DECL_ALIGNMENT (decl);
  DECL_ALIGN (decl) = align;
  return align / BITS_PER_UNIT;
}
/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static HOST_WIDE_INT
alloc_stack_frame_space (HOST_WIDE_INT size, unsigned HOST_WIDE_INT align)
{
  HOST_WIDE_INT offset, new_frame_offset;

  new_frame_offset = frame_offset;
  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset -= size + frame_phase;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset -= frame_phase;
      new_frame_offset += align - 1;
      new_frame_offset &= -align;
      new_frame_offset += frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
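
/* As a worked example of the arithmetic above: with an upward-growing
   frame, frame_phase == 0 and frame_offset == 20, a request for SIZE 16
   at ALIGN 8 rounds the offset up to 24 (20 + 7, masked with -8),
   returns 24, and leaves frame_offset at 40.  */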
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl)
{
  struct stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (struct stack_var, stack_vars, stack_vars_alloc);
    }
  v = &stack_vars[stack_vars_num];

  v->decl = decl;
  v->size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (decl)), 1);
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (v->size == 0)
    v->size = 1;
  v->alignb = align_local_variable (SSAVAR (decl));
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}
/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (NULL);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (NULL);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}
/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  struct stack_var *a = &stack_vars[x];
  struct stack_var *b = &stack_vars[y];
  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}
/* Returns true if TYPE is or contains a union type.  */

static bool
aggregate_contains_union_type (tree type)
{
  tree field;

  if (TREE_CODE (type) == UNION_TYPE
      || TREE_CODE (type) == QUAL_UNION_TYPE)
    return true;
  if (TREE_CODE (type) == ARRAY_TYPE)
    return aggregate_contains_union_type (TREE_TYPE (type));
  if (TREE_CODE (type) != RECORD_TYPE)
    return false;

  for (field = TYPE_FIELDS (type); field; field = DECL_CHAIN (field))
    if (TREE_CODE (field) == FIELD_DECL)
      if (aggregate_contains_union_type (TREE_TYPE (field)))
        return true;

  return false;
}
/* A subroutine of expand_used_vars.  If two variables X and Y have alias
   sets that do not conflict, then do add a conflict for these variables
   in the interference graph.  We also need to make sure to add conflicts
   for union containing structures.  Else RTL alias analysis comes along
   and due to type based aliasing rules decides that for two overlapping
   union temporaries { short s; int i; } accesses to the same mem through
   different types may not alias and happily reorders stores across
   life-time boundaries of the temporaries (See PR25654).
   We also have to mind MEM_IN_STRUCT_P and MEM_SCALAR_P.  */

static void
add_alias_set_conflicts (void)
{
  size_t i, j, n = stack_vars_num;

  for (i = 0; i < n; ++i)
    {
      tree type_i = TREE_TYPE (stack_vars[i].decl);
      bool aggr_i = AGGREGATE_TYPE_P (type_i);
      bool contains_union;

      contains_union = aggregate_contains_union_type (type_i);
      for (j = 0; j < i; ++j)
        {
          tree type_j = TREE_TYPE (stack_vars[j].decl);
          bool aggr_j = AGGREGATE_TYPE_P (type_j);
          if (aggr_i != aggr_j
              /* Either the objects conflict by means of type based
                 aliasing rules, or we need to add a conflict.  */
              || !objects_must_conflict_p (type_i, type_j)
              /* In case the types do not conflict ensure that access
                 to elements will conflict.  In case of unions we have
                 to be careful as type based aliasing rules may say
                 access to the same memory does not conflict.  So play
                 safe and add a conflict in this case when
                 -fstrict-aliasing is used.  */
              || (contains_union && flag_strict_aliasing))
            add_stack_var_conflict (i, j);
        }
    }
}
/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  HOST_WIDE_INT sizea = stack_vars[ia].size;
  HOST_WIDE_INT sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing.  */
  if (sizea > sizeb)
    return -1;
  if (sizea < sizeb)
    return 1;

  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna > alignb)
    return -1;
  if (aligna < alignb)
    return 1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return -1;
  if (uida > uidb)
    return 1;
  return 0;
}
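
/* For example, under this ordering a 32-byte variable sorts before a
   16-byte one, and of two 16-byte variables the one requiring 16-byte
   alignment sorts before the one requiring 8-byte alignment; any
   variable whose alignment exceeds MAX_SUPPORTED_STACK_ALIGNMENT
   sorts before all of these.  */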
/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               struct pointer_map_t *decls_to_partitions,
                               struct pointer_set_t *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || pointer_set_insert (visited, pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = (bitmap *) pointer_map_contains (decls_to_partitions,
                                                    (void *)(size_t) i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}
/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  struct pointer_map_t *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = pointer_map_create ();
          cfun->gimple_df->decls_to_pointers = pointer_map_create ();
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node, NULL);
      name = make_ssa_name (var, NULL);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          /* We should never end up partitioning SSA names (though they
             may end up on the stack).  Neither should we allocate stack
             space to something that is unused and thus unreferenced, except
             for -O0 where we are preserving even unreferenced variables.  */
          gcc_assert (DECL_P (decl)
                      && (!optimize
                          || referenced_var_lookup (cfun, DECL_UID (decl))));
          bitmap_set_bit (part, uid);
          *((bitmap *) pointer_map_insert (decls_to_partitions,
                                           (void *)(size_t) uid)) = part;
          *((tree *) pointer_map_insert (cfun->gimple_df->decls_to_pointers,
                                         decl)) = name;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      struct pointer_set_t *visited = pointer_set_create ();
      bitmap temp = BITMAP_ALLOC (NULL);

      for (i = 1; i < num_ssa_names; i++)
        {
          tree name = ssa_name (i);
          struct ptr_info_def *pi;

          if (name
              && POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, visited, temp);

      pointer_set_destroy (visited);
      pointer_map_destroy (decls_to_partitions);
      BITMAP_FREE (temp);
    }
}
/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (size_t a, size_t b)
{
  struct stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}
/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size in descending order.
        For each object A {
          S = size(A)
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
          }
        }
*/

static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;

      /* Ignore objects that aren't partition representatives.  If we
         see a var that is not a partition representative, it must
         have been merged earlier.  */
      if (stack_vars[i].representative != i)
        continue;

      for (sj = si + 1; sj < n; ++sj)
        {
          size_t j = stack_vars_sorted[sj];
          unsigned int jalign = stack_vars[j].alignb;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            continue;

          /* UNION the objects; J joins I's partition.  */
          union_stack_vars (i, j);
        }
    }

  update_alias_info_with_stack_vars ();
}
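
/* For example, given objects of sizes 32, 16 and 8 where only the
   32-byte and 16-byte objects conflict, the loop above visits the
   32-byte object first, skips the conflicting 16-byte object, and
   unions the 8-byte object into its partition; the 16-byte object
   then remains the representative of its own partition.  */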
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size " HOST_WIDE_INT_PRINT_DEC
               " align %u\n", (unsigned long) i, stack_vars[i].size,
               stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
        }
      fputc ('\n', dump_file);
    }
}
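
/* With the format above, a typical entry in the expand dump might look
   something like:

     Partition 0: size 32 align 8
             buf
             tmp
*/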
/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
                         HOST_WIDE_INT offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (offset == trunc_int_for_mode (offset, Pmode));

  x = plus_constant (base, offset);
  x = gen_rtx_MEM (DECL_MODE (SSAVAR (decl)), x);

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* Set alignment we actually gave this decl if it isn't an SSA name.
         If it is we generate stack slots only accidentally so it isn't as
         important, we'll simply use the alignment that is already set.  */
      if (base == virtual_stack_vars_rtx)
        offset -= frame_phase;
      align = offset & -offset;
      align *= BITS_PER_UNIT;
      if (align == 0 || align > base_align)
        align = base_align;

      /* One would think that we could assert that we're not decreasing
         alignment here, but (at least) the i386 port does exactly this
         via the MINIMUM_ALIGNMENT hook.  */

      DECL_ALIGN (decl) = align;
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_attributes (x, SSAVAR (decl), true);
  set_rtl (decl, x);
}
/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (tree))
{
  size_t si, i, j, n = stack_vars_num;
  HOST_WIDE_INT large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
        {
          unsigned alignb;

          i = stack_vars_sorted[si];
          alignb = stack_vars[i].alignb;

          /* Stop when we get to the first decl with "small" alignment.  */
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Skip variables that aren't partition representatives.  */
          if (stack_vars[i].representative != i)
            continue;

          /* Skip variables that have already had rtl assigned.  See also
             add_stack_var where we perpetrate this pc_rtx hack.  */
          decl = stack_vars[i].decl;
          if ((TREE_CODE (decl) == SSA_NAME
               ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
               : DECL_RTL (decl)) != pc_rtx)
            continue;

          large_size += alignb - 1;
          large_size &= -(HOST_WIDE_INT)alignb;
          large_size += stack_vars[i].size;
        }

      /* If there were any, allocate space.  */
      if (large_size > 0)
        large_base = allocate_dynamic_stack_space (GEN_INT (large_size), 0,
                                                   large_align, true);
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      HOST_WIDE_INT offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if ((TREE_CODE (decl) == SSA_NAME
           ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)]
           : DECL_RTL (decl)) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (decl))
        continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
          base = virtual_stack_vars_rtx;
          base_align = crtl->max_used_stack_slot_alignment;
        }
      else
        {
          /* Large alignment is only processed in the last pass.  */
          if (pred)
            continue;
          gcc_assert (large_base != NULL);

          large_alloc += alignb - 1;
          large_alloc &= -(HOST_WIDE_INT)alignb;
          offset = large_alloc;
          large_alloc += stack_vars[i].size;

          base = large_base;
          base_align = large_align;
        }

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        expand_one_stack_var_at (stack_vars[j].decl,
                                 base, base_align,
                                 offset);
    }

  gcc_assert (large_alloc == large_size);
}
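
/* As an example of the two paths above: a variable with 16-byte
   alignment on a target supporting 16-byte stack slots is given a
   frame offset via alloc_stack_frame_space, while a variable aligned
   beyond MAX_SUPPORTED_STACK_ALIGNMENT is instead carved out of the
   dynamically allocated LARGE_BASE block at an offset rounded up to
   its alignment.  */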
/* Take into account all sizes of partitions and reset DECL_RTLs.  */

static HOST_WIDE_INT
account_stack_vars (void)
{
  size_t si, j, i, n = stack_vars_num;
  HOST_WIDE_INT size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
        set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}
/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var (tree var)
{
  HOST_WIDE_INT size, offset;
  unsigned byte_align;

  size = tree_low_cst (DECL_SIZE_UNIT (SSAVAR (var)), 1);
  byte_align = align_local_variable (SSAVAR (var));

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
                           crtl->max_used_stack_slot_alignment, offset);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  enum machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, get_pointer_alignment (var));
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  enum machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}
/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.  */
  if (flag_stack_protect)
    return true;

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (DECL_ALIGN (var) > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  /* Variables in the outermost scope automatically conflict with
     every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && tree_low_cst (DECL_SIZE_UNIT (var), 1) < 32)
    return false;

  return true;
}
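
/* For example, below -O2 every variable in the outermost scope is
   allocated immediately, and at -O0 so is any block-scope variable
   smaller than 32 bytes; a block-scope 64-byte array, by contrast,
   is still deferred, and with -fstack-protector everything is.  */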
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return stack usage this variable is supposed to take.  */

static HOST_WIDE_INT
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && TREE_CODE (var) == VAR_DECL)
    {
      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  */
      if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
               || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
        /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
           or variables which were assigned a stack slot already by
           expand_one_stack_var_at - in the latter case DECL_ALIGN has been
           changed from the offset chosen to it.  */
        align = crtl->stack_alignment_estimated;
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
         it, which means that in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = POINTER_SIZE;
    }

  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after stack
         realign decision made.  */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;

  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (TREE_CODE (var) != VAR_DECL
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (TREE_CODE (var) != VAR_DECL && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (TREE_CODE (var) == VAR_DECL && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        expand_one_hard_reg_var (var);
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (!host_integerp (DECL_SIZE_UNIT (var), 1))
    {
      if (really_expand)
        {
          error ("size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar);
  else
    {
      if (really_expand)
        expand_one_stack_var (origvar);
      return tree_low_cst (DECL_SIZE_UNIT (var), 1);
    }
  return 0;
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  size_t i, j, old_sv_num, this_sv_num, new_sv_num;
  tree t;

  old_sv_num = toplevel ? 0 : stack_vars_num;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    if (TREE_USED (t)
        && ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
            || !DECL_NONSHAREABLE (t)))
      expand_one_var (t, toplevel, true);

  this_sv_num = stack_vars_num;

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);

  /* Since we do not track exact variable lifetimes (which is not even
     possible for variables whose address escapes), we mirror the block
     tree in the interference graph.  Here we cause all variables at this
     level, and all sublevels, to conflict.  */
  if (old_sv_num < this_sv_num)
    {
      new_sv_num = stack_vars_num;

      for (i = old_sv_num; i < new_sv_num; ++i)
        for (j = i < this_sv_num ? i : this_sv_num; j-- > old_sv_num ;)
          add_stack_var_conflict (i, j);
    }
}
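
/* For instance, if a block declares variable A and its two subblocks
   declare B and C respectively, the mirroring above records the
   conflicts A-B and A-C, but not B-C: variables in disjoint sibling
   scopes may still share a stack slot.  */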
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t ; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    if ((TREE_CODE (t) != VAR_DECL && TREE_CODE (t) != RESULT_DECL)
        || !DECL_NONSHAREABLE (t))
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t ; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}
/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = PARAM_VALUE (PARAM_SSP_BUFFER_SIZE);
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !host_integerp (TYPE_SIZE_UNIT (type), 1))
            len = max;
          else
            len = tree_low_cst (TYPE_SIZE_UNIT (type), 1);

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t ; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
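
/* For example, with the default --param ssp-buffer-size=8, a
   "char buf[64]" yields SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY,
   "char buf[4]" yields SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY,
   and a struct containing an int array yields
   SPCT_HAS_AGGREGATE | SPCT_HAS_ARRAY.  */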
/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == 2)
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}
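
/* Continuing the example above: with -fstack-protector-all
   (flag_stack_protect == 2), "char buf[4]" lands in phase 1 and the
   struct containing an array in phase 2; with plain -fstack-protector
   only "char buf[64]" gets a nonzero phase.  */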
/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (tree decl)
{
  return stack_protect_decl_phase (decl) == 1;
}

static bool
stack_protect_decl_phase_2 (tree decl)
{
  return stack_protect_decl_phase (decl) == 2;
}
/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.  */

static void
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    phase[i] = stack_protect_decl_phase (stack_vars[i].decl);

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = 0; j < i; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
}
/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}
/* Prepare for expanding variables.  */

static void
init_vars_expansion (void)
{
  tree t;
  unsigned ix;

  /* Set TREE_USED on all variables in the local_decls.  */
  FOR_EACH_LOCAL_DECL (cfun, ix, t)
    TREE_USED (t) = 1;

  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}
/* Free up stack variable graph data.  */

static void
fini_vars_expansion (void)
{
  size_t i, n = stack_vars_num;
  for (i = 0; i < n; i++)
    BITMAP_FREE (stack_vars[i].conflicts);
  XDELETEVEC (stack_vars);
  XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_alloc = stack_vars_num = 0;
}
/* Make a fair guess for the size of the stack frame of the function
   in NODE.  This doesn't have to be exact, the result is only used in
   the inline heuristics.  So we don't want to run the full stack var
   packing algorithm (which is quadratic in the number of stack vars).
   Instead, we calculate the total size of all stack vars.  This turns
   out to be a pretty fair estimate -- packing of stack vars doesn't
   happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (struct cgraph_node *node)
{
  HOST_WIDE_INT size = 0;
  size_t i;
  tree var;
  tree old_cur_fun_decl = current_function_decl;
  referenced_var_iterator rvi;
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

  current_function_decl = node->decl;
  push_cfun (fn);

  gcc_checking_assert (gimple_referenced_vars (fn));
  FOR_EACH_REFERENCED_VAR (fn, var, rvi)
    size += expand_one_var (var, true, false);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
      fini_vars_expansion ();
    }

  pop_cfun ();
  current_function_decl = old_cur_fun_decl;
  return size;
}
/* Expand all variables used in the function.  */

static void
expand_used_vars (void)
{
  tree var, outer_block = DECL_INITIAL (current_function_decl);
  VEC(tree,heap) *maybe_local_decls = NULL;
  unsigned i;
  unsigned len;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = STARTING_FRAME_OFFSET % align;
    frame_phase = off ? align - off : 0;
  }

  init_vars_expansion ();

  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = partition_to_var (SA.map, i);

      gcc_assert (is_gimple_reg (var));
      if (TREE_CODE (SSA_NAME_VAR (var)) == VAR_DECL)
        expand_one_var (var, true, true);
      else
        {
          /* This is a PARM_DECL or RESULT_DECL.  For those partitions that
             contain the default def (representing the parm or result itself)
             we don't do anything here.  But those which don't contain the
             default def (representing a temporary based on the parm/result)
             we need to allocate space just like for normal VAR_DECLs.  */
          if (!bitmap_bit_p (SA.partition_has_default_def, i))
            {
              expand_one_var (var, true, true);
              gcc_assert (SA.partition_to_pseudo[i]);
            }
        }
    }

  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */

  len = VEC_length (tree, cfun->local_decls);
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      bool expand_now = false;

      /* Expanded above already.  */
      if (is_gimple_reg (var))
        {
          TREE_USED (var) = 0;
          goto next;
        }
      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* If the variable is not associated with any block, then it
         was created by the optimizers, and could be live anywhere
         in the function.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
        {
          rtx rtl = DECL_RTL_IF_SET (var);

          /* Keep artificial non-ignored vars in cfun->local_decls
             chain until instantiate_decls.  */
          if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
            add_local_decl (cfun, var);
          else if (rtl == NULL_RTX)
            /* If rtl isn't set yet, which can happen e.g. with
               -fstack-protector, retry before returning from this
               function.  */
            VEC_safe_push (tree, heap, maybe_local_decls, var);
        }
    }

  /* We duplicated some of the decls in CFUN->LOCAL_DECLS.

     +-----------------+-----------------+
     | ...processed... | ...duplicates...|
     +-----------------+-----------------+
                       ^
                       +-- LEN points here.

     We just want the duplicates, as those are the artificial
     non-ignored vars that we want to keep until instantiate_decls.
     Move them down and truncate the array.  */
  if (!VEC_empty (tree, cfun->local_decls))
    VEC_block_remove (tree, cfun->local_decls, 0, len);

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);

  if (stack_vars_num > 0)
    {
      /* Due to the way alias sets work, no variables with non-conflicting
         alias sets may be assigned the same address.  Add conflicts to
         reflect this.  */
      add_alias_set_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect)
        add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  /* There are several conditions under which we should create a
     stack guard: protect-all, alloca used, protected decls present.  */
  if (flag_stack_protect == 2
      || (flag_stack_protect
          && (cfun->calls_alloca || has_protected_decls)))
    create_stack_guard ();

  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == 2)
            expand_stack_vars (stack_protect_decl_phase_2);
        }

      expand_stack_vars (NULL);

      fini_vars_expansion ();
    }

  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation hasn't assigned
     rtl to them.  */
  FOR_EACH_VEC_ELT_REVERSE (tree, maybe_local_decls, i, var)
    {
      rtx rtl = DECL_RTL_IF_SET (var);

      /* Keep artificial non-ignored vars in cfun->local_decls
         chain until instantiate_decls.  */
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
        add_local_decl (cfun, var);
    }
  VEC_free (tree, heap, maybe_local_decls);

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (!FRAME_GROWS_DOWNWARD)
        frame_offset += align - 1;
      frame_offset &= -align;
    }
}
/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple stmt, rtx since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
                         TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}
/* Maps the blocks that do not contain tree labels to rtx labels.  */

static struct pointer_map_t *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;
  gimple lab_stmt;
  void **elt;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  elt = pointer_map_contains (lab_rtx_for_bb, bb);
  if (elt)
    return (rtx) *elt;

  /* Find the tree label if it is present.  */

  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      lab_stmt = gsi_stmt (gsi);
      if (gimple_code (lab_stmt) != GIMPLE_LABEL)
        break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
        break;

      return label_rtx (lab);
    }

  elt = pointer_map_insert (lab_rtx_for_bb, bb);
  *elt = gen_label_rtx ();
  return (rtx) *elt;
}
/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  LAST is the
   last instruction before the just emitted jump sequence.  */

static void
maybe_cleanup_end_of_block (edge e, rtx last)
{
  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))
    {
      rtx insn;
      remove_edge (e);
      /* Now, we have a single successor block, if we have insns to
         insert on the remaining edge we potentially will insert
         it at the end of this block (if the dest block isn't feasible)
         in order to avoid splitting the edge.  This insertion will take
         place in front of the last jump.  But we might have emitted
         multiple jumps (conditional and one unconditional) to the
         same destination.  Inserting in front of the last one then
         is a problem.  See PR 40021.  We fix this by deleting all
         jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
         confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != last;)
        {
          insn = PREV_INSN (insn);
          if (JUMP_P (NEXT_INSN (insn)))
            {
              if (!any_condjump_p (NEXT_INSN (insn)))
                {
                  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
                  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
                }
              delete_insn (NEXT_INSN (insn));
            }
        }
    }
}
/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_COND.
   Returns a new basic block if we've terminated the current basic
   block and created a new one.  */

static basic_block
expand_gimple_cond (basic_block bb, gimple stmt)
{
  basic_block new_bb, dest;
  edge new_edge;
  edge true_edge;
  edge false_edge;
  rtx last2, last;
  enum tree_code code;
  tree op0, op1;

  code = gimple_cond_code (stmt);
  op0 = gimple_cond_lhs (stmt);
  op1 = gimple_cond_rhs (stmt);
  /* We're sometimes presented with such code:
       D.123_1 = x < y;
       if (D.123_1 != 0)
         ...
     This would expand to two comparisons which then later might
     be cleaned up by combine.  But some pattern matchers like if-conversion
     work better when there's only one compare, so make up for this
     here as special exception if TER would have made the same change.  */
  if (gimple_cond_single_var_p (stmt)
      && SA.values
      && TREE_CODE (op0) == SSA_NAME
      && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
    {
      gimple second = SSA_NAME_DEF_STMT (op0);
      if (gimple_code (second) == GIMPLE_ASSIGN)
        {
          enum tree_code code2 = gimple_assign_rhs_code (second);
          if (TREE_CODE_CLASS (code2) == tcc_comparison)
            {
              code = code2;
              op0 = gimple_assign_rhs1 (second);
              op1 = gimple_assign_rhs2 (second);
            }
          /* If jumps are cheap turn some more codes into
             jumpy sequences.  */
          else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4)
            {
              if ((code2 == BIT_AND_EXPR
                   && TYPE_PRECISION (TREE_TYPE (op0)) == 1
                   && TREE_CODE (gimple_assign_rhs2 (second)) != INTEGER_CST)
                  || code2 == TRUTH_AND_EXPR)
                {
                  code = TRUTH_ANDIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
                }
              else if (code2 == BIT_IOR_EXPR || code2 == TRUTH_OR_EXPR)
                {
                  code = TRUTH_ORIF_EXPR;
                  op0 = gimple_assign_rhs1 (second);
                  op1 = gimple_assign_rhs2 (second);
                }
            }
        }
    }

  last2 = last = get_last_insn ();

  extract_true_false_edges_from_block (bb, &true_edge, &false_edge);
  set_curr_insn_source_location (gimple_location (stmt));
  set_curr_insn_block (gimple_block (stmt));

  /* These flags have no purpose in RTL land.  */
  true_edge->flags &= ~EDGE_TRUE_VALUE;
  false_edge->flags &= ~EDGE_FALSE_VALUE;

  /* We can either have a pure conditional jump with one fallthru edge or
     two-way jump that needs to be decomposed into two basic blocks.  */
  if (false_edge->dest == bb->next_bb)
    {
      jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
                true_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (true_edge->goto_locus)
        {
          set_curr_insn_source_location (true_edge->goto_locus);
          set_curr_insn_block (true_edge->goto_block);
          true_edge->goto_locus = curr_insn_locator ();
        }
      true_edge->goto_block = NULL;
      false_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (false_edge, last);
      return NULL;
    }
  if (true_edge->dest == bb->next_bb)
    {
      jumpifnot_1 (code, op0, op1, label_rtx_for_bb (false_edge->dest),
                   false_edge->probability);
      maybe_dump_rtl_for_gimple_stmt (stmt, last);
      if (false_edge->goto_locus)
        {
          set_curr_insn_source_location (false_edge->goto_locus);
          set_curr_insn_block (false_edge->goto_block);
          false_edge->goto_locus = curr_insn_locator ();
        }
      false_edge->goto_block = NULL;
      true_edge->flags |= EDGE_FALLTHRU;
      maybe_cleanup_end_of_block (true_edge, last);
      return NULL;
    }

  jumpif_1 (code, op0, op1, label_rtx_for_bb (true_edge->dest),
            true_edge->probability);
  last = get_last_insn ();
  if (false_edge->goto_locus)
    {
      set_curr_insn_source_location (false_edge->goto_locus);
      set_curr_insn_block (false_edge->goto_block);
      false_edge->goto_locus = curr_insn_locator ();
    }
  false_edge->goto_block = NULL;
  emit_jump (label_rtx_for_bb (false_edge->dest));

  BB_END (bb) = last;
  if (BARRIER_P (BB_END (bb)))
    BB_END (bb) = PREV_INSN (BB_END (bb));
  update_bb_for_insn (bb);

  new_bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);
  dest = false_edge->dest;
  redirect_edge_succ (false_edge, new_bb);
  false_edge->flags |= EDGE_FALLTHRU;
  new_bb->count = false_edge->count;
  new_bb->frequency = EDGE_FREQUENCY (false_edge);
  new_edge = make_edge (new_bb, dest, 0);
  new_edge->probability = REG_BR_PROB_BASE;
  new_edge->count = new_bb->count;
  if (BARRIER_P (BB_END (new_bb)))
    BB_END (new_bb) = PREV_INSN (BB_END (new_bb));
  update_bb_for_insn (new_bb);

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  if (true_edge->goto_locus)
    {
      set_curr_insn_source_location (true_edge->goto_locus);
      set_curr_insn_block (true_edge->goto_block);
      true_edge->goto_locus = curr_insn_locator ();
    }
  true_edge->goto_block = NULL;

  return new_bb;
}
/* Mark all calls that can have a transaction restart.  */

static void
mark_transaction_restart_calls (gimple stmt)
{
  struct tm_restart_node dummy;
  void **slot;

  if (!cfun->gimple_df->tm_restart)
    return;

  dummy.stmt = stmt;
  slot = htab_find_slot (cfun->gimple_df->tm_restart, &dummy, NO_INSERT);
  if (slot)
    {
      struct tm_restart_node *n = (struct tm_restart_node *) *slot;
      tree list = n->label_or_list;
      rtx insn;

      for (insn = next_real_insn (get_last_insn ());
           !CALL_P (insn);
           insn = next_real_insn (insn))
        continue;

      if (TREE_CODE (list) == LABEL_DECL)
        add_reg_note (insn, REG_TM, label_rtx (list));
      else
        for (; list ; list = TREE_CHAIN (list))
          add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
    }
}
/* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
   statement STMT.  */

static void
expand_call_stmt (gimple stmt)
{
  tree exp, decl, lhs;
  bool builtin_p;
  size_t i;

  if (gimple_call_internal_p (stmt))
    {
      expand_internal_call (stmt);
      return;
    }

  exp = build_vl_exp (CALL_EXPR, gimple_call_num_args (stmt) + 3);

  CALL_EXPR_FN (exp) = gimple_call_fn (stmt);
  decl = gimple_call_fndecl (stmt);
  builtin_p = decl && DECL_BUILT_IN (decl);

  /* If this is not a builtin function, the function type through which the
     call is made may be different from the type of the function.  */
  if (!builtin_p)
    CALL_EXPR_FN (exp)
      = fold_convert (build_pointer_type (gimple_call_fntype (stmt)),
                      CALL_EXPR_FN (exp));

  TREE_TYPE (exp) = gimple_call_return_type (stmt);
  CALL_EXPR_STATIC_CHAIN (exp) = gimple_call_chain (stmt);

  for (i = 0; i < gimple_call_num_args (stmt); i++)
    {
      tree arg = gimple_call_arg (stmt, i);
      gimple def;
      /* TER addresses into arguments of builtin functions so we have a
         chance to infer more correct alignment information.  See PR39954.  */
      if (builtin_p
          && TREE_CODE (arg) == SSA_NAME
          && (def = get_gimple_for_ssa_name (arg))
          && gimple_assign_rhs_code (def) == ADDR_EXPR)
        arg = gimple_assign_rhs1 (def);
      CALL_EXPR_ARG (exp, i) = arg;
    }

  if (gimple_has_side_effects (stmt))
    TREE_SIDE_EFFECTS (exp) = 1;

  if (gimple_call_nothrow_p (stmt))
    TREE_NOTHROW (exp) = 1;

  CALL_EXPR_TAILCALL (exp) = gimple_call_tail_p (stmt);
  CALL_EXPR_RETURN_SLOT_OPT (exp) = gimple_call_return_slot_opt_p (stmt);
  if (decl
      && DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL
      && (DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA
          || DECL_FUNCTION_CODE (decl) == BUILT_IN_ALLOCA_WITH_ALIGN))
    CALL_ALLOCA_FOR_VAR_P (exp) = gimple_call_alloca_for_var_p (stmt);

  CALL_FROM_THUNK_P (exp) = gimple_call_from_thunk_p (stmt);
  CALL_CANNOT_INLINE_P (exp) = gimple_call_cannot_inline_p (stmt);
  CALL_EXPR_VA_ARG_PACK (exp) = gimple_call_va_arg_pack_p (stmt);
  SET_EXPR_LOCATION (exp, gimple_location (stmt));
  TREE_BLOCK (exp) = gimple_block (stmt);

  /* Ensure RTL is created for debug args.  */
  if (decl && DECL_HAS_DEBUG_ARGS_P (decl))
    {
      VEC(tree, gc) **debug_args = decl_debug_args_lookup (decl);
      unsigned int ix;
      tree dtemp;

      if (debug_args)
        for (ix = 1; VEC_iterate (tree, *debug_args, ix, dtemp); ix += 2)
          {
            gcc_assert (TREE_CODE (dtemp) == DEBUG_EXPR_DECL);
            expand_debug_expr (dtemp);
          }
    }

  lhs = gimple_call_lhs (stmt);
  if (lhs)
    expand_assignment (lhs, exp, false);
  else
    expand_expr_real_1 (exp, const0_rtx, VOIDmode, EXPAND_NORMAL, NULL);

  mark_transaction_restart_calls (stmt);
}
/* A subroutine of expand_gimple_stmt, expanding one gimple statement
   STMT that doesn't require special handling for outgoing edges.  That
   is no tailcalls and no GIMPLE_COND.  */

static void
expand_gimple_stmt_1 (gimple stmt)
{
  tree op0;

  set_curr_insn_source_location (gimple_location (stmt));
  set_curr_insn_block (gimple_block (stmt));

  switch (gimple_code (stmt))
    {
    case GIMPLE_GOTO:
      op0 = gimple_goto_dest (stmt);
      if (TREE_CODE (op0) == LABEL_DECL)
        expand_goto (op0);
      else
        expand_computed_goto (op0);
      break;
    case GIMPLE_LABEL:
      expand_label (gimple_label_label (stmt));
      break;
    case GIMPLE_NOP:
    case GIMPLE_PREDICT:
      break;
    case GIMPLE_SWITCH:
      expand_case (stmt);
      break;
    case GIMPLE_ASM:
      expand_asm_stmt (stmt);
      break;
    case GIMPLE_CALL:
      expand_call_stmt (stmt);
      break;

    case GIMPLE_RETURN:
      op0 = gimple_return_retval (stmt);

      if (op0 && op0 != error_mark_node)
        {
          tree result = DECL_RESULT (current_function_decl);

          /* If we are not returning the current function's RESULT_DECL,
             build an assignment to it.  */
          if (op0 != result)
            {
              /* I believe that a function's RESULT_DECL is unique.  */
              gcc_assert (TREE_CODE (op0) != RESULT_DECL);

              /* ??? We'd like to use simply expand_assignment here,
                 but this fails if the value is of BLKmode but the return
                 decl is a register.  expand_return has special handling
                 for this combination, which eventually should move
                 to common code.  See comments there.  Until then, let's
                 build a modify expression :-/  */
              op0 = build2 (MODIFY_EXPR, TREE_TYPE (result),
                            result, op0);
            }
        }
      if (!op0)
        expand_null_return ();
      else
        expand_return (op0);
      break;

    case GIMPLE_ASSIGN:
      {
        tree lhs = gimple_assign_lhs (stmt);

        /* Tree expand used to fiddle with |= and &= of two bitfield
           COMPONENT_REFs here.  This can't happen with gimple, the LHS
           of binary assigns must be a gimple reg.  */

        if (TREE_CODE (lhs) != SSA_NAME
            || get_gimple_rhs_class (gimple_expr_code (stmt))
               == GIMPLE_SINGLE_RHS)
          {
            tree rhs = gimple_assign_rhs1 (stmt);
            gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt))
                        == GIMPLE_SINGLE_RHS);
            if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (rhs))
              SET_EXPR_LOCATION (rhs, gimple_location (stmt));
            expand_assignment (lhs, rhs,
                               gimple_assign_nontemporal_move_p (stmt));
          }
        else
          {
            rtx target, temp;
            bool nontemporal = gimple_assign_nontemporal_move_p (stmt);
            struct separate_ops ops;
            bool promoted = false;

            target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
            if (GET_CODE (target) == SUBREG && SUBREG_PROMOTED_VAR_P (target))
              promoted = true;

            ops.code = gimple_assign_rhs_code (stmt);
            ops.type = TREE_TYPE (lhs);
            switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
              {
              case GIMPLE_TERNARY_RHS:
                ops.op2 = gimple_assign_rhs3 (stmt);
                /* Fallthru */
              case GIMPLE_BINARY_RHS:
                ops.op1 = gimple_assign_rhs2 (stmt);
                /* Fallthru */
              case GIMPLE_UNARY_RHS:
                ops.op0 = gimple_assign_rhs1 (stmt);
                break;
              default:
                gcc_unreachable ();
              }
            ops.location = gimple_location (stmt);

            /* If we want to use a nontemporal store, force the value to
               register first.  If we store into a promoted register,
               don't directly expand to target.  */
            temp = nontemporal || promoted ? NULL_RTX : target;
            temp = expand_expr_real_2 (&ops, temp, GET_MODE (target),
                                       EXPAND_NORMAL);

            if (temp == target)
              ;
            else if (promoted)
              {
                int unsignedp = SUBREG_PROMOTED_UNSIGNED_P (target);
                /* If TEMP is a VOIDmode constant, use convert_modes to make
                   sure that we properly convert it.  */
                if (CONSTANT_P (temp) && GET_MODE (temp) == VOIDmode)
                  {
                    temp = convert_modes (GET_MODE (target),
                                          TYPE_MODE (ops.type),
                                          temp, unsignedp);
                    temp = convert_modes (GET_MODE (SUBREG_REG (target)),
                                          GET_MODE (target), temp, unsignedp);
                  }

                convert_move (SUBREG_REG (target), temp, unsignedp);
              }
            else if (nontemporal && emit_storent_insn (target, temp))
              ;
            else
              {
                temp = force_operand (temp, target);
                if (temp != target)
                  emit_move_insn (target, temp);
              }
          }
      }
      break;

    default:
      gcc_unreachable ();
    }
}
/* Expand one gimple statement STMT and return the last RTL instruction
   before any of the newly generated ones.

   In addition to generating the necessary RTL instructions this also
   sets REG_EH_REGION notes if necessary and sets the current source
   location for diagnostics.  */

static rtx
expand_gimple_stmt (gimple stmt)
{
  location_t saved_location = input_location;
  rtx last = get_last_insn ();
  int lp_nr;

  gcc_assert (cfun);

  /* We need to save and restore the current source location so that errors
     discovered during expansion are emitted with the right location.  But
     it would be better if the diagnostic routines used the source location
     embedded in the tree nodes rather than globals.  */
  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  expand_gimple_stmt_1 (stmt);

  /* Free any temporaries used to evaluate this statement.  */
  free_temp_slots ();

  input_location = saved_location;

  /* Mark all insns that may trap.  */
  lp_nr = lookup_stmt_eh_lp (stmt);
  if (lp_nr)
    {
      rtx insn;
      for (insn = next_real_insn (last); insn;
	   insn = next_real_insn (insn))
	{
	  if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* If we want exceptions for non-call insns, any
		 may_trap_p instruction may throw.  */
	      && GET_CODE (PATTERN (insn)) != CLOBBER
	      && GET_CODE (PATTERN (insn)) != USE
	      && insn_could_throw_p (insn))
	    make_reg_eh_region_note (insn, 0, lp_nr);
	}
    }

  return last;
}
/* A subroutine of expand_gimple_basic_block.  Expand one GIMPLE_CALL
   that has CALL_EXPR_TAILCALL set.  Returns non-null if we actually
   generated a tail call (something that might be denied by the ABI
   rules governing the call; see calls.c).

   Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
   can still reach the rest of BB.  The case here is __builtin_sqrt,
   where the NaN result goes through the external function (with a
   tailcall) and the normal result happens via a sqrt instruction.  */

static basic_block
expand_gimple_tailcall (basic_block bb, gimple stmt, bool *can_fallthru)
{
  rtx last2, last;
  edge e;
  edge_iterator ei;
  int probability;
  gcov_type count;

  last2 = last = expand_gimple_stmt (stmt);

  for (last = NEXT_INSN (last); last; last = NEXT_INSN (last))
    if (CALL_P (last) && SIBLING_CALL_P (last))
      goto found;

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  *can_fallthru = true;
  return NULL;

 found:
  /* ??? Wouldn't it be better to just reset any pending stack adjust?
     Any instructions emitted here are about to be deleted.  */
  do_pending_stack_adjust ();

  /* Remove any non-eh, non-abnormal edges that don't go to exit.  */
  /* ??? I.e. the fallthrough edge.  HOWEVER!  If there were to be
     EH or abnormal edges, we shouldn't have created a tail call in
     the first place.  So it seems to me we should just be removing
     all edges here, or redirecting the existing fallthru edge to
     the exit block.  */

  probability = 0;
  count = 0;

  for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
    {
      if (!(e->flags & (EDGE_ABNORMAL | EDGE_EH)))
	{
	  if (e->dest != EXIT_BLOCK_PTR)
	    {
	      e->dest->count -= e->count;
	      e->dest->frequency -= EDGE_FREQUENCY (e);
	      if (e->dest->count < 0)
		e->dest->count = 0;
	      if (e->dest->frequency < 0)
		e->dest->frequency = 0;
	    }
	  count += e->count;
	  probability += e->probability;
	  remove_edge (e);
	}
      else
	ei_next (&ei);
    }

  /* This is somewhat ugly: the call_expr expander often emits instructions
     after the sibcall (to perform the function return).  These confuse the
     find_many_sub_basic_blocks code, so we need to get rid of these.  */
  last = NEXT_INSN (last);
  gcc_assert (BARRIER_P (last));

  *can_fallthru = false;
  while (NEXT_INSN (last))
    {
      /* For instance an sqrt builtin expander expands if with
	 sibcall in the then and label for `else`.  */
      if (LABEL_P (NEXT_INSN (last)))
	{
	  *can_fallthru = true;
	  break;
	}
      delete_insn (NEXT_INSN (last));
    }

  e = make_edge (bb, EXIT_BLOCK_PTR, EDGE_ABNORMAL | EDGE_SIBCALL);
  e->probability += probability;
  e->count += count;
  BB_END (bb) = last;
  update_bb_for_insn (bb);

  if (NEXT_INSN (last))
    {
      bb = create_basic_block (NEXT_INSN (last), get_last_insn (), bb);

      last = BB_END (bb);
      if (BARRIER_P (last))
	BB_END (bb) = PREV_INSN (last);
    }

  maybe_dump_rtl_for_gimple_stmt (stmt, last2);

  return bb;
}
/* Return the difference between the floor and the truncated result of
   a signed division by OP1 with remainder MOD.  */

static rtx
floor_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
{
  /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_LT (BImode,
			gen_rtx_DIV (mode, op1, mod),
			const0_rtx),
      constm1_rtx, const0_rtx),
     const0_rtx);
}
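
/* Worked example for floor_sdiv_adjust: -7 / 2 truncates to -3 with
   remainder -1; op1 / mod = 2 / -1 is negative, so the adjustment is -1
   and the floor result is -3 + -1 = -4.  */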
/* Return the difference between the ceil and the truncated result of
   a signed division by OP1 with remainder MOD.  */

static rtx
ceil_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
{
  /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_GT (BImode,
			gen_rtx_DIV (mode, op1, mod),
			const0_rtx),
      const1_rtx, const0_rtx),
     const0_rtx);
}
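
/* Worked example for ceil_sdiv_adjust: 7 / 2 truncates to 3 with
   remainder 1; op1 / mod = 2 / 1 is positive, so the adjustment is 1
   and the ceil result is 3 + 1 = 4.  */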
/* Return the difference between the ceil and the truncated result of
   an unsigned division by OP1 with remainder MOD.  */

static rtx
ceil_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
{
  /* (mod != 0 ? 1 : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     const1_rtx, const0_rtx);
}
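
/* Worked example for ceil_udiv_adjust: 7u / 2u truncates to 3 with
   remainder 1; any nonzero remainder of an unsigned division means
   rounding up by exactly one, which is why OP1 itself is unused.  */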
/* Return the difference between the rounded and the truncated result
   of a signed division by OP1 with remainder MOD.  Halfway cases are
   rounded away from zero, rather than to the nearest even number.  */

static rtx
round_sdiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
{
  /* (abs (mod) >= abs (op1) - abs (mod)
	? (op1 / mod > 0 ? 1 : -1)
	: 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
		       gen_rtx_MINUS (mode,
				      gen_rtx_ABS (mode, op1),
				      gen_rtx_ABS (mode, mod))),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_GT (BImode,
			gen_rtx_DIV (mode, op1, mod),
			const0_rtx),
      const1_rtx, constm1_rtx),
     const0_rtx);
}
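
/* Worked example for round_sdiv_adjust: 7 / 2 truncates to 3 with
   remainder 1; abs (mod) = 1 equals abs (op1) - abs (mod) = 1, so this
   is a halfway case and, op1 / mod being positive, the adjustment is 1,
   giving 4 (away from zero).  Likewise -7 / 2 adjusts -3 by -1 to -4.  */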
/* Return the difference between the rounded and the truncated result
   of an unsigned division by OP1 with remainder MOD.  Halfway cases
   are rounded away from zero, rather than to the nearest even
   number.  */

static rtx
round_udiv_adjust (enum machine_mode mode, rtx mod, rtx op1)
{
  /* (mod >= op1 - mod ? 1 : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_GE (BImode, mod,
		       gen_rtx_MINUS (mode, op1, mod)),
     const1_rtx, const0_rtx);
}
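
/* Worked example for round_udiv_adjust: 7u / 2u has mod = 1 >=
   op1 - mod = 1, so the truncated quotient 3 is adjusted to 4; for
   9u / 4u, mod = 1 < 3, no adjustment is made and the result stays 2.  */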
/* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
   any rtl.  */

static rtx
convert_debug_memory_address (enum machine_mode mode, rtx x,
			      addr_space_t as)
{
  enum machine_mode xmode = GET_MODE (x);

#ifndef POINTERS_EXTEND_UNSIGNED
  gcc_assert (mode == Pmode
	      || mode == targetm.addr_space.address_mode (as));
  gcc_assert (xmode == mode || xmode == VOIDmode);
#else
  rtx temp;
  enum machine_mode address_mode = targetm.addr_space.address_mode (as);
  enum machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);

  gcc_assert (mode == address_mode || mode == pointer_mode);

  if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
    return x;

  if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
    x = simplify_gen_subreg (mode, x, xmode,
			     subreg_lowpart_offset (mode, xmode));
  else if (POINTERS_EXTEND_UNSIGNED > 0)
    x = gen_rtx_ZERO_EXTEND (mode, x);
  else if (!POINTERS_EXTEND_UNSIGNED)
    x = gen_rtx_SIGN_EXTEND (mode, x);
  else
    {
      switch (GET_CODE (x))
	{
	case SUBREG:
	  if ((SUBREG_PROMOTED_VAR_P (x)
	       || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
	       || (GET_CODE (SUBREG_REG (x)) == PLUS
		   && REG_P (XEXP (SUBREG_REG (x), 0))
		   && REG_POINTER (XEXP (SUBREG_REG (x), 0))
		   && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
	      && GET_MODE (SUBREG_REG (x)) == mode)
	    return SUBREG_REG (x);
	  break;
	case LABEL_REF:
	  temp = gen_rtx_LABEL_REF (mode, XEXP (x, 0));
	  LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
	  return temp;
	case SYMBOL_REF:
	  temp = shallow_copy_rtx (x);
	  PUT_MODE (temp, mode);
	  return temp;
	case CONST:
	  temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
	  if (temp)
	    temp = gen_rtx_CONST (mode, temp);
	  return temp;
	case PLUS:
	case MINUS:
	  if (CONST_INT_P (XEXP (x, 1)))
	    {
	      temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
	      if (temp)
		return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
	    }
	  break;
	default:
	  break;
	}
      /* Don't know how to express ptr_extend as operation in debug info.  */
      return NULL;
    }
#endif /* POINTERS_EXTEND_UNSIGNED */

  return x;
}
/* Return an RTX equivalent to the value of the parameter DECL.  */

static rtx
expand_debug_parm_decl (tree decl)
{
  rtx incoming = DECL_INCOMING_RTL (decl);

  if (incoming
      && GET_MODE (incoming) != BLKmode
      && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
	  || (MEM_P (incoming)
	      && REG_P (XEXP (incoming, 0))
	      && HARD_REGISTER_P (XEXP (incoming, 0)))))
    {
      rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));

#ifdef HAVE_window_save
      /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
	 If the target machine has an explicit window save instruction, the
	 actual entry value is the corresponding OUTGOING_REGNO instead.  */
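      /* For instance, on SPARC (an illustration, assuming its register
	 windows; not target documentation): an argument the body reads
	 from %i0 was still in the caller's %o0 at the moment of entry,
	 before the prologue's `save' insn rotated the register window,
	 so the entry value must name the %o register.  */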
      if (REG_P (incoming)
	  && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
	incoming
	  = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
				OUTGOING_REGNO (REGNO (incoming)), 0);
      else if (MEM_P (incoming))
	{
	  rtx reg = XEXP (incoming, 0);
	  if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
	    {
	      reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
	      incoming = replace_equiv_address_nv (incoming, reg);
	    }
	}
#endif

      ENTRY_VALUE_EXP (rtl) = incoming;
      return rtl;
    }

  if (incoming
      && GET_MODE (incoming) != BLKmode
      && !TREE_ADDRESSABLE (decl)
      && MEM_P (incoming)
      && (XEXP (incoming, 0) == virtual_incoming_args_rtx
	  || (GET_CODE (XEXP (incoming, 0)) == PLUS
	      && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
	      && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
    return incoming;

  return NULL_RTX;
}
/* Return an RTX equivalent to the value of the tree expression EXP.  */

static rtx
expand_debug_expr (tree exp)
{
  rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
  enum machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  enum machine_mode inner_mode = VOIDmode;
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
  addr_space_t as;

  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case tcc_expression:
      switch (TREE_CODE (exp))
	{
	case COND_EXPR:
	case DOT_PROD_EXPR:
	case WIDEN_MULT_PLUS_EXPR:
	case WIDEN_MULT_MINUS_EXPR:
	case FMA_EXPR:
	  goto ternary;

	case TRUTH_ANDIF_EXPR:
	case TRUTH_ORIF_EXPR:
	case TRUTH_AND_EXPR:
	case TRUTH_OR_EXPR:
	case TRUTH_XOR_EXPR:
	  goto binary;

	case TRUTH_NOT_EXPR:
	  goto unary;

	default:
	  break;
	}
      break;

    ternary:
      op2 = expand_debug_expr (TREE_OPERAND (exp, 2));
      if (!op2)
	return NULL_RTX;
      /* Fall through.  */

    binary:
    case tcc_binary:
    case tcc_comparison:
      op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
      if (!op1)
	return NULL_RTX;
      /* Fall through.  */

    unary:
    case tcc_unary:
      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
      if (!op0)
	return NULL_RTX;
      break;

    case tcc_type:
    case tcc_statement:
      gcc_unreachable ();

    case tcc_constant:
    case tcc_exceptional:
    case tcc_declaration:
    case tcc_reference:
    case tcc_vl_exp:
      break;
    }

  switch (TREE_CODE (exp))
    {
    case STRING_CST:
      if (!lookup_constant_def (exp))
	{
	  if (strlen (TREE_STRING_POINTER (exp)) + 1
	      != (size_t) TREE_STRING_LENGTH (exp))
	    return NULL_RTX;
	  op0 = gen_rtx_CONST_STRING (Pmode, TREE_STRING_POINTER (exp));
	  op0 = gen_rtx_MEM (BLKmode, op0);
	  set_mem_attributes (op0, exp, 0);
	  return op0;
	}
      /* Fall through...  */

    case INTEGER_CST:
    case REAL_CST:
    case FIXED_CST:
      op0 = expand_expr (exp, NULL_RTX, mode, EXPAND_INITIALIZER);
      return op0;

    case COMPLEX_CST:
      gcc_assert (COMPLEX_MODE_P (mode));
      op0 = expand_debug_expr (TREE_REALPART (exp));
      op1 = expand_debug_expr (TREE_IMAGPART (exp));
      return gen_rtx_CONCAT (mode, op0, op1);

    case DEBUG_EXPR_DECL:
      op0 = DECL_RTL_IF_SET (exp);

      if (op0)
	return op0;

      op0 = gen_rtx_DEBUG_EXPR (mode);
      DEBUG_EXPR_TREE_DECL (op0) = exp;
      SET_DECL_RTL (exp, op0);

      return op0;

    case VAR_DECL:
    case PARM_DECL:
    case FUNCTION_DECL:
    case LABEL_DECL:
    case CONST_DECL:
    case RESULT_DECL:
      op0 = DECL_RTL_IF_SET (exp);

      /* This decl was probably optimized away.  */
      if (!op0)
	{
	  if (TREE_CODE (exp) != VAR_DECL
	      || DECL_EXTERNAL (exp)
	      || !TREE_STATIC (exp)
	      || !DECL_NAME (exp)
	      || DECL_HARD_REGISTER (exp)
	      || DECL_IN_CONSTANT_POOL (exp)
	      || mode == VOIDmode)
	    return NULL;

	  op0 = make_decl_rtl_for_debug (exp);
	  if (!MEM_P (op0)
	      || GET_CODE (XEXP (op0, 0)) != SYMBOL_REF
	      || SYMBOL_REF_DECL (XEXP (op0, 0)) != exp)
	    return NULL;
	}
      else
	op0 = copy_rtx (op0);

      if (GET_MODE (op0) == BLKmode
	  /* If op0 is not BLKmode, but BLKmode is, adjust_mode
	     below would ICE.  While it is likely a FE bug,
	     try to be robust here.  See PR43166.  */
	  || mode == BLKmode
	  || (mode == VOIDmode && GET_MODE (op0) != VOIDmode))
	{
	  gcc_assert (MEM_P (op0));
	  op0 = adjust_address_nv (op0, mode, 0);
	  return op0;
	}

      /* Fall through.  */

    adjust_mode:
    case PAREN_EXPR:
    CASE_CONVERT:
      {
	inner_mode = GET_MODE (op0);

	if (mode == inner_mode)
	  return op0;

	if (inner_mode == VOIDmode)
	  {
	    if (TREE_CODE (exp) == SSA_NAME)
	      inner_mode = TYPE_MODE (TREE_TYPE (exp));
	    else
	      inner_mode = TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp, 0)));
	    if (mode == inner_mode)
	      return op0;
	  }

	if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
	  {
	    if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
	      op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
	    else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
	      op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
	    else
	      op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
	  }
	else if (FLOAT_MODE_P (mode))
	  {
	    gcc_assert (TREE_CODE (exp) != SSA_NAME);
	    if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
	      op0 = simplify_gen_unary (UNSIGNED_FLOAT, mode, op0, inner_mode);
	    else
	      op0 = simplify_gen_unary (FLOAT, mode, op0, inner_mode);
	  }
	else if (FLOAT_MODE_P (inner_mode))
	  {
	    if (unsignedp)
	      op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
	    else
	      op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
	  }
	else if (CONSTANT_P (op0)
		 || GET_MODE_PRECISION (mode) <= GET_MODE_PRECISION (inner_mode))
	  op0 = simplify_gen_subreg (mode, op0, inner_mode,
				     subreg_lowpart_offset (mode,
							    inner_mode));
	else if (TREE_CODE_CLASS (TREE_CODE (exp)) == tcc_unary
		 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0)))
		 : unsignedp)
	  op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
	else
	  op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);

	return op0;
      }

    case MEM_REF:
      if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
	{
	  tree newexp = fold_binary (MEM_REF, TREE_TYPE (exp),
				     TREE_OPERAND (exp, 0),
				     TREE_OPERAND (exp, 1));
	  if (newexp)
	    return expand_debug_expr (newexp);
	}
      /* FALLTHROUGH */
    case INDIRECT_REF:
      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
      if (!op0)
	return NULL;

      if (TREE_CODE (exp) == MEM_REF)
	{
	  if (GET_CODE (op0) == DEBUG_IMPLICIT_PTR
	      || (GET_CODE (op0) == PLUS
		  && GET_CODE (XEXP (op0, 0)) == DEBUG_IMPLICIT_PTR))
	    /* (mem (debug_implicit_ptr)) might confuse aliasing.
	       Instead just use get_inner_reference.  */
	    goto component_ref;

	  op1 = expand_debug_expr (TREE_OPERAND (exp, 1));
	  if (!op1 || !CONST_INT_P (op1))
	    return NULL;

	  op0 = plus_constant (op0, INTVAL (op1));
	}

      if (POINTER_TYPE_P (TREE_TYPE (exp)))
	as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
      else
	as = ADDR_SPACE_GENERIC;

      op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
					  op0, as);
      if (op0 == NULL_RTX)
	return NULL;

      op0 = gen_rtx_MEM (mode, op0);
      set_mem_attributes (op0, exp, 0);
      if (TREE_CODE (exp) == MEM_REF
	  && !is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
	set_mem_expr (op0, NULL_TREE);
      set_mem_addr_space (op0, as);

      return op0;

    case TARGET_MEM_REF:
      if (TREE_CODE (TMR_BASE (exp)) == ADDR_EXPR
	  && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp), 0)))
	return NULL;

      op0 = expand_debug_expr
	    (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp)), exp));
      if (!op0)
	return NULL;

      if (POINTER_TYPE_P (TREE_TYPE (exp)))
	as = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp)));
      else
	as = ADDR_SPACE_GENERIC;

      op0 = convert_debug_memory_address (targetm.addr_space.address_mode (as),
					  op0, as);
      if (op0 == NULL_RTX)
	return NULL;

      op0 = gen_rtx_MEM (mode, op0);

      set_mem_attributes (op0, exp, 0);
      set_mem_addr_space (op0, as);

      return op0;

    component_ref:
    case ARRAY_REF:
    case ARRAY_RANGE_REF:
    case COMPONENT_REF:
    case BIT_FIELD_REF:
    case REALPART_EXPR:
    case IMAGPART_EXPR:
    case VIEW_CONVERT_EXPR:
      {
	enum machine_mode mode1;
	HOST_WIDE_INT bitsize, bitpos;
	tree offset;
	int volatilep = 0;
	tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
					&mode1, &unsignedp, &volatilep, false);
	rtx orig_op0;

	if (bitsize == 0)
	  return NULL;

	orig_op0 = op0 = expand_debug_expr (tem);

	if (!op0)
	  return NULL;

	if (offset)
	  {
	    enum machine_mode addrmode, offmode;

	    if (!MEM_P (op0))
	      return NULL;

	    op0 = XEXP (op0, 0);
	    addrmode = GET_MODE (op0);
	    if (addrmode == VOIDmode)
	      addrmode = Pmode;

	    op1 = expand_debug_expr (offset);
	    if (!op1)
	      return NULL;

	    offmode = GET_MODE (op1);
	    if (offmode == VOIDmode)
	      offmode = TYPE_MODE (TREE_TYPE (offset));

	    if (addrmode != offmode)
	      op1 = simplify_gen_subreg (addrmode, op1, offmode,
					 subreg_lowpart_offset (addrmode,
								offmode));

	    /* Don't use offset_address here, we don't need a
	       recognizable address, and we don't want to generate
	       code.  */
	    op0 = gen_rtx_MEM (mode, simplify_gen_binary (PLUS, addrmode,
							  op0, op1));
	  }

	if (MEM_P (op0))
	  {
	    if (mode1 == VOIDmode)
	      /* Bitfield.  */
	      mode1 = smallest_mode_for_size (bitsize, MODE_INT);
	    if (bitpos >= BITS_PER_UNIT)
	      {
		op0 = adjust_address_nv (op0, mode1, bitpos / BITS_PER_UNIT);
		bitpos %= BITS_PER_UNIT;
	      }
	    else if (bitpos < 0)
	      {
		HOST_WIDE_INT units
		  = (-bitpos + BITS_PER_UNIT - 1) / BITS_PER_UNIT;
		op0 = adjust_address_nv (op0, mode1, units);
		bitpos += units * BITS_PER_UNIT;
	      }
	    else if (bitpos == 0 && bitsize == GET_MODE_BITSIZE (mode))
	      op0 = adjust_address_nv (op0, mode, 0);
	    else if (GET_MODE (op0) != mode1)
	      op0 = adjust_address_nv (op0, mode1, 0);
	    else
	      op0 = copy_rtx (op0);
	    if (op0 == orig_op0)
	      op0 = shallow_copy_rtx (op0);
	    set_mem_attributes (op0, exp, 0);
	  }

	if (bitpos == 0 && mode == GET_MODE (op0))
	  return op0;

	if (bitpos < 0)
	  return NULL;

	if (GET_MODE (op0) == BLKmode)
	  return NULL;

	if ((bitpos % BITS_PER_UNIT) == 0
	    && bitsize == GET_MODE_BITSIZE (mode1))
	  {
	    enum machine_mode opmode = GET_MODE (op0);

	    if (opmode == VOIDmode)
	      opmode = TYPE_MODE (TREE_TYPE (tem));

	    /* This condition may hold if we're expanding the address
	       right past the end of an array that turned out not to
	       be addressable (i.e., the address was only computed in
	       debug stmts).  The gen_subreg below would rightfully
	       crash, and the address doesn't really exist, so just
	       drop it.  */
	    if (bitpos >= GET_MODE_BITSIZE (opmode))
	      return NULL;

	    if ((bitpos % GET_MODE_BITSIZE (mode)) == 0)
	      return simplify_gen_subreg (mode, op0, opmode,
					  bitpos / BITS_PER_UNIT);
	  }

	return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0))
				     && TYPE_UNSIGNED (TREE_TYPE (exp))
				     ? SIGN_EXTRACT
				     : ZERO_EXTRACT, mode,
				     GET_MODE (op0) != VOIDmode
				     ? GET_MODE (op0)
				     : TYPE_MODE (TREE_TYPE (tem)),
				     op0, GEN_INT (bitsize), GEN_INT (bitpos));
      }

    case ABS_EXPR:
      return simplify_gen_unary (ABS, mode, op0, mode);

    case NEGATE_EXPR:
      return simplify_gen_unary (NEG, mode, op0, mode);

    case BIT_NOT_EXPR:
      return simplify_gen_unary (NOT, mode, op0, mode);

    case FLOAT_EXPR:
      return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
									  0)))
				 ? UNSIGNED_FLOAT : FLOAT, mode, op0,
				 inner_mode);

    case FIX_TRUNC_EXPR:
      return simplify_gen_unary (unsignedp ? UNSIGNED_FIX : FIX, mode, op0,
				 inner_mode);

    case POINTER_PLUS_EXPR:
      /* For the rare target where pointers are not the same size as
	 size_t, we need to check for mis-matched modes and correct
	 the addend.  */
      if (op0 && op1
	  && GET_MODE (op0) != VOIDmode && GET_MODE (op1) != VOIDmode
	  && GET_MODE (op0) != GET_MODE (op1))
	{
	  if (GET_MODE_BITSIZE (GET_MODE (op0))
	      < GET_MODE_BITSIZE (GET_MODE (op1)))
	    op1 = simplify_gen_unary (TRUNCATE, GET_MODE (op0), op1,
				      GET_MODE (op1));
	  else
	    /* We always sign-extend, regardless of the signedness of
	       the operand, because the operand is always unsigned
	       here even if the original C expression is signed.  */
	    op1 = simplify_gen_unary (SIGN_EXTEND, GET_MODE (op0), op1,
				      GET_MODE (op1));
	}
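      /* Worked example (hypothetical target with 64-bit pointers and
	 32-bit sizetype): for p + (size_t)-4, op1 is 0xfffffffc.
	 Sign-extending gives 0xfffffffffffffffc, i.e. -4, so the sum
	 still points four bytes below p; zero-extending would instead
	 add nearly 4GB.  */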
      /* Fall through.  */
    case PLUS_EXPR:
      return simplify_gen_binary (PLUS, mode, op0, op1);

    case MINUS_EXPR:
      return simplify_gen_binary (MINUS, mode, op0, op1);

    case MULT_EXPR:
      return simplify_gen_binary (MULT, mode, op0, op1);

    case RDIV_EXPR:
    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (unsignedp)
	return simplify_gen_binary (UDIV, mode, op0, op1);
      else
	return simplify_gen_binary (DIV, mode, op0, op1);

    case TRUNC_MOD_EXPR:
      return simplify_gen_binary (unsignedp ? UMOD : MOD, mode, op0, op1);

    case FLOOR_DIV_EXPR:
      if (unsignedp)
	return simplify_gen_binary (UDIV, mode, op0, op1);
      else
	{
	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
	  return simplify_gen_binary (PLUS, mode, div, adj);
	}

    case FLOOR_MOD_EXPR:
      if (unsignedp)
	return simplify_gen_binary (UMOD, mode, op0, op1);
      else
	{
	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
	  rtx adj = floor_sdiv_adjust (mode, mod, op1);
	  adj = simplify_gen_unary (NEG, mode,
				    simplify_gen_binary (MULT, mode, adj, op1),
				    mode);
	  return simplify_gen_binary (PLUS, mode, mod, adj);
	}
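
    /* The FLOOR_MOD_EXPR expansion above uses the identity
       r_floor = r_trunc - adj * op1 with adj = floor_sdiv_adjust.
       E.g. -7 floor-mod 2: the truncating mod is -1 and adj is -1, so
       the result is -1 - (-1 * 2) = 1, whose sign follows the divisor
       as floor semantics require.  */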
    case CEIL_DIV_EXPR:
      if (unsignedp)
	{
	  rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
	  return simplify_gen_binary (PLUS, mode, div, adj);
	}
      else
	{
	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
	  return simplify_gen_binary (PLUS, mode, div, adj);
	}

    case CEIL_MOD_EXPR:
      if (unsignedp)
	{
	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
	  rtx adj = ceil_udiv_adjust (mode, mod, op1);
	  adj = simplify_gen_unary (NEG, mode,
				    simplify_gen_binary (MULT, mode, adj, op1),
				    mode);
	  return simplify_gen_binary (PLUS, mode, mod, adj);
	}
      else
	{
	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
	  rtx adj = ceil_sdiv_adjust (mode, mod, op1);
	  adj = simplify_gen_unary (NEG, mode,
				    simplify_gen_binary (MULT, mode, adj, op1),
				    mode);
	  return simplify_gen_binary (PLUS, mode, mod, adj);
	}

    case ROUND_DIV_EXPR:
      if (unsignedp)
	{
	  rtx div = simplify_gen_binary (UDIV, mode, op0, op1);
	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
	  rtx adj = round_udiv_adjust (mode, mod, op1);
	  return simplify_gen_binary (PLUS, mode, div, adj);
	}
      else
	{
	  rtx div = simplify_gen_binary (DIV, mode, op0, op1);
	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
	  rtx adj = round_sdiv_adjust (mode, mod, op1);
	  return simplify_gen_binary (PLUS, mode, div, adj);
	}

    case ROUND_MOD_EXPR:
      if (unsignedp)
	{
	  rtx mod = simplify_gen_binary (UMOD, mode, op0, op1);
	  rtx adj = round_udiv_adjust (mode, mod, op1);
	  adj = simplify_gen_unary (NEG, mode,
				    simplify_gen_binary (MULT, mode, adj, op1),
				    mode);
	  return simplify_gen_binary (PLUS, mode, mod, adj);
	}
      else
	{
	  rtx mod = simplify_gen_binary (MOD, mode, op0, op1);
	  rtx adj = round_sdiv_adjust (mode, mod, op1);
	  adj = simplify_gen_unary (NEG, mode,
				    simplify_gen_binary (MULT, mode, adj, op1),
				    mode);
	  return simplify_gen_binary (PLUS, mode, mod, adj);
	}

    case LSHIFT_EXPR:
      return simplify_gen_binary (ASHIFT, mode, op0, op1);

    case RSHIFT_EXPR:
      if (unsignedp)
	return simplify_gen_binary (LSHIFTRT, mode, op0, op1);
      else
	return simplify_gen_binary (ASHIFTRT, mode, op0, op1);

    case LROTATE_EXPR:
      return simplify_gen_binary (ROTATE, mode, op0, op1);

    case RROTATE_EXPR:
      return simplify_gen_binary (ROTATERT, mode, op0, op1);

    case MIN_EXPR:
      return simplify_gen_binary (unsignedp ? UMIN : SMIN, mode, op0, op1);

    case MAX_EXPR:
      return simplify_gen_binary (unsignedp ? UMAX : SMAX, mode, op0, op1);

    case BIT_AND_EXPR:
    case TRUTH_AND_EXPR:
      return simplify_gen_binary (AND, mode, op0, op1);

    case BIT_IOR_EXPR:
    case TRUTH_OR_EXPR:
      return simplify_gen_binary (IOR, mode, op0, op1);

    case BIT_XOR_EXPR:
    case TRUTH_XOR_EXPR:
      return simplify_gen_binary (XOR, mode, op0, op1);

    case TRUTH_ANDIF_EXPR:
      return gen_rtx_IF_THEN_ELSE (mode, op0, op1, const0_rtx);

    case TRUTH_ORIF_EXPR:
      return gen_rtx_IF_THEN_ELSE (mode, op0, const_true_rtx, op1);

    case TRUTH_NOT_EXPR:
      return simplify_gen_relational (EQ, mode, inner_mode, op0, const0_rtx);

    case LT_EXPR:
      return simplify_gen_relational (unsignedp ? LTU : LT, mode, inner_mode,
				      op0, op1);

    case LE_EXPR:
      return simplify_gen_relational (unsignedp ? LEU : LE, mode, inner_mode,
				      op0, op1);

    case GT_EXPR:
      return simplify_gen_relational (unsignedp ? GTU : GT, mode, inner_mode,
				      op0, op1);

    case GE_EXPR:
      return simplify_gen_relational (unsignedp ? GEU : GE, mode, inner_mode,
				      op0, op1);

    case EQ_EXPR:
      return simplify_gen_relational (EQ, mode, inner_mode, op0, op1);

    case NE_EXPR:
      return simplify_gen_relational (NE, mode, inner_mode, op0, op1);

    case UNORDERED_EXPR:
      return simplify_gen_relational (UNORDERED, mode, inner_mode, op0, op1);

    case ORDERED_EXPR:
      return simplify_gen_relational (ORDERED, mode, inner_mode, op0, op1);

    case UNLT_EXPR:
      return simplify_gen_relational (UNLT, mode, inner_mode, op0, op1);

    case UNLE_EXPR:
      return simplify_gen_relational (UNLE, mode, inner_mode, op0, op1);

    case UNGT_EXPR:
      return simplify_gen_relational (UNGT, mode, inner_mode, op0, op1);

    case UNGE_EXPR:
      return simplify_gen_relational (UNGE, mode, inner_mode, op0, op1);

    case UNEQ_EXPR:
      return simplify_gen_relational (UNEQ, mode, inner_mode, op0, op1);

    case LTGT_EXPR:
      return simplify_gen_relational (LTGT, mode, inner_mode, op0, op1);

    case COND_EXPR:
      return gen_rtx_IF_THEN_ELSE (mode, op0, op1, op2);

    case COMPLEX_EXPR:
      gcc_assert (COMPLEX_MODE_P (mode));
      if (GET_MODE (op0) == VOIDmode)
	op0 = gen_rtx_CONST (GET_MODE_INNER (mode), op0);
      if (GET_MODE (op1) == VOIDmode)
	op1 = gen_rtx_CONST (GET_MODE_INNER (mode), op1);
      return gen_rtx_CONCAT (mode, op0, op1);

    case CONJ_EXPR:
      if (GET_CODE (op0) == CONCAT)
	return gen_rtx_CONCAT (mode, XEXP (op0, 0),
			       simplify_gen_unary (NEG, GET_MODE_INNER (mode),
						   XEXP (op0, 1),
						   GET_MODE_INNER (mode)));
      else
	{
	  enum machine_mode imode = GET_MODE_INNER (mode);
	  rtx re, im;

	  if (MEM_P (op0))
	    {
	      re = adjust_address_nv (op0, imode, 0);
	      im = adjust_address_nv (op0, imode, GET_MODE_SIZE (imode));
	    }
	  else
	    {
	      enum machine_mode ifmode = int_mode_for_mode (mode);
	      enum machine_mode ihmode = int_mode_for_mode (imode);
	      rtx halfsize;
	      if (ifmode == BLKmode || ihmode == BLKmode)
		return NULL;
	      halfsize = GEN_INT (GET_MODE_BITSIZE (ihmode));
	      re = op0;
	      if (mode != ifmode)
		re = gen_rtx_SUBREG (ifmode, re, 0);
	      re = gen_rtx_ZERO_EXTRACT (ihmode, re, halfsize, const0_rtx);
	      if (imode != ihmode)
		re = gen_rtx_SUBREG (imode, re, 0);
	      im = copy_rtx (op0);
	      if (mode != ifmode)
		im = gen_rtx_SUBREG (ifmode, im, 0);
	      im = gen_rtx_ZERO_EXTRACT (ihmode, im, halfsize, halfsize);
	      if (imode != ihmode)
		im = gen_rtx_SUBREG (imode, im, 0);
	    }
	  im = gen_rtx_NEG (imode, im);
	  return gen_rtx_CONCAT (mode, re, im);
	}

    case ADDR_EXPR:
      op0 = expand_debug_expr (TREE_OPERAND (exp, 0));
      if (!op0 || !MEM_P (op0))
	{
	  if ((TREE_CODE (TREE_OPERAND (exp, 0)) == VAR_DECL
	       || TREE_CODE (TREE_OPERAND (exp, 0)) == PARM_DECL
	       || TREE_CODE (TREE_OPERAND (exp, 0)) == RESULT_DECL)
	      && !TREE_ADDRESSABLE (TREE_OPERAND (exp, 0)))
	    return gen_rtx_DEBUG_IMPLICIT_PTR (mode, TREE_OPERAND (exp, 0));

	  if (handled_component_p (TREE_OPERAND (exp, 0)))
	    {
	      HOST_WIDE_INT bitoffset, bitsize, maxsize;
	      tree decl
		= get_ref_base_and_extent (TREE_OPERAND (exp, 0),
					   &bitoffset, &bitsize, &maxsize);
	      if ((TREE_CODE (decl) == VAR_DECL
		   || TREE_CODE (decl) == PARM_DECL
		   || TREE_CODE (decl) == RESULT_DECL)
		  && !TREE_ADDRESSABLE (decl)
		  && (bitoffset % BITS_PER_UNIT) == 0
		  && bitsize > 0
		  && bitsize == maxsize)
		return plus_constant (gen_rtx_DEBUG_IMPLICIT_PTR (mode, decl),
				      bitoffset / BITS_PER_UNIT);
	    }

	  return NULL;
	}

      as = TYPE_ADDR_SPACE (TREE_TYPE (exp));
      op0 = convert_debug_memory_address (mode, XEXP (op0, 0), as);

      return op0;

    case VECTOR_CST:
      exp = build_constructor_from_list (TREE_TYPE (exp),
					 TREE_VECTOR_CST_ELTS (exp));
      /* Fall through.  */

    case CONSTRUCTOR:
      if (TREE_CODE (TREE_TYPE (exp)) == VECTOR_TYPE)
	{
	  unsigned i;
	  tree val;

	  op0 = gen_rtx_CONCATN
	    (mode, rtvec_alloc (TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp))));

	  FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp), i, val)
	    {
	      op1 = expand_debug_expr (val);
	      if (!op1)
		return NULL;
	      XVECEXP (op0, 0, i) = op1;
	    }

	  if (i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)))
	    {
	      op1 = expand_debug_expr
		(build_zero_cst (TREE_TYPE (TREE_TYPE (exp))));

	      if (!op1)
		return NULL;

	      for (; i < TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp)); i++)
		XVECEXP (op0, 0, i) = op1;
	    }

	  return op0;
	}
      else
	goto flag_unsupported;

    case CALL_EXPR:
      /* ??? Maybe handle some builtins?  */
      return NULL;

    case SSA_NAME:
      {
	gimple g = get_gimple_for_ssa_name (exp);
	if (g)
	  {
	    op0 = expand_debug_expr (gimple_assign_rhs_to_tree (g));
	    if (!op0)
	      return NULL;
	  }
	else
	  {
	    int part = var_to_partition (SA.map, exp);

	    if (part == NO_PARTITION)
	      {
		/* If this is a reference to an incoming value of parameter
		   that is never used in the code or where the incoming
		   value is never used in the code, use PARM_DECL's
		   DECL_RTL if set.  */
		if (SSA_NAME_IS_DEFAULT_DEF (exp)
		    && TREE_CODE (SSA_NAME_VAR (exp)) == PARM_DECL)
		  {
		    op0 = expand_debug_parm_decl (SSA_NAME_VAR (exp));
		    if (op0)
		      goto adjust_mode;
		    op0 = expand_debug_expr (SSA_NAME_VAR (exp));
		    if (op0)
		      goto adjust_mode;
		  }

		return NULL;
	      }

	    gcc_assert (part >= 0 && (unsigned)part < SA.map->num_partitions);

	    op0 = copy_rtx (SA.partition_to_pseudo[part]);
	  }

	goto adjust_mode;
      }

    case ERROR_MARK:
      return NULL;

    /* Vector stuff.  For most of the codes we don't have rtl codes.  */
    case REALIGN_LOAD_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_MIN_EXPR:
    case REDUC_PLUS_EXPR:
    case VEC_COND_EXPR:
    case VEC_EXTRACT_EVEN_EXPR:
    case VEC_EXTRACT_ODD_EXPR:
    case VEC_INTERLEAVE_HIGH_EXPR:
    case VEC_INTERLEAVE_LOW_EXPR:
    case VEC_LSHIFT_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
    case VEC_PACK_SAT_EXPR:
    case VEC_PACK_TRUNC_EXPR:
    case VEC_RSHIFT_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_LSHIFT_HI_EXPR:
    case VEC_WIDEN_LSHIFT_LO_EXPR:
      return NULL;

    /* Misc codes.  */
    case ADDR_SPACE_CONVERT_EXPR:
    case FIXED_CONVERT_EXPR:
    case OBJ_TYPE_REF:
    case WITH_SIZE_EXPR:
      return NULL;

    case DOT_PROD_EXPR:
      if (SCALAR_INT_MODE_P (GET_MODE (op0))
	  && SCALAR_INT_MODE_P (mode))
	{
	  op0
	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
									  0)))
				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
				  inner_mode);
	  op1
	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
									  1)))
				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op1,
				  inner_mode);
	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
	  return simplify_gen_binary (PLUS, mode, op0, op2);
	}
      return NULL;

    case WIDEN_MULT_EXPR:
    case WIDEN_MULT_PLUS_EXPR:
    case WIDEN_MULT_MINUS_EXPR:
      if (SCALAR_INT_MODE_P (GET_MODE (op0))
	  && SCALAR_INT_MODE_P (mode))
	{
	  inner_mode = GET_MODE (op0);
	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 0))))
	    op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
	  else
	    op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);
	  if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp, 1))))
	    op1 = simplify_gen_unary (ZERO_EXTEND, mode, op1, inner_mode);
	  else
	    op1 = simplify_gen_unary (SIGN_EXTEND, mode, op1, inner_mode);
	  op0 = simplify_gen_binary (MULT, mode, op0, op1);
	  if (TREE_CODE (exp) == WIDEN_MULT_EXPR)
	    return op0;
	  else if (TREE_CODE (exp) == WIDEN_MULT_PLUS_EXPR)
	    return simplify_gen_binary (PLUS, mode, op0, op2);
	  else
	    return simplify_gen_binary (MINUS, mode, op2, op0);
	}
      return NULL;

    case WIDEN_SUM_EXPR:
    case WIDEN_LSHIFT_EXPR:
      if (SCALAR_INT_MODE_P (GET_MODE (op0))
	  && SCALAR_INT_MODE_P (mode))
	{
	  op0
	    = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp,
									  0)))
				  ? ZERO_EXTEND : SIGN_EXTEND, mode, op0,
				  inner_mode);
	  return simplify_gen_binary (TREE_CODE (exp) == WIDEN_LSHIFT_EXPR
				      ? ASHIFT : PLUS, mode, op0, op1);
	}
      return NULL;

    case FMA_EXPR:
      return simplify_gen_ternary (FMA, mode, inner_mode, op0, op1, op2);

    default:
    flag_unsupported:
#ifdef ENABLE_CHECKING
      debug_tree (exp);
      gcc_unreachable ();
#else
      return NULL;
#endif
    }
}
/* Return an RTX equivalent to the source bind value of the tree expression
   EXP.  */

static rtx
expand_debug_source_expr (tree exp)
{
  rtx op0 = NULL_RTX;
  enum machine_mode mode = VOIDmode, inner_mode;

  switch (TREE_CODE (exp))
    {
    case PARM_DECL:
      {
	mode = DECL_MODE (exp);
	op0 = expand_debug_parm_decl (exp);
	if (op0)
	  break;
	/* See if this isn't an argument that has been completely
	   optimized out.  */
	if (!DECL_RTL_SET_P (exp)
	    && !DECL_INCOMING_RTL (exp)
	    && DECL_ABSTRACT_ORIGIN (current_function_decl))
	  {
	    tree aexp = exp;
	    if (DECL_ABSTRACT_ORIGIN (exp))
	      aexp = DECL_ABSTRACT_ORIGIN (exp);
	    if (DECL_CONTEXT (aexp)
		== DECL_ABSTRACT_ORIGIN (current_function_decl))
	      {
		VEC(tree, gc) **debug_args;
		unsigned int ix;
		tree ddecl;
#ifdef ENABLE_CHECKING
		tree parm;
		for (parm = DECL_ARGUMENTS (current_function_decl);
		     parm; parm = DECL_CHAIN (parm))
		  gcc_assert (parm != exp
			      && DECL_ABSTRACT_ORIGIN (parm) != aexp);
#endif
		debug_args = decl_debug_args_lookup (current_function_decl);
		if (debug_args != NULL)
		  {
		    for (ix = 0; VEC_iterate (tree, *debug_args, ix, ddecl);
			 ix += 2)
		      if (ddecl == aexp)
			return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
		  }
	      }
	  }
	break;
      }
    default:
      break;
    }

  if (op0 == NULL_RTX)
    return NULL_RTX;

  inner_mode = GET_MODE (op0);
  if (mode == inner_mode)
    return op0;

  if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
    {
      if (GET_MODE_BITSIZE (mode) == GET_MODE_BITSIZE (inner_mode))
	op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
      else if (GET_MODE_BITSIZE (mode) < GET_MODE_BITSIZE (inner_mode))
	op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
      else
	op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
    }
  else if (FLOAT_MODE_P (mode))
    gcc_unreachable ();
  else if (FLOAT_MODE_P (inner_mode))
    {
      if (TYPE_UNSIGNED (TREE_TYPE (exp)))
	op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
      else
	op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
    }
  else if (CONSTANT_P (op0)
	   || GET_MODE_BITSIZE (mode) <= GET_MODE_BITSIZE (inner_mode))
    op0 = simplify_gen_subreg (mode, op0, inner_mode,
			       subreg_lowpart_offset (mode, inner_mode));
  else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
    op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
  else
    op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);

  return op0;
}
/* Expand the _LOCs in debug insns.  We run this after expanding all
   regular insns, so that any variables referenced in the function
   will have their DECL_RTLs set.  */

static void
expand_debug_locations (void)
{
  rtx insn;
  rtx last = get_last_insn ();
  int save_strict_alias = flag_strict_aliasing;

  /* New alias sets while setting up memory attributes cause
     -fcompare-debug failures, even though it doesn't bring about any
     codegen changes.  */
  flag_strict_aliasing = 0;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (DEBUG_INSN_P (insn))
      {
	tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
	rtx val;
	enum machine_mode mode;

	if (value == NULL_TREE)
	  val = NULL_RTX;
	else
	  {
	    if (INSN_VAR_LOCATION_STATUS (insn)
		== VAR_INIT_STATUS_UNINITIALIZED)
	      val = expand_debug_source_expr (value);
	    else
	      val = expand_debug_expr (value);
	    gcc_assert (last == get_last_insn ());
	  }

	if (!val)
	  val = gen_rtx_UNKNOWN_VAR_LOC ();
	else
	  {
	    mode = GET_MODE (INSN_VAR_LOCATION (insn));

	    gcc_assert (mode == GET_MODE (val)
			|| (GET_MODE (val) == VOIDmode
			    && (CONST_INT_P (val)
				|| GET_CODE (val) == CONST_FIXED
				|| GET_CODE (val) == CONST_DOUBLE
				|| GET_CODE (val) == LABEL_REF)));
	  }

	INSN_VAR_LOCATION_LOC (insn) = val;
      }

  flag_strict_aliasing = save_strict_alias;
}
/* Expand basic block BB from GIMPLE trees to RTL.  */

static basic_block
expand_gimple_basic_block (basic_block bb)
{
  gimple_stmt_iterator gsi;
  gimple_seq stmts;
  gimple stmt = NULL;
  rtx note, last;
  edge e;
  edge_iterator ei;
  void **elt;

  if (dump_file)
    fprintf (dump_file, "\n;; Generating RTL for gimple basic block %d\n",
	     bb->index);

  /* Note that since we are now transitioning from GIMPLE to RTL, we
     cannot use the gsi_*_bb() routines because they expect the basic
     block to be in GIMPLE, instead of RTL.  Therefore, we need to
     access the BB sequence directly.  */
  stmts = bb_seq (bb);
  bb->il.gimple = NULL;
  rtl_profile_for_bb (bb);
  init_rtl_bb_info (bb);
  bb->flags |= BB_RTL;

  /* Remove the RETURN_EXPR if we may fall though to the exit
     instead.  */
  gsi = gsi_last (stmts);
  if (!gsi_end_p (gsi)
      && gimple_code (gsi_stmt (gsi)) == GIMPLE_RETURN)
    {
      gimple ret_stmt = gsi_stmt (gsi);

      gcc_assert (single_succ_p (bb));
      gcc_assert (single_succ (bb) == EXIT_BLOCK_PTR);

      if (bb->next_bb == EXIT_BLOCK_PTR
	  && !gimple_return_retval (ret_stmt))
	{
	  gsi_remove (&gsi, false);
	  single_succ_edge (bb)->flags |= EDGE_FALLTHRU;
	}
    }

  gsi = gsi_start (stmts);
  if (!gsi_end_p (gsi))
    {
      stmt = gsi_stmt (gsi);
      if (gimple_code (stmt) != GIMPLE_LABEL)
	stmt = NULL;
    }

  elt = pointer_map_contains (lab_rtx_for_bb, bb);

  if (stmt || elt)
    {
      last = get_last_insn ();

      if (stmt)
	{
	  expand_gimple_stmt (stmt);
	  gsi_next (&gsi);
	}

      if (elt)
	emit_label ((rtx) *elt);

      /* Java emits line number notes in the top of labels.
	 ??? Make this go away once line number notes are obsoleted.  */
      BB_HEAD (bb) = NEXT_INSN (last);
      if (NOTE_P (BB_HEAD (bb)))
	BB_HEAD (bb) = NEXT_INSN (BB_HEAD (bb));
      note = emit_note_after (NOTE_INSN_BASIC_BLOCK, BB_HEAD (bb));

      maybe_dump_rtl_for_gimple_stmt (stmt, last);
    }
  else
    note = BB_HEAD (bb) = emit_note (NOTE_INSN_BASIC_BLOCK);

  NOTE_BASIC_BLOCK (note) = bb;

  for (; !gsi_end_p (gsi); gsi_next (&gsi))
    {
      basic_block new_bb;

      stmt = gsi_stmt (gsi);

      /* If this statement is a non-debug one, and we generate debug
	 insns, then this one might be the last real use of a TERed
	 SSA_NAME, but where there are still some debug uses further
	 down.  Expanding the current SSA name in such further debug
	 uses by their RHS might lead to wrong debug info, as coalescing
	 might make the operands of such RHS be placed into the same
	 pseudo as something else.  Like so:
	   a_1 = a_0 + 1;   // Assume a_1 is TERed and a_0 is dead
	   use(a_1);
	   a_2 = ...
	   #DEBUG ... => a_1
	 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
	 If we now would expand a_1 by it's RHS (a_0 + 1) in the debug use,
	 the write to a_2 would actually have clobbered the place which
	 formerly held a_0.

	 So, instead of that, we recognize the situation, and generate
	 debug temporaries at the last real use of TERed SSA names:
	   a_1 = a_0 + 1;
	   #DEBUG #D1 => a_1
	   use(a_1);
	   a_2 = ...
	   #DEBUG ... => #D1  */
      if (MAY_HAVE_DEBUG_INSNS
	  && SA.values
	  && !is_gimple_debug (stmt))
	{
	  ssa_op_iter iter;
	  tree op;
	  gimple def;

	  location_t sloc = get_curr_insn_source_location ();
	  tree sblock = get_curr_insn_block ();

	  /* Look for SSA names that have their last use here (TERed
	     names always have only one real use).  */
	  FOR_EACH_SSA_TREE_OPERAND (op, stmt, iter, SSA_OP_USE)
	    if ((def = get_gimple_for_ssa_name (op)))
	      {
		imm_use_iterator imm_iter;
		use_operand_p use_p;
		bool have_debug_uses = false;

		FOR_EACH_IMM_USE_FAST (use_p, imm_iter, op)
		  {
		    if (gimple_debug_bind_p (USE_STMT (use_p)))
		      {
			have_debug_uses = true;
			break;
		      }
		  }

		if (have_debug_uses)
		  {
		    /* OP is a TERed SSA name, with DEF it's defining
		       statement, and where OP is used in further debug
		       instructions.  Generate a debug temporary, and
		       replace all uses of OP in debug insns with that
		       temporary.  */
		    gimple debugstmt;
		    tree value = gimple_assign_rhs_to_tree (def);
		    tree vexpr = make_node (DEBUG_EXPR_DECL);
		    rtx val;
		    enum machine_mode mode;

		    set_curr_insn_source_location (gimple_location (def));
		    set_curr_insn_block (gimple_block (def));

		    DECL_ARTIFICIAL (vexpr) = 1;
		    TREE_TYPE (vexpr) = TREE_TYPE (value);
		    if (DECL_P (value))
		      mode = DECL_MODE (value);
		    else
		      mode = TYPE_MODE (TREE_TYPE (value));
		    DECL_MODE (vexpr) = mode;

		    val = gen_rtx_VAR_LOCATION
			(mode, vexpr, (rtx)value, VAR_INIT_STATUS_INITIALIZED);

		    emit_debug_insn (val);

		    FOR_EACH_IMM_USE_STMT (debugstmt, imm_iter, op)
		      {
			if (!gimple_debug_bind_p (debugstmt))
			  continue;

			FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
			  SET_USE (use_p, vexpr);

			update_stmt (debugstmt);
		      }
		  }
	      }
	  set_curr_insn_source_location (sloc);
	  set_curr_insn_block (sblock);
	}

      currently_expanding_gimple_stmt = stmt;

      /* Expand this statement, then evaluate the resulting RTL and
	 fixup the CFG accordingly.  */
      if (gimple_code (stmt) == GIMPLE_COND)
	{
	  new_bb = expand_gimple_cond (bb, stmt);
	  if (new_bb)
	    return new_bb;
	}
      else if (gimple_debug_bind_p (stmt))
	{
	  location_t sloc = get_curr_insn_source_location ();
	  tree sblock = get_curr_insn_block ();
	  gimple_stmt_iterator nsi = gsi;

	  for (;;)
	    {
	      tree var = gimple_debug_bind_get_var (stmt);
	      tree value;
	      rtx val;
	      enum machine_mode mode;

	      if (gimple_debug_bind_has_value_p (stmt))
		value = gimple_debug_bind_get_value (stmt);
	      else
		value = NULL_TREE;

	      last = get_last_insn ();

	      set_curr_insn_source_location (gimple_location (stmt));
	      set_curr_insn_block (gimple_block (stmt));

	      if (DECL_P (var))
		mode = DECL_MODE (var);
	      else
		mode = TYPE_MODE (TREE_TYPE (var));

	      val = gen_rtx_VAR_LOCATION
		(mode, var, (rtx)value, VAR_INIT_STATUS_INITIALIZED);

	      emit_debug_insn (val);

	      if (dump_file && (dump_flags & TDF_DETAILS))
		{
		  /* We can't dump the insn with a TREE where an RTX
		     is expected.  */
		  PAT_VAR_LOCATION_LOC (val) = const0_rtx;
		  maybe_dump_rtl_for_gimple_stmt (stmt, last);
		  PAT_VAR_LOCATION_LOC (val) = (rtx)value;
		}

	      /* In order not to generate too many debug temporaries,
		 we delink all uses of debug statements we already expanded.
		 Therefore debug statements between definition and real
		 use of TERed SSA names will continue to use the SSA name,
		 and not be replaced with debug temps.  */
	      delink_stmt_imm_use (stmt);

	      gsi = nsi;
	      gsi_next (&nsi);
	      if (gsi_end_p (nsi))
		break;
	      stmt = gsi_stmt (nsi);
	      if (!gimple_debug_bind_p (stmt))
		break;
	    }

	  set_curr_insn_source_location (sloc);
	  set_curr_insn_block (sblock);
	}
      else if (gimple_debug_source_bind_p (stmt))
	{
	  location_t sloc = get_curr_insn_source_location ();
	  tree sblock = get_curr_insn_block ();
	  tree var = gimple_debug_source_bind_get_var (stmt);
	  tree value = gimple_debug_source_bind_get_value (stmt);
	  rtx val;
	  enum machine_mode mode;

	  last = get_last_insn ();

	  set_curr_insn_source_location (gimple_location (stmt));
	  set_curr_insn_block (gimple_block (stmt));

	  mode = DECL_MODE (var);

	  val = gen_rtx_VAR_LOCATION (mode, var, (rtx)value,
				      VAR_INIT_STATUS_UNINITIALIZED);

	  emit_debug_insn (val);

	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      /* We can't dump the insn with a TREE where an RTX
		 is expected.  */
	      PAT_VAR_LOCATION_LOC (val) = const0_rtx;
	      maybe_dump_rtl_for_gimple_stmt (stmt, last);
	      PAT_VAR_LOCATION_LOC (val) = (rtx)value;
	    }

	  set_curr_insn_source_location (sloc);
	  set_curr_insn_block (sblock);
	}
      else
	{
	  if (is_gimple_call (stmt) && gimple_call_tail_p (stmt))
	    {
	      bool can_fallthru;
	      new_bb = expand_gimple_tailcall (bb, stmt, &can_fallthru);
	      if (new_bb)
		{
		  if (can_fallthru)
		    bb = new_bb;
		  else
		    return new_bb;
		}
	    }
	  else
	    {
	      def_operand_p def_p;
	      def_p = SINGLE_SSA_DEF_OPERAND (stmt, SSA_OP_DEF);

	      if (def_p != NULL)
		{
		  /* Ignore this stmt if it is in the list of
		     replaceable expressions.  */
		  if (SA.values
		      && bitmap_bit_p (SA.values,
				       SSA_NAME_VERSION (DEF_FROM_PTR (def_p))))
		    continue;
		}
	      last = expand_gimple_stmt (stmt);
	      maybe_dump_rtl_for_gimple_stmt (stmt, last);
	    }
	}
    }

  currently_expanding_gimple_stmt = NULL;

  /* Expand implicit goto and convert goto_locus.  */
  FOR_EACH_EDGE (e, ei, bb->succs)
    {
      if (e->goto_locus && e->goto_block)
	{
	  set_curr_insn_source_location (e->goto_locus);
	  set_curr_insn_block (e->goto_block);
	  e->goto_locus = curr_insn_locator ();
	}
      e->goto_block = NULL;
      if ((e->flags & EDGE_FALLTHRU) && e->dest != bb->next_bb)
	{
	  emit_jump (label_rtx_for_bb (e->dest));
	  e->flags &= ~EDGE_FALLTHRU;
	}
    }

  /* Expanded RTL can create a jump in the last instruction of block.
     This later might be assumed to be a jump to successor and break edge
     insertion.  We need to insert dummy move to prevent this.  PR41440.  */
  if (single_succ_p (bb)
      && (single_succ_edge (bb)->flags & EDGE_FALLTHRU)
      && (last = get_last_insn ())
      && JUMP_P (last))
    {
      rtx dummy = gen_reg_rtx (SImode);
      emit_insn_after_noloc (gen_move_insn (dummy, dummy), last, NULL);
    }

  do_pending_stack_adjust ();

  /* Find the block tail.  The last insn in the block is the insn
     before a barrier and/or table jump insn.  */
  last = get_last_insn ();
  if (BARRIER_P (last))
    last = PREV_INSN (last);
  if (JUMP_TABLE_DATA_P (last))
    last = PREV_INSN (PREV_INSN (last));
  BB_END (bb) = last;

  update_bb_for_insn (bb);

  return bb;
}
/* Create a basic block for initialization code.  */

static basic_block
construct_init_block (void)
{
  basic_block init_block, first_block;
  edge e = NULL;
  int flags;

  /* Multiple entry points not supported yet.  */
  gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR->succs) == 1);
  init_rtl_bb_info (ENTRY_BLOCK_PTR);
  init_rtl_bb_info (EXIT_BLOCK_PTR);
  ENTRY_BLOCK_PTR->flags |= BB_RTL;
  EXIT_BLOCK_PTR->flags |= BB_RTL;

  e = EDGE_SUCC (ENTRY_BLOCK_PTR, 0);

  /* When entry edge points to first basic block, we don't need jump,
     otherwise we have to jump into proper target.  */
  if (e && e->dest != ENTRY_BLOCK_PTR->next_bb)
    {
      tree label = gimple_block_label (e->dest);

      emit_jump (label_rtx (label));
      flags = 0;
    }
  else
    flags = EDGE_FALLTHRU;

  init_block = create_basic_block (NEXT_INSN (get_insns ()),
				   get_last_insn (),
				   ENTRY_BLOCK_PTR);
  init_block->frequency = ENTRY_BLOCK_PTR->frequency;
  init_block->count = ENTRY_BLOCK_PTR->count;
  if (e)
    {
      first_block = e->dest;
      redirect_edge_succ (e, init_block);
      e = make_edge (init_block, first_block, flags);
    }
  else
    e = make_edge (init_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = ENTRY_BLOCK_PTR->count;

  update_bb_for_insn (init_block);
  return init_block;
}
/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

static void
set_block_levels (tree block, int level)
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}
/* Create a block containing landing pads and similar stuff.  */

static void
construct_exit_block (void)
{
  rtx head = get_last_insn ();
  rtx end;
  basic_block exit_block;
  edge e, e2;
  unsigned ix;
  edge_iterator ei;
  rtx orig_end = BB_END (EXIT_BLOCK_PTR->prev_bb);

  rtl_profile_for_bb (EXIT_BLOCK_PTR);

  /* Make sure the locus is set to the end of the function, so that
     epilogue line numbers and warnings are set properly.  */
  if (cfun->function_end_locus != UNKNOWN_LOCATION)
    input_location = cfun->function_end_locus;

  /* The following insns belong to the top scope.  */
  set_curr_insn_block (DECL_INITIAL (current_function_decl));

  /* Generate rtl for function exit.  */
  expand_function_end ();

  end = get_last_insn ();
  if (head == end)
    return;
  /* While emitting the function end we could move end of the last basic
     block.  */
  BB_END (EXIT_BLOCK_PTR->prev_bb) = orig_end;
  while (NEXT_INSN (head) && NOTE_P (NEXT_INSN (head)))
    head = NEXT_INSN (head);
  exit_block = create_basic_block (NEXT_INSN (head), end,
				   EXIT_BLOCK_PTR->prev_bb);
  exit_block->frequency = EXIT_BLOCK_PTR->frequency;
  exit_block->count = EXIT_BLOCK_PTR->count;

  ix = 0;
  while (ix < EDGE_COUNT (EXIT_BLOCK_PTR->preds))
    {
      e = EDGE_PRED (EXIT_BLOCK_PTR, ix);
      if (!(e->flags & EDGE_ABNORMAL))
	redirect_edge_succ (e, exit_block);
      else
	ix++;
    }

  e = make_edge (exit_block, EXIT_BLOCK_PTR, EDGE_FALLTHRU);
  e->probability = REG_BR_PROB_BASE;
  e->count = EXIT_BLOCK_PTR->count;
  FOR_EACH_EDGE (e2, ei, EXIT_BLOCK_PTR->preds)
    if (e2 != e)
      {
	e->count -= e2->count;
	exit_block->count -= e2->count;
	exit_block->frequency -= EDGE_FREQUENCY (e2);
      }
  if (e->count < 0)
    e->count = 0;
  if (exit_block->count < 0)
    exit_block->count = 0;
  if (exit_block->frequency < 0)
    exit_block->frequency = 0;
  update_bb_for_insn (exit_block);
}
/* Helper function for discover_nonconstant_array_refs.
   Look for ARRAY_REF nodes with non-constant indexes and mark them
   addressable.  */

static tree
discover_nonconstant_array_refs_r (tree * tp, int *walk_subtrees,
				   void *data ATTRIBUTE_UNUSED)
{
  tree t = *tp;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
    {
      while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	      && is_gimple_min_invariant (TREE_OPERAND (t, 1))
	      && (!TREE_OPERAND (t, 2)
		  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
	     || (TREE_CODE (t) == COMPONENT_REF
		 && (!TREE_OPERAND (t,2)
		     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
	     || TREE_CODE (t) == BIT_FIELD_REF
	     || TREE_CODE (t) == REALPART_EXPR
	     || TREE_CODE (t) == IMAGPART_EXPR
	     || TREE_CODE (t) == VIEW_CONVERT_EXPR
	     || CONVERT_EXPR_P (t))
	t = TREE_OPERAND (t, 0);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
	{
	  t = get_base_address (t);
	  if (t && DECL_P (t)
	      && DECL_MODE (t) != BLKmode)
	    TREE_ADDRESSABLE (t) = 1;
	}

      *walk_subtrees = 0;
    }

  return NULL_TREE;
}

/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in single register.  Discover such
   expressions and mark variables as addressable to avoid this
   scenario.  */

static void
discover_nonconstant_array_refs (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB (bb)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
	gimple stmt = gsi_stmt (gsi);
	if (!is_gimple_debug (stmt))
	  walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
      }
}
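
/* For instance (an illustration, not from the sources): a small array
   whose DECL_MODE is a single integer or vector mode could otherwise
   be allocated to one register; an access a[i] with a run-time index
   cannot be expanded out of such a register, so the walk above sets
   TREE_ADDRESSABLE and forces the array into memory.  */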
/* This function sets crtl->args.internal_arg_pointer to a virtual
   register if DRAP is needed.  Local register allocator will replace
   virtual_incoming_args_rtx with the virtual register.  */

static void
expand_stack_alignment (void)
{
  rtx drap_rtx;
  unsigned int preferred_stack_boundary;

  if (! SUPPORTS_STACK_ALIGNMENT)
    return;

  if (cfun->calls_alloca
      || cfun->has_nonlocal_label
      || crtl->has_nonlocal_goto)
    crtl->need_drap = true;

  /* Call update_stack_boundary here again to update incoming stack
     boundary.  It may set incoming stack alignment to a different
     value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
     use the minimum incoming stack alignment to check if it is OK
     to perform sibcall optimization since sibcall optimization will
     only align the outgoing stack to incoming stack boundary.  */
  if (targetm.calls.update_stack_boundary)
    targetm.calls.update_stack_boundary ();

  /* The incoming stack frame has to be aligned at least at
     parm_stack_boundary.  */
  gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);

  /* Update crtl->stack_alignment_estimated and use it later to align
     stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
     exceptions since callgraph doesn't collect incoming stack alignment
     in this case.  */
  if (cfun->can_throw_non_call_exceptions
      && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
    preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
  else
    preferred_stack_boundary = crtl->preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_estimated)
    crtl->stack_alignment_estimated = preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_needed)
    crtl->stack_alignment_needed = preferred_stack_boundary;

  gcc_assert (crtl->stack_alignment_needed
	      <= crtl->stack_alignment_estimated);

  crtl->stack_realign_needed
    = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
  crtl->stack_realign_tried = crtl->stack_realign_needed;

  crtl->stack_realign_processed = true;

  /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
     alignment.  */
  gcc_assert (targetm.calls.get_drap_rtx != NULL);
  drap_rtx = targetm.calls.get_drap_rtx ();

  /* stack_realign_drap and drap_rtx must match.  */
  gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));

  /* Do nothing if NULL is returned, which means DRAP is not needed.  */
  if (NULL != drap_rtx)
    {
      crtl->args.internal_arg_pointer = drap_rtx;

      /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
	 needed.  */
      fixup_tail_calls ();
    }
}
4174 /* Translate the intermediate representation contained in the CFG
4175 from GIMPLE trees to RTL.
4177 We do conversion per basic block and preserve/update the tree CFG.
4178 This implies we have to do some magic as the CFG can simultaneously
4179 consist of basic blocks containing RTL and GIMPLE trees. This can
4180 confuse the CFG hooks, so be careful to not manipulate CFG during
4184 gimple_expand_cfg (void)
4186 basic_block bb
, init_block
;
4193 timevar_push (TV_OUT_OF_SSA
);
4194 rewrite_out_of_ssa (&SA
);
4195 timevar_pop (TV_OUT_OF_SSA
);
4196 SA
.partition_to_pseudo
= (rtx
*)xcalloc (SA
.map
->num_partitions
,
4199 /* Some backends want to know that we are expanding to RTL. */
4200 currently_expanding_to_rtl
= 1;
4202 rtl_profile_for_bb (ENTRY_BLOCK_PTR
);
4204 insn_locators_alloc ();
4205 if (!DECL_IS_BUILTIN (current_function_decl
))
4207 /* Eventually, all FEs should explicitly set function_start_locus. */
4208 if (cfun
->function_start_locus
== UNKNOWN_LOCATION
)
4209 set_curr_insn_source_location
4210 (DECL_SOURCE_LOCATION (current_function_decl
));
4212 set_curr_insn_source_location (cfun
->function_start_locus
);
4215 set_curr_insn_source_location (UNKNOWN_LOCATION
);
4216 set_curr_insn_block (DECL_INITIAL (current_function_decl
));
4217 prologue_locator
= curr_insn_locator ();
4219 #ifdef INSN_SCHEDULING
4220 init_sched_attrs ();
4223 /* Make sure first insn is a note even if we don't want linenums.
4224 This makes sure the first insn will never be deleted.
4225 Also, final expects a note to appear there. */
4226 emit_note (NOTE_INSN_DELETED
);
4228 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
4229 discover_nonconstant_array_refs ();
4231 targetm
.expand_to_rtl_hook ();
4232 crtl
->stack_alignment_needed
= STACK_BOUNDARY
;
4233 crtl
->max_used_stack_slot_alignment
= STACK_BOUNDARY
;
4234 crtl
->stack_alignment_estimated
= 0;
4235 crtl
->preferred_stack_boundary
= STACK_BOUNDARY
;
4236 cfun
->cfg
->max_jumptable_ents
= 0;
4238 /* Resovle the function section. Some targets, like ARM EABI rely on knowledge
4239 of the function section at exapnsion time to predict distance of calls. */
4240 resolve_unique_section (current_function_decl
, 0, flag_function_sections
);
4242 /* Expand the variables recorded during gimple lowering. */
4243 timevar_push (TV_VAR_EXPAND
);
4246 expand_used_vars ();
4248 var_seq
= get_insns ();
4250 timevar_pop (TV_VAR_EXPAND
);
4252 /* Honor stack protection warnings. */
4253 if (warn_stack_protect
)
4255 if (cfun
->calls_alloca
)
4256 warning (OPT_Wstack_protector
,
4257 "stack protector not protecting local variables: "
4258 "variable length buffer");
4259 if (has_short_buffer
&& !crtl
->stack_protect_guard
)
4260 warning (OPT_Wstack_protector
,
4261 "stack protector not protecting function: "
4262 "all local arrays are less than %d bytes long",
4263 (int) PARAM_VALUE (PARAM_SSP_BUFFER_SIZE
));
4266 /* Set up parameters and prepare for return, for the function. */
4267 expand_function_start (current_function_decl
);
4269 /* If we emitted any instructions for setting up the variables,
4270 emit them before the FUNCTION_START note. */
4273 emit_insn_before (var_seq
, parm_birth_insn
);
4275 /* In expand_function_end we'll insert the alloca save/restore
4276 before parm_birth_insn. We've just insertted an alloca call.
4277 Adjust the pointer to match. */
4278 parm_birth_insn
= var_seq
;
  /* Now that we also have the parameter RTXs, copy them over to our
     partition representation.  */
  for (i = 0; i < SA.map->num_partitions; i++)
    {
      tree var = SSA_NAME_VAR (partition_to_var (SA.map, i));

      if (TREE_CODE (var) != VAR_DECL
	  && !SA.partition_to_pseudo[i])
	SA.partition_to_pseudo[i] = DECL_RTL_IF_SET (var);
      gcc_assert (SA.partition_to_pseudo[i]);

      /* If this decl was marked as living in multiple places, reset
	 this now to NULL.  */
      if (DECL_RTL_IF_SET (var) == pc_rtx)
	SET_DECL_RTL (var, NULL);

      /* Some RTL parts really want to look at DECL_RTL(x) when x
	 was a decl marked in REG_ATTR or MEM_ATTR.  We could use
	 SET_DECL_RTL here making this available, but that would mean
	 to select one of the potentially many RTLs for one DECL.  Instead
	 of doing that we simply reset the MEM_EXPR of the RTL in question,
	 then nobody can get at it and hence nobody can call DECL_RTL on it.  */
      if (!DECL_RTL_SET_P (var))
	{
	  if (MEM_P (SA.partition_to_pseudo[i]))
	    set_mem_expr (SA.partition_to_pseudo[i], NULL);
	}
    }
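  /* Editor's note: pc_rtx serves here as the "lives in multiple places"
     marker installed by set_rtl when the partitions of one decl ended
     up with more than one location; it can never be a real location
     for a variable, which is why it is safe to clear it back to NULL
     above.  */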
  /* If we have a class containing differently aligned pointers
     we need to merge those into the corresponding RTL pointer
     alignment.  */
  for (i = 1; i < num_ssa_names; i++)
    {
      tree name = ssa_name (i);
      int part;
      rtx r;

      if (!name
	  || !POINTER_TYPE_P (TREE_TYPE (name))
	  /* We might have generated new SSA names in
	     update_alias_info_with_stack_vars.  They will have a NULL
	     defining statement, and won't be part of the partitioning,
	     so ignore those.  */
	  || !SSA_NAME_DEF_STMT (name))
	continue;

      part = var_to_partition (SA.map, name);
      if (part == NO_PARTITION)
	continue;
      r = SA.partition_to_pseudo[part];
      if (REG_P (r))
	mark_reg_pointer (r, get_pointer_alignment (name));
    }
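  /* Editor's note: mark_reg_pointer records in the pseudo's REG_ATTRS
     that it holds a pointer with the given alignment; repeated calls
     keep the smallest alignment claimed so far, which is exactly the
     conservative merge the comment above asks for when several SSA
     names share one register.  */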
  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();

  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (crtl->stack_protect_guard)
    stack_protect_prologue ();
  expand_phi_nodes (&SA);

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();
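  /* Editor's note: from here on the control flow graph is manipulated
     through the RTL cfg hooks rather than the GIMPLE ones, matching the
     new contents of the basic blocks.  */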
  init_block = construct_init_block ();

  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
     remaining edges later.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR->succs)
    e->flags &= ~EDGE_EXECUTABLE;

  lab_rtx_for_bb = pointer_map_create ();
  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR, next_bb)
    bb = expand_gimple_basic_block (bb);
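  /* Editor's note: expand_gimple_basic_block may split the block it is
     expanding (e.g. for a conditional jump or a tail call) and returns
     the last of the resulting blocks, which is why the loop variable is
     reassigned from its return value.  */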
  if (MAY_HAVE_DEBUG_INSNS)
    expand_debug_locations ();

  execute_free_datastructures ();
  timevar_push (TV_OUT_OF_SSA);
  finish_out_of_ssa (&SA);
  timevar_pop (TV_OUT_OF_SSA);
  timevar_push (TV_POST_EXPAND);
  /* We are no longer in SSA form.  */
  cfun->gimple_df->in_ssa_p = false;

  /* Expansion is used by optimization passes too, set maybe_hot_insn_p
     conservatively to true until they are all profile aware.  */
  pointer_map_destroy (lab_rtx_for_bb);
  free_histograms ();
  construct_exit_block ();
  set_curr_insn_block (DECL_INITIAL (current_function_decl));
  insn_locators_finalize ();

  /* Zap the tree EH table.  */
  set_eh_throw_stmt_table (cfun, NULL);

  /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
     split edges which edge insertions might do.  */
  rebuild_jump_labels (get_insns ());
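  /* Editor's note: rebuild_jump_labels links every JUMP_INSN to the
     CODE_LABEL it targets (its JUMP_LABEL field); committing the
     pending edge insertions below may have to redirect jumps and split
     edges, which requires those links to be in place.  */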
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  if (e->insns.r)
	    {
	      rebuild_jump_labels_chain (e->insns.r);
	      /* Avoid putting insns before parm_birth_insn.  */
	      if (e->src == ENTRY_BLOCK_PTR
		  && single_succ_p (ENTRY_BLOCK_PTR))
		{
		  rtx insns = e->insns.r;
		  e->insns.r = NULL_RTX;
		  emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
		}
	      else
		commit_one_edge_insertion (e);
	    }
	  else
	    ei_next (&ei);
	}
    }
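  /* Editor's note: out-of-SSA and PHI elaboration queued copy insns on
     edges (e->insns.r) instead of emitting them directly; they are
     committed to real basic blocks here.  The entry edge is
     special-cased so that its insns land after the parameter setup code
     rather than on a split edge before it.  */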
  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR->next_bb, EXIT_BLOCK_PTR, next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
	  e->flags &= ~EDGE_EXECUTABLE;

	  /* At the moment not all abnormal edges match the RTL
	     representation.  It is safe to remove them here as
	     find_many_sub_basic_blocks will rediscover them.
	     In the future we should get this fixed properly.  */
	  if ((e->flags & EDGE_ABNORMAL)
	      && !(e->flags & EDGE_SIBCALL))
	    remove_edge (e);
	  else
	    ei_next (&ei);
	}
    }
  blocks = sbitmap_alloc (last_basic_block);
  sbitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  sbitmap_free (blocks);
  purge_all_dead_edges ();

  expand_stack_alignment ();
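  /* Editor's note: expansion emitted straight-line RTL into the
     existing blocks, and that RTL may itself contain new jumps and
     labels.  Scanning every block (the sbitmap is set to all-ones) lets
     find_many_sub_basic_blocks split them into proper basic blocks and
     rediscover the abnormal edges removed above; purge_all_dead_edges
     then deletes edges that no longer match the insn stream.  */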
#ifdef ENABLE_CHECKING
  verify_flow_info ();
#endif
  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;
4466 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
4467 /* And the pass manager will dump RTL for us. */
  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
	 parent != NULL_TREE;
	 parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
	TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }
  /* We are now committed to emitting code for this function.  Do any
     preparation, such as emitting abstract debug info for the inline
     function, before it gets mangled by optimization.  */
  if (cgraph_function_possibly_inlined_p (current_function_decl))
    (*debug_hooks->outlining_inline_function) (current_function_decl);

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;

  /* After expanding, the tm_restart map is no longer needed.  */
  if (cfun->gimple_df->tm_restart)
    {
      htab_delete (cfun->gimple_df->tm_restart);
      cfun->gimple_df->tm_restart = NULL;
    }
  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (cfun->decl), 0);
  default_rtl_profile ();
  timevar_pop (TV_POST_EXPAND);

  return 0;
}
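/* Editor's note: the pass descriptor below is how the pass manager
   places expansion in the pipeline.  It requires the SSA and GIMPLE
   properties, provides PROP_rtl, and destroys the tree-level
   properties, marking the point where the IL switches from GIMPLE to
   RTL.  */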
struct rtl_opt_pass pass_expand =
{
 {
  RTL_PASS,
  "expand",				/* name */
  NULL,					/* gate */
  gimple_expand_cfg,			/* execute */
  NULL,					/* sub */
  NULL,					/* next */
  0,					/* static_pass_number */
  TV_EXPAND,				/* tv_id */
  PROP_ssa | PROP_gimple_leh | PROP_cfg
    | PROP_gimple_lcx,			/* properties_required */
  PROP_rtl,				/* properties_provided */
  PROP_ssa | PROP_trees,		/* properties_destroyed */
  TODO_verify_ssa | TODO_verify_flow
    | TODO_verify_stmts,		/* todo_flags_start */
  TODO_ggc_collect			/* todo_flags_finish */
 }
};