1 /* A pass for lowering trees to RTL.
2 Copyright (C) 2004-2019 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify
7 it under the terms of the GNU General Public License as published by
8 the Free Software Foundation; either version 3, or (at your option)
11 GCC is distributed in the hope that it will be useful,
12 but WITHOUT ANY WARRANTY; without even the implied warranty of
13 MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
14 GNU General Public License for more details.
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
22 #include "coretypes.h"
29 #include "tree-pass.h"
34 #include "regs.h" /* For reg_renumber. */
38 #include "diagnostic.h"
39 #include "fold-const.h"
41 #include "stor-layout.h"
43 #include "print-tree.h"
47 #include "cfgcleanup.h"
52 #include "internal-fn.h"
54 #include "gimple-iterator.h"
55 #include "gimple-expr.h"
56 #include "gimple-walk.h"
61 #include "gimple-pretty-print.h"
64 #include "tree-inline.h"
65 #include "value-prof.h"
66 #include "tree-ssa-live.h"
67 #include "tree-outof-ssa.h"
69 #include "insn-attr.h" /* For INSN_SCHEDULING. */
70 #include "stringpool.h"
73 #include "tree-ssa-address.h"
77 /* Some systems use __main in a way incompatible with its use in gcc, in these
78 cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
79 give the same symbol without quotes for an alternative entry point. You
80 must define both, or neither. */
82 #define NAME__MAIN "__main"
85 /* This variable holds information helping the rewriting of SSA trees
89 /* This variable holds the currently expanded gimple statement for purposes
90 of comminucating the profile info to the builtin expanders. */
91 gimple
*currently_expanding_gimple_stmt
;
93 static rtx
expand_debug_expr (tree
);
95 static bool defer_stack_allocation (tree
, bool);
97 static void record_alignment_for_reg_var (unsigned int);
99 /* Return an expression tree corresponding to the RHS of GIMPLE
103 gimple_assign_rhs_to_tree (gimple
*stmt
)
106 switch (get_gimple_rhs_class (gimple_expr_code (stmt
)))
108 case GIMPLE_TERNARY_RHS
:
109 t
= build3 (gimple_assign_rhs_code (stmt
),
110 TREE_TYPE (gimple_assign_lhs (stmt
)),
111 gimple_assign_rhs1 (stmt
), gimple_assign_rhs2 (stmt
),
112 gimple_assign_rhs3 (stmt
));
114 case GIMPLE_BINARY_RHS
:
115 t
= build2 (gimple_assign_rhs_code (stmt
),
116 TREE_TYPE (gimple_assign_lhs (stmt
)),
117 gimple_assign_rhs1 (stmt
), gimple_assign_rhs2 (stmt
));
119 case GIMPLE_UNARY_RHS
:
120 t
= build1 (gimple_assign_rhs_code (stmt
),
121 TREE_TYPE (gimple_assign_lhs (stmt
)),
122 gimple_assign_rhs1 (stmt
));
124 case GIMPLE_SINGLE_RHS
:
126 t
= gimple_assign_rhs1 (stmt
);
127 /* Avoid modifying this tree in place below. */
128 if ((gimple_has_location (stmt
) && CAN_HAVE_LOCATION_P (t
)
129 && gimple_location (stmt
) != EXPR_LOCATION (t
))
130 || (gimple_block (stmt
) && currently_expanding_to_rtl
139 if (gimple_has_location (stmt
) && CAN_HAVE_LOCATION_P (t
))
140 SET_EXPR_LOCATION (t
, gimple_location (stmt
));
146 #ifndef STACK_ALIGNMENT_NEEDED
147 #define STACK_ALIGNMENT_NEEDED 1
150 #define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
152 /* Choose either CUR or NEXT as the leader DECL for a partition.
153 Prefer ignored decls, to simplify debug dumps and reduce ambiguity
154 out of the same user variable being in multiple partitions (this is
155 less likely for compiler-introduced temps). */
158 leader_merge (tree cur
, tree next
)
160 if (cur
== NULL
|| cur
== next
)
163 if (DECL_P (cur
) && DECL_IGNORED_P (cur
))
166 if (DECL_P (next
) && DECL_IGNORED_P (next
))
172 /* Associate declaration T with storage space X. If T is no
173 SSA name this is exactly SET_DECL_RTL, otherwise make the
174 partition of T associated with X. */
176 set_rtl (tree t
, rtx x
)
178 gcc_checking_assert (!x
179 || !(TREE_CODE (t
) == SSA_NAME
|| is_gimple_reg (t
))
180 || (use_register_for_decl (t
)
182 || (GET_CODE (x
) == CONCAT
183 && (REG_P (XEXP (x
, 0))
184 || SUBREG_P (XEXP (x
, 0)))
185 && (REG_P (XEXP (x
, 1))
186 || SUBREG_P (XEXP (x
, 1))))
187 /* We need to accept PARALLELs for RESUT_DECLs
188 because of vector types with BLKmode returned
189 in multiple registers, but they are supposed
190 to be uncoalesced. */
191 || (GET_CODE (x
) == PARALLEL
193 && TREE_CODE (SSAVAR (t
)) == RESULT_DECL
194 && (GET_MODE (x
) == BLKmode
195 || !flag_tree_coalesce_vars
)))
196 : (MEM_P (x
) || x
== pc_rtx
197 || (GET_CODE (x
) == CONCAT
198 && MEM_P (XEXP (x
, 0))
199 && MEM_P (XEXP (x
, 1))))));
200 /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and
201 RESULT_DECLs has the expected mode. For memory, we accept
202 unpromoted modes, since that's what we're likely to get. For
203 PARM_DECLs and RESULT_DECLs, we'll have been called by
204 set_parm_rtl, which will give us the default def, so we don't
205 have to compute it ourselves. For RESULT_DECLs, we accept mode
206 mismatches too, as long as we have BLKmode or are not coalescing
207 across variables, so that we don't reject BLKmode PARALLELs or
209 gcc_checking_assert (!x
|| x
== pc_rtx
|| TREE_CODE (t
) != SSA_NAME
211 && TREE_CODE (SSAVAR (t
)) == RESULT_DECL
212 && (promote_ssa_mode (t
, NULL
) == BLKmode
213 || !flag_tree_coalesce_vars
))
214 || !use_register_for_decl (t
)
215 || GET_MODE (x
) == promote_ssa_mode (t
, NULL
));
220 tree cur
= NULL_TREE
;
228 else if (SUBREG_P (xm
))
230 gcc_assert (subreg_lowpart_p (xm
));
231 xm
= SUBREG_REG (xm
);
234 else if (GET_CODE (xm
) == CONCAT
)
239 else if (GET_CODE (xm
) == PARALLEL
)
241 xm
= XVECEXP (xm
, 0, 0);
242 gcc_assert (GET_CODE (xm
) == EXPR_LIST
);
246 else if (xm
== pc_rtx
)
251 tree next
= skip
? cur
: leader_merge (cur
, SSAVAR (t
) ? SSAVAR (t
) : t
);
256 set_mem_attributes (x
,
257 next
&& TREE_CODE (next
) == SSA_NAME
261 set_reg_attrs_for_decl_rtl (next
, x
);
265 if (TREE_CODE (t
) == SSA_NAME
)
267 int part
= var_to_partition (SA
.map
, t
);
268 if (part
!= NO_PARTITION
)
270 if (SA
.partition_to_pseudo
[part
])
271 gcc_assert (SA
.partition_to_pseudo
[part
] == x
);
272 else if (x
!= pc_rtx
)
273 SA
.partition_to_pseudo
[part
] = x
;
275 /* For the benefit of debug information at -O0 (where
276 vartracking doesn't run) record the place also in the base
277 DECL. For PARMs and RESULTs, do so only when setting the
279 if (x
&& x
!= pc_rtx
&& SSA_NAME_VAR (t
)
280 && (VAR_P (SSA_NAME_VAR (t
))
281 || SSA_NAME_IS_DEFAULT_DEF (t
)))
283 tree var
= SSA_NAME_VAR (t
);
284 /* If we don't yet have something recorded, just record it now. */
285 if (!DECL_RTL_SET_P (var
))
286 SET_DECL_RTL (var
, x
);
287 /* If we have it set already to "multiple places" don't
289 else if (DECL_RTL (var
) == pc_rtx
)
291 /* If we have something recorded and it's not the same place
292 as we want to record now, we have multiple partitions for the
293 same base variable, with different places. We can't just
294 randomly chose one, hence we have to say that we don't know.
295 This only happens with optimization, and there var-tracking
296 will figure out the right thing. */
297 else if (DECL_RTL (var
) != x
)
298 SET_DECL_RTL (var
, pc_rtx
);
305 /* This structure holds data relevant to one variable that will be
306 placed in a stack slot. */
313 /* Initially, the size of the variable. Later, the size of the partition,
314 if this variable becomes it's partition's representative. */
317 /* The *byte* alignment required for this variable. Or as, with the
318 size, the alignment for this partition. */
321 /* The partition representative. */
322 size_t representative
;
324 /* The next stack variable in the partition, or EOC. */
327 /* The numbers of conflicting stack variables. */
331 #define EOC ((size_t)-1)
333 /* We have an array of such objects while deciding allocation. */
334 static class stack_var
*stack_vars
;
335 static size_t stack_vars_alloc
;
336 static size_t stack_vars_num
;
337 static hash_map
<tree
, size_t> *decl_to_stack_part
;
339 /* Conflict bitmaps go on this obstack. This allows us to destroy
340 all of them in one big sweep. */
341 static bitmap_obstack stack_var_bitmap_obstack
;
343 /* An array of indices such that stack_vars[stack_vars_sorted[i]].size
344 is non-decreasing. */
345 static size_t *stack_vars_sorted
;
347 /* The phase of the stack frame. This is the known misalignment of
348 virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY. That is,
349 (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0. */
350 static int frame_phase
;
352 /* Used during expand_used_vars to remember if we saw any decls for
353 which we'd like to enable stack smashing protection. */
354 static bool has_protected_decls
;
356 /* Used during expand_used_vars. Remember if we say a character buffer
357 smaller than our cutoff threshold. Used for -Wstack-protector. */
358 static bool has_short_buffer
;
360 /* Compute the byte alignment to use for DECL. Ignore alignment
361 we can't do with expected alignment of the stack boundary. */
364 align_local_variable (tree decl
, bool really_expand
)
368 if (TREE_CODE (decl
) == SSA_NAME
)
369 align
= TYPE_ALIGN (TREE_TYPE (decl
));
372 align
= LOCAL_DECL_ALIGNMENT (decl
);
373 /* Don't change DECL_ALIGN when called from estimated_stack_frame_size.
374 That is done before IPA and could bump alignment based on host
375 backend even for offloaded code which wants different
376 LOCAL_DECL_ALIGNMENT. */
378 SET_DECL_ALIGN (decl
, align
);
380 return align
/ BITS_PER_UNIT
;
383 /* Align given offset BASE with ALIGN. Truncate up if ALIGN_UP is true,
384 down otherwise. Return truncated BASE value. */
386 static inline unsigned HOST_WIDE_INT
387 align_base (HOST_WIDE_INT base
, unsigned HOST_WIDE_INT align
, bool align_up
)
389 return align_up
? (base
+ align
- 1) & -align
: base
& -align
;
392 /* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
393 Return the frame offset. */
396 alloc_stack_frame_space (poly_int64 size
, unsigned HOST_WIDE_INT align
)
398 poly_int64 offset
, new_frame_offset
;
400 if (FRAME_GROWS_DOWNWARD
)
403 = aligned_lower_bound (frame_offset
- frame_phase
- size
,
404 align
) + frame_phase
;
405 offset
= new_frame_offset
;
410 = aligned_upper_bound (frame_offset
- frame_phase
,
411 align
) + frame_phase
;
412 offset
= new_frame_offset
;
413 new_frame_offset
+= size
;
415 frame_offset
= new_frame_offset
;
417 if (frame_offset_overflow (frame_offset
, cfun
->decl
))
418 frame_offset
= offset
= 0;
423 /* Accumulate DECL into STACK_VARS. */
426 add_stack_var (tree decl
, bool really_expand
)
430 if (stack_vars_num
>= stack_vars_alloc
)
432 if (stack_vars_alloc
)
433 stack_vars_alloc
= stack_vars_alloc
* 3 / 2;
435 stack_vars_alloc
= 32;
437 = XRESIZEVEC (class stack_var
, stack_vars
, stack_vars_alloc
);
439 if (!decl_to_stack_part
)
440 decl_to_stack_part
= new hash_map
<tree
, size_t>;
442 v
= &stack_vars
[stack_vars_num
];
443 decl_to_stack_part
->put (decl
, stack_vars_num
);
446 tree size
= TREE_CODE (decl
) == SSA_NAME
447 ? TYPE_SIZE_UNIT (TREE_TYPE (decl
))
448 : DECL_SIZE_UNIT (decl
);
449 v
->size
= tree_to_poly_uint64 (size
);
450 /* Ensure that all variables have size, so that &a != &b for any two
451 variables that are simultaneously live. */
452 if (known_eq (v
->size
, 0U))
454 v
->alignb
= align_local_variable (decl
, really_expand
);
455 /* An alignment of zero can mightily confuse us later. */
456 gcc_assert (v
->alignb
!= 0);
458 /* All variables are initially in their own partition. */
459 v
->representative
= stack_vars_num
;
462 /* All variables initially conflict with no other. */
465 /* Ensure that this decl doesn't get put onto the list twice. */
466 set_rtl (decl
, pc_rtx
);
471 /* Make the decls associated with luid's X and Y conflict. */
474 add_stack_var_conflict (size_t x
, size_t y
)
476 class stack_var
*a
= &stack_vars
[x
];
477 class stack_var
*b
= &stack_vars
[y
];
481 a
->conflicts
= BITMAP_ALLOC (&stack_var_bitmap_obstack
);
483 b
->conflicts
= BITMAP_ALLOC (&stack_var_bitmap_obstack
);
484 bitmap_set_bit (a
->conflicts
, y
);
485 bitmap_set_bit (b
->conflicts
, x
);
488 /* Check whether the decls associated with luid's X and Y conflict. */
491 stack_var_conflict_p (size_t x
, size_t y
)
493 class stack_var
*a
= &stack_vars
[x
];
494 class stack_var
*b
= &stack_vars
[y
];
497 /* Partitions containing an SSA name result from gimple registers
498 with things like unsupported modes. They are top-level and
499 hence conflict with everything else. */
500 if (TREE_CODE (a
->decl
) == SSA_NAME
|| TREE_CODE (b
->decl
) == SSA_NAME
)
503 if (!a
->conflicts
|| !b
->conflicts
)
505 return bitmap_bit_p (a
->conflicts
, y
);
508 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
509 enter its partition number into bitmap DATA. */
512 visit_op (gimple
*, tree op
, tree
, void *data
)
514 bitmap active
= (bitmap
)data
;
515 op
= get_base_address (op
);
518 && DECL_RTL_IF_SET (op
) == pc_rtx
)
520 size_t *v
= decl_to_stack_part
->get (op
);
522 bitmap_set_bit (active
, *v
);
527 /* Callback for walk_stmt_ops. If OP is a decl touched by add_stack_var
528 record conflicts between it and all currently active other partitions
532 visit_conflict (gimple
*, tree op
, tree
, void *data
)
534 bitmap active
= (bitmap
)data
;
535 op
= get_base_address (op
);
538 && DECL_RTL_IF_SET (op
) == pc_rtx
)
540 size_t *v
= decl_to_stack_part
->get (op
);
541 if (v
&& bitmap_set_bit (active
, *v
))
546 gcc_assert (num
< stack_vars_num
);
547 EXECUTE_IF_SET_IN_BITMAP (active
, 0, i
, bi
)
548 add_stack_var_conflict (num
, i
);
554 /* Helper routine for add_scope_conflicts, calculating the active partitions
555 at the end of BB, leaving the result in WORK. We're called to generate
556 conflicts when FOR_CONFLICT is true, otherwise we're just tracking
560 add_scope_conflicts_1 (basic_block bb
, bitmap work
, bool for_conflict
)
564 gimple_stmt_iterator gsi
;
565 walk_stmt_load_store_addr_fn visit
;
568 FOR_EACH_EDGE (e
, ei
, bb
->preds
)
569 bitmap_ior_into (work
, (bitmap
)e
->src
->aux
);
573 for (gsi
= gsi_start_phis (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
575 gimple
*stmt
= gsi_stmt (gsi
);
576 walk_stmt_load_store_addr_ops (stmt
, work
, NULL
, NULL
, visit
);
578 for (gsi
= gsi_after_labels (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
580 gimple
*stmt
= gsi_stmt (gsi
);
582 if (gimple_clobber_p (stmt
))
584 tree lhs
= gimple_assign_lhs (stmt
);
586 /* Nested function lowering might introduce LHSs
587 that are COMPONENT_REFs. */
590 if (DECL_RTL_IF_SET (lhs
) == pc_rtx
591 && (v
= decl_to_stack_part
->get (lhs
)))
592 bitmap_clear_bit (work
, *v
);
594 else if (!is_gimple_debug (stmt
))
597 && visit
== visit_op
)
599 /* If this is the first real instruction in this BB we need
600 to add conflicts for everything live at this point now.
601 Unlike classical liveness for named objects we can't
602 rely on seeing a def/use of the names we're interested in.
603 There might merely be indirect loads/stores. We'd not add any
604 conflicts for such partitions. */
607 EXECUTE_IF_SET_IN_BITMAP (work
, 0, i
, bi
)
609 class stack_var
*a
= &stack_vars
[i
];
611 a
->conflicts
= BITMAP_ALLOC (&stack_var_bitmap_obstack
);
612 bitmap_ior_into (a
->conflicts
, work
);
614 visit
= visit_conflict
;
616 walk_stmt_load_store_addr_ops (stmt
, work
, visit
, visit
, visit
);
621 /* Generate stack partition conflicts between all partitions that are
622 simultaneously live. */
625 add_scope_conflicts (void)
629 bitmap work
= BITMAP_ALLOC (NULL
);
633 /* We approximate the live range of a stack variable by taking the first
634 mention of its name as starting point(s), and by the end-of-scope
635 death clobber added by gimplify as ending point(s) of the range.
636 This overapproximates in the case we for instance moved an address-taken
637 operation upward, without also moving a dereference to it upwards.
638 But it's conservatively correct as a variable never can hold values
639 before its name is mentioned at least once.
641 We then do a mostly classical bitmap liveness algorithm. */
643 FOR_ALL_BB_FN (bb
, cfun
)
644 bb
->aux
= BITMAP_ALLOC (&stack_var_bitmap_obstack
);
646 rpo
= XNEWVEC (int, last_basic_block_for_fn (cfun
));
647 n_bbs
= pre_and_rev_post_order_compute (NULL
, rpo
, false);
654 for (i
= 0; i
< n_bbs
; i
++)
657 bb
= BASIC_BLOCK_FOR_FN (cfun
, rpo
[i
]);
658 active
= (bitmap
)bb
->aux
;
659 add_scope_conflicts_1 (bb
, work
, false);
660 if (bitmap_ior_into (active
, work
))
665 FOR_EACH_BB_FN (bb
, cfun
)
666 add_scope_conflicts_1 (bb
, work
, true);
670 FOR_ALL_BB_FN (bb
, cfun
)
671 BITMAP_FREE (bb
->aux
);
674 /* A subroutine of partition_stack_vars. A comparison function for qsort,
675 sorting an array of indices by the properties of the object. */
678 stack_var_cmp (const void *a
, const void *b
)
680 size_t ia
= *(const size_t *)a
;
681 size_t ib
= *(const size_t *)b
;
682 unsigned int aligna
= stack_vars
[ia
].alignb
;
683 unsigned int alignb
= stack_vars
[ib
].alignb
;
684 poly_int64 sizea
= stack_vars
[ia
].size
;
685 poly_int64 sizeb
= stack_vars
[ib
].size
;
686 tree decla
= stack_vars
[ia
].decl
;
687 tree declb
= stack_vars
[ib
].decl
;
689 unsigned int uida
, uidb
;
691 /* Primary compare on "large" alignment. Large comes first. */
692 largea
= (aligna
* BITS_PER_UNIT
> MAX_SUPPORTED_STACK_ALIGNMENT
);
693 largeb
= (alignb
* BITS_PER_UNIT
> MAX_SUPPORTED_STACK_ALIGNMENT
);
694 if (largea
!= largeb
)
695 return (int)largeb
- (int)largea
;
697 /* Secondary compare on size, decreasing */
698 int diff
= compare_sizes_for_sort (sizeb
, sizea
);
702 /* Tertiary compare on true alignment, decreasing. */
708 /* Final compare on ID for sort stability, increasing.
709 Two SSA names are compared by their version, SSA names come before
710 non-SSA names, and two normal decls are compared by their DECL_UID. */
711 if (TREE_CODE (decla
) == SSA_NAME
)
713 if (TREE_CODE (declb
) == SSA_NAME
)
714 uida
= SSA_NAME_VERSION (decla
), uidb
= SSA_NAME_VERSION (declb
);
718 else if (TREE_CODE (declb
) == SSA_NAME
)
721 uida
= DECL_UID (decla
), uidb
= DECL_UID (declb
);
729 struct part_traits
: unbounded_int_hashmap_traits
<size_t, bitmap
> {};
730 typedef hash_map
<size_t, bitmap
, part_traits
> part_hashmap
;
732 /* If the points-to solution *PI points to variables that are in a partition
733 together with other variables add all partition members to the pointed-to
737 add_partitioned_vars_to_ptset (struct pt_solution
*pt
,
738 part_hashmap
*decls_to_partitions
,
739 hash_set
<bitmap
> *visited
, bitmap temp
)
747 /* The pointed-to vars bitmap is shared, it is enough to
749 || visited
->add (pt
->vars
))
754 /* By using a temporary bitmap to store all members of the partitions
755 we have to add we make sure to visit each of the partitions only
757 EXECUTE_IF_SET_IN_BITMAP (pt
->vars
, 0, i
, bi
)
759 || !bitmap_bit_p (temp
, i
))
760 && (part
= decls_to_partitions
->get (i
)))
761 bitmap_ior_into (temp
, *part
);
762 if (!bitmap_empty_p (temp
))
763 bitmap_ior_into (pt
->vars
, temp
);
766 /* Update points-to sets based on partition info, so we can use them on RTL.
767 The bitmaps representing stack partitions will be saved until expand,
768 where partitioned decls used as bases in memory expressions will be
772 update_alias_info_with_stack_vars (void)
774 part_hashmap
*decls_to_partitions
= NULL
;
776 tree var
= NULL_TREE
;
778 for (i
= 0; i
< stack_vars_num
; i
++)
782 struct ptr_info_def
*pi
;
784 /* Not interested in partitions with single variable. */
785 if (stack_vars
[i
].representative
!= i
786 || stack_vars
[i
].next
== EOC
)
789 if (!decls_to_partitions
)
791 decls_to_partitions
= new part_hashmap
;
792 cfun
->gimple_df
->decls_to_pointers
= new hash_map
<tree
, tree
>;
795 /* Create an SSA_NAME that points to the partition for use
796 as base during alias-oracle queries on RTL for bases that
797 have been partitioned. */
798 if (var
== NULL_TREE
)
799 var
= create_tmp_var (ptr_type_node
);
800 name
= make_ssa_name (var
);
802 /* Create bitmaps representing partitions. They will be used for
803 points-to sets later, so use GGC alloc. */
804 part
= BITMAP_GGC_ALLOC ();
805 for (j
= i
; j
!= EOC
; j
= stack_vars
[j
].next
)
807 tree decl
= stack_vars
[j
].decl
;
808 unsigned int uid
= DECL_PT_UID (decl
);
809 bitmap_set_bit (part
, uid
);
810 decls_to_partitions
->put (uid
, part
);
811 cfun
->gimple_df
->decls_to_pointers
->put (decl
, name
);
812 if (TREE_ADDRESSABLE (decl
))
813 TREE_ADDRESSABLE (name
) = 1;
816 /* Make the SSA name point to all partition members. */
817 pi
= get_ptr_info (name
);
818 pt_solution_set (&pi
->pt
, part
, false);
821 /* Make all points-to sets that contain one member of a partition
822 contain all members of the partition. */
823 if (decls_to_partitions
)
827 hash_set
<bitmap
> visited
;
828 bitmap temp
= BITMAP_ALLOC (&stack_var_bitmap_obstack
);
830 FOR_EACH_SSA_NAME (i
, name
, cfun
)
832 struct ptr_info_def
*pi
;
834 if (POINTER_TYPE_P (TREE_TYPE (name
))
835 && ((pi
= SSA_NAME_PTR_INFO (name
)) != NULL
))
836 add_partitioned_vars_to_ptset (&pi
->pt
, decls_to_partitions
,
840 add_partitioned_vars_to_ptset (&cfun
->gimple_df
->escaped
,
841 decls_to_partitions
, &visited
, temp
);
843 delete decls_to_partitions
;
848 /* A subroutine of partition_stack_vars. The UNION portion of a UNION/FIND
849 partitioning algorithm. Partitions A and B are known to be non-conflicting.
850 Merge them into a single partition A. */
853 union_stack_vars (size_t a
, size_t b
)
855 class stack_var
*vb
= &stack_vars
[b
];
859 gcc_assert (stack_vars
[b
].next
== EOC
);
860 /* Add B to A's partition. */
861 stack_vars
[b
].next
= stack_vars
[a
].next
;
862 stack_vars
[b
].representative
= a
;
863 stack_vars
[a
].next
= b
;
865 /* Update the required alignment of partition A to account for B. */
866 if (stack_vars
[a
].alignb
< stack_vars
[b
].alignb
)
867 stack_vars
[a
].alignb
= stack_vars
[b
].alignb
;
869 /* Update the interference graph and merge the conflicts. */
872 EXECUTE_IF_SET_IN_BITMAP (vb
->conflicts
, 0, u
, bi
)
873 add_stack_var_conflict (a
, stack_vars
[u
].representative
);
874 BITMAP_FREE (vb
->conflicts
);
878 /* A subroutine of expand_used_vars. Binpack the variables into
879 partitions constrained by the interference graph. The overall
880 algorithm used is as follows:
882 Sort the objects by size in descending order.
887 Look for the largest non-conflicting object B with size <= S.
894 partition_stack_vars (void)
896 size_t si
, sj
, n
= stack_vars_num
;
898 stack_vars_sorted
= XNEWVEC (size_t, stack_vars_num
);
899 for (si
= 0; si
< n
; ++si
)
900 stack_vars_sorted
[si
] = si
;
905 qsort (stack_vars_sorted
, n
, sizeof (size_t), stack_var_cmp
);
907 for (si
= 0; si
< n
; ++si
)
909 size_t i
= stack_vars_sorted
[si
];
910 unsigned int ialign
= stack_vars
[i
].alignb
;
911 poly_int64 isize
= stack_vars
[i
].size
;
913 /* Ignore objects that aren't partition representatives. If we
914 see a var that is not a partition representative, it must
915 have been merged earlier. */
916 if (stack_vars
[i
].representative
!= i
)
919 for (sj
= si
+ 1; sj
< n
; ++sj
)
921 size_t j
= stack_vars_sorted
[sj
];
922 unsigned int jalign
= stack_vars
[j
].alignb
;
923 poly_int64 jsize
= stack_vars
[j
].size
;
925 /* Ignore objects that aren't partition representatives. */
926 if (stack_vars
[j
].representative
!= j
)
929 /* Do not mix objects of "small" (supported) alignment
930 and "large" (unsupported) alignment. */
931 if ((ialign
* BITS_PER_UNIT
<= MAX_SUPPORTED_STACK_ALIGNMENT
)
932 != (jalign
* BITS_PER_UNIT
<= MAX_SUPPORTED_STACK_ALIGNMENT
))
935 /* For Address Sanitizer do not mix objects with different
936 sizes, as the shorter vars wouldn't be adequately protected.
937 Don't do that for "large" (unsupported) alignment objects,
938 those aren't protected anyway. */
939 if (asan_sanitize_stack_p ()
940 && maybe_ne (isize
, jsize
)
941 && ialign
* BITS_PER_UNIT
<= MAX_SUPPORTED_STACK_ALIGNMENT
)
944 /* Ignore conflicting objects. */
945 if (stack_var_conflict_p (i
, j
))
948 /* UNION the objects, placing J at OFFSET. */
949 union_stack_vars (i
, j
);
953 update_alias_info_with_stack_vars ();
956 /* A debugging aid for expand_used_vars. Dump the generated partitions. */
959 dump_stack_var_partition (void)
961 size_t si
, i
, j
, n
= stack_vars_num
;
963 for (si
= 0; si
< n
; ++si
)
965 i
= stack_vars_sorted
[si
];
967 /* Skip variables that aren't partition representatives, for now. */
968 if (stack_vars
[i
].representative
!= i
)
971 fprintf (dump_file
, "Partition %lu: size ", (unsigned long) i
);
972 print_dec (stack_vars
[i
].size
, dump_file
);
973 fprintf (dump_file
, " align %u\n", stack_vars
[i
].alignb
);
975 for (j
= i
; j
!= EOC
; j
= stack_vars
[j
].next
)
977 fputc ('\t', dump_file
);
978 print_generic_expr (dump_file
, stack_vars
[j
].decl
, dump_flags
);
980 fputc ('\n', dump_file
);
984 /* Assign rtl to DECL at BASE + OFFSET. */
987 expand_one_stack_var_at (tree decl
, rtx base
, unsigned base_align
,
993 /* If this fails, we've overflowed the stack frame. Error nicely? */
994 gcc_assert (known_eq (offset
, trunc_int_for_mode (offset
, Pmode
)));
996 x
= plus_constant (Pmode
, base
, offset
);
997 x
= gen_rtx_MEM (TREE_CODE (decl
) == SSA_NAME
998 ? TYPE_MODE (TREE_TYPE (decl
))
999 : DECL_MODE (SSAVAR (decl
)), x
);
1001 if (TREE_CODE (decl
) != SSA_NAME
)
1003 /* Set alignment we actually gave this decl if it isn't an SSA name.
1004 If it is we generate stack slots only accidentally so it isn't as
1005 important, we'll simply use the alignment that is already set. */
1006 if (base
== virtual_stack_vars_rtx
)
1007 offset
-= frame_phase
;
1008 align
= known_alignment (offset
);
1009 align
*= BITS_PER_UNIT
;
1010 if (align
== 0 || align
> base_align
)
1013 /* One would think that we could assert that we're not decreasing
1014 alignment here, but (at least) the i386 port does exactly this
1015 via the MINIMUM_ALIGNMENT hook. */
1017 SET_DECL_ALIGN (decl
, align
);
1018 DECL_USER_ALIGN (decl
) = 0;
1024 class stack_vars_data
1027 /* Vector of offset pairs, always end of some padding followed
1028 by start of the padding that needs Address Sanitizer protection.
1029 The vector is in reversed, highest offset pairs come first. */
1030 auto_vec
<HOST_WIDE_INT
> asan_vec
;
1032 /* Vector of partition representative decls in between the paddings. */
1033 auto_vec
<tree
> asan_decl_vec
;
1035 /* Base pseudo register for Address Sanitizer protected automatic vars. */
1038 /* Alignment needed for the Address Sanitizer protected automatic vars. */
1039 unsigned int asan_alignb
;
1042 /* A subroutine of expand_used_vars. Give each partition representative
1043 a unique location within the stack frame. Update each partition member
1044 with that location. */
1047 expand_stack_vars (bool (*pred
) (size_t), class stack_vars_data
*data
)
1049 size_t si
, i
, j
, n
= stack_vars_num
;
1050 poly_uint64 large_size
= 0, large_alloc
= 0;
1051 rtx large_base
= NULL
;
1052 unsigned large_align
= 0;
1053 bool large_allocation_done
= false;
1056 /* Determine if there are any variables requiring "large" alignment.
1057 Since these are dynamically allocated, we only process these if
1058 no predicate involved. */
1059 large_align
= stack_vars
[stack_vars_sorted
[0]].alignb
* BITS_PER_UNIT
;
1060 if (pred
== NULL
&& large_align
> MAX_SUPPORTED_STACK_ALIGNMENT
)
1062 /* Find the total size of these variables. */
1063 for (si
= 0; si
< n
; ++si
)
1067 i
= stack_vars_sorted
[si
];
1068 alignb
= stack_vars
[i
].alignb
;
1070 /* All "large" alignment decls come before all "small" alignment
1071 decls, but "large" alignment decls are not sorted based on
1072 their alignment. Increase large_align to track the largest
1073 required alignment. */
1074 if ((alignb
* BITS_PER_UNIT
) > large_align
)
1075 large_align
= alignb
* BITS_PER_UNIT
;
1077 /* Stop when we get to the first decl with "small" alignment. */
1078 if (alignb
* BITS_PER_UNIT
<= MAX_SUPPORTED_STACK_ALIGNMENT
)
1081 /* Skip variables that aren't partition representatives. */
1082 if (stack_vars
[i
].representative
!= i
)
1085 /* Skip variables that have already had rtl assigned. See also
1086 add_stack_var where we perpetrate this pc_rtx hack. */
1087 decl
= stack_vars
[i
].decl
;
1088 if (TREE_CODE (decl
) == SSA_NAME
1089 ? SA
.partition_to_pseudo
[var_to_partition (SA
.map
, decl
)] != NULL_RTX
1090 : DECL_RTL (decl
) != pc_rtx
)
1093 large_size
= aligned_upper_bound (large_size
, alignb
);
1094 large_size
+= stack_vars
[i
].size
;
1098 for (si
= 0; si
< n
; ++si
)
1101 unsigned base_align
, alignb
;
1104 i
= stack_vars_sorted
[si
];
1106 /* Skip variables that aren't partition representatives, for now. */
1107 if (stack_vars
[i
].representative
!= i
)
1110 /* Skip variables that have already had rtl assigned. See also
1111 add_stack_var where we perpetrate this pc_rtx hack. */
1112 decl
= stack_vars
[i
].decl
;
1113 if (TREE_CODE (decl
) == SSA_NAME
1114 ? SA
.partition_to_pseudo
[var_to_partition (SA
.map
, decl
)] != NULL_RTX
1115 : DECL_RTL (decl
) != pc_rtx
)
1118 /* Check the predicate to see whether this variable should be
1119 allocated in this pass. */
1120 if (pred
&& !pred (i
))
1123 alignb
= stack_vars
[i
].alignb
;
1124 if (alignb
* BITS_PER_UNIT
<= MAX_SUPPORTED_STACK_ALIGNMENT
)
1126 base
= virtual_stack_vars_rtx
;
1127 /* ASAN description strings don't yet have a syntax for expressing
1128 polynomial offsets. */
1129 HOST_WIDE_INT prev_offset
;
1130 if (asan_sanitize_stack_p ()
1132 && frame_offset
.is_constant (&prev_offset
)
1133 && stack_vars
[i
].size
.is_constant ())
1135 if (data
->asan_vec
.is_empty ())
1137 alloc_stack_frame_space (0, ASAN_RED_ZONE_SIZE
);
1138 prev_offset
= frame_offset
.to_constant ();
1140 prev_offset
= align_base (prev_offset
,
1141 ASAN_MIN_RED_ZONE_SIZE
,
1142 !FRAME_GROWS_DOWNWARD
);
1143 tree repr_decl
= NULL_TREE
;
1144 unsigned HOST_WIDE_INT size
1145 = asan_var_and_redzone_size (stack_vars
[i
].size
.to_constant ());
1146 if (data
->asan_vec
.is_empty ())
1147 size
= MAX (size
, ASAN_RED_ZONE_SIZE
);
1149 unsigned HOST_WIDE_INT alignment
= MAX (alignb
,
1150 ASAN_MIN_RED_ZONE_SIZE
);
1151 offset
= alloc_stack_frame_space (size
, alignment
);
1153 data
->asan_vec
.safe_push (prev_offset
);
1154 /* Allocating a constant amount of space from a constant
1155 starting offset must give a constant result. */
1156 data
->asan_vec
.safe_push ((offset
+ stack_vars
[i
].size
)
1158 /* Find best representative of the partition.
1159 Prefer those with DECL_NAME, even better
1160 satisfying asan_protect_stack_decl predicate. */
1161 for (j
= i
; j
!= EOC
; j
= stack_vars
[j
].next
)
1162 if (asan_protect_stack_decl (stack_vars
[j
].decl
)
1163 && DECL_NAME (stack_vars
[j
].decl
))
1165 repr_decl
= stack_vars
[j
].decl
;
1168 else if (repr_decl
== NULL_TREE
1169 && DECL_P (stack_vars
[j
].decl
)
1170 && DECL_NAME (stack_vars
[j
].decl
))
1171 repr_decl
= stack_vars
[j
].decl
;
1172 if (repr_decl
== NULL_TREE
)
1173 repr_decl
= stack_vars
[i
].decl
;
1174 data
->asan_decl_vec
.safe_push (repr_decl
);
1176 /* Make sure a representative is unpoison if another
1177 variable in the partition is handled by
1178 use-after-scope sanitization. */
1179 if (asan_handled_variables
!= NULL
1180 && !asan_handled_variables
->contains (repr_decl
))
1182 for (j
= i
; j
!= EOC
; j
= stack_vars
[j
].next
)
1183 if (asan_handled_variables
->contains (stack_vars
[j
].decl
))
1186 asan_handled_variables
->add (repr_decl
);
1189 data
->asan_alignb
= MAX (data
->asan_alignb
, alignb
);
1190 if (data
->asan_base
== NULL
)
1191 data
->asan_base
= gen_reg_rtx (Pmode
);
1192 base
= data
->asan_base
;
1194 if (!STRICT_ALIGNMENT
)
1195 base_align
= crtl
->max_used_stack_slot_alignment
;
1197 base_align
= MAX (crtl
->max_used_stack_slot_alignment
,
1198 GET_MODE_ALIGNMENT (SImode
)
1199 << ASAN_SHADOW_SHIFT
);
1203 offset
= alloc_stack_frame_space (stack_vars
[i
].size
, alignb
);
1204 base_align
= crtl
->max_used_stack_slot_alignment
;
1209 /* Large alignment is only processed in the last pass. */
1213 /* If there were any variables requiring "large" alignment, allocate
1215 if (maybe_ne (large_size
, 0U) && ! large_allocation_done
)
1218 rtx large_allocsize
;
1220 large_allocsize
= gen_int_mode (large_size
, Pmode
);
1221 get_dynamic_stack_size (&large_allocsize
, 0, large_align
, NULL
);
1222 loffset
= alloc_stack_frame_space
1223 (rtx_to_poly_int64 (large_allocsize
),
1224 PREFERRED_STACK_BOUNDARY
/ BITS_PER_UNIT
);
1225 large_base
= get_dynamic_stack_base (loffset
, large_align
);
1226 large_allocation_done
= true;
1228 gcc_assert (large_base
!= NULL
);
1230 large_alloc
= aligned_upper_bound (large_alloc
, alignb
);
1231 offset
= large_alloc
;
1232 large_alloc
+= stack_vars
[i
].size
;
1235 base_align
= large_align
;
1238 /* Create rtl for each variable based on their location within the
1240 for (j
= i
; j
!= EOC
; j
= stack_vars
[j
].next
)
1242 expand_one_stack_var_at (stack_vars
[j
].decl
,
1248 gcc_assert (known_eq (large_alloc
, large_size
));
1251 /* Take into account all sizes of partitions and reset DECL_RTLs. */
1253 account_stack_vars (void)
1255 size_t si
, j
, i
, n
= stack_vars_num
;
1256 poly_uint64 size
= 0;
1258 for (si
= 0; si
< n
; ++si
)
1260 i
= stack_vars_sorted
[si
];
1262 /* Skip variables that aren't partition representatives, for now. */
1263 if (stack_vars
[i
].representative
!= i
)
1266 size
+= stack_vars
[i
].size
;
1267 for (j
= i
; j
!= EOC
; j
= stack_vars
[j
].next
)
1268 set_rtl (stack_vars
[j
].decl
, NULL
);
1273 /* Record the RTL assignment X for the default def of PARM. */
1276 set_parm_rtl (tree parm
, rtx x
)
1278 gcc_assert (TREE_CODE (parm
) == PARM_DECL
1279 || TREE_CODE (parm
) == RESULT_DECL
);
1281 if (x
&& !MEM_P (x
))
1283 unsigned int align
= MINIMUM_ALIGNMENT (TREE_TYPE (parm
),
1284 TYPE_MODE (TREE_TYPE (parm
)),
1285 TYPE_ALIGN (TREE_TYPE (parm
)));
1287 /* If the variable alignment is very large we'll dynamicaly
1288 allocate it, which means that in-frame portion is just a
1289 pointer. ??? We've got a pseudo for sure here, do we
1290 actually dynamically allocate its spilling area if needed?
1291 ??? Isn't it a problem when Pmode alignment also exceeds
1292 MAX_SUPPORTED_STACK_ALIGNMENT, as can happen on cris and lm32? */
1293 if (align
> MAX_SUPPORTED_STACK_ALIGNMENT
)
1294 align
= GET_MODE_ALIGNMENT (Pmode
);
1296 record_alignment_for_reg_var (align
);
1299 tree ssa
= ssa_default_def (cfun
, parm
);
1301 return set_rtl (parm
, x
);
1303 int part
= var_to_partition (SA
.map
, ssa
);
1304 gcc_assert (part
!= NO_PARTITION
);
1306 bool changed
= bitmap_bit_p (SA
.partitions_for_parm_default_defs
, part
);
1307 gcc_assert (changed
);
1310 gcc_assert (DECL_RTL (parm
) == x
);
1313 /* A subroutine of expand_one_var. Called to immediately assign rtl
1314 to a variable to be allocated in the stack frame. */
1317 expand_one_stack_var_1 (tree var
)
1321 unsigned byte_align
;
1323 if (TREE_CODE (var
) == SSA_NAME
)
1325 tree type
= TREE_TYPE (var
);
1326 size
= tree_to_poly_uint64 (TYPE_SIZE_UNIT (type
));
1327 byte_align
= TYPE_ALIGN_UNIT (type
);
1331 size
= tree_to_poly_uint64 (DECL_SIZE_UNIT (var
));
1332 byte_align
= align_local_variable (var
, true);
1335 /* We handle highly aligned variables in expand_stack_vars. */
1336 gcc_assert (byte_align
* BITS_PER_UNIT
<= MAX_SUPPORTED_STACK_ALIGNMENT
);
1338 offset
= alloc_stack_frame_space (size
, byte_align
);
1340 expand_one_stack_var_at (var
, virtual_stack_vars_rtx
,
1341 crtl
->max_used_stack_slot_alignment
, offset
);
1344 /* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are
1345 already assigned some MEM. */
1348 expand_one_stack_var (tree var
)
1350 if (TREE_CODE (var
) == SSA_NAME
)
1352 int part
= var_to_partition (SA
.map
, var
);
1353 if (part
!= NO_PARTITION
)
1355 rtx x
= SA
.partition_to_pseudo
[part
];
1357 gcc_assert (MEM_P (x
));
1362 return expand_one_stack_var_1 (var
);
1365 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1366 that will reside in a hard register. */
1369 expand_one_hard_reg_var (tree var
)
1371 rest_of_decl_compilation (var
, 0, 0);
1374 /* Record the alignment requirements of some variable assigned to a
1378 record_alignment_for_reg_var (unsigned int align
)
1380 if (SUPPORTS_STACK_ALIGNMENT
1381 && crtl
->stack_alignment_estimated
< align
)
1383 /* stack_alignment_estimated shouldn't change after stack
1384 realign decision made */
1385 gcc_assert (!crtl
->stack_realign_processed
);
1386 crtl
->stack_alignment_estimated
= align
;
1389 /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
1390 So here we only make sure stack_alignment_needed >= align. */
1391 if (crtl
->stack_alignment_needed
< align
)
1392 crtl
->stack_alignment_needed
= align
;
1393 if (crtl
->max_used_stack_slot_alignment
< align
)
1394 crtl
->max_used_stack_slot_alignment
= align
;
1397 /* Create RTL for an SSA partition. */
1400 expand_one_ssa_partition (tree var
)
1402 int part
= var_to_partition (SA
.map
, var
);
1403 gcc_assert (part
!= NO_PARTITION
);
1405 if (SA
.partition_to_pseudo
[part
])
1408 unsigned int align
= MINIMUM_ALIGNMENT (TREE_TYPE (var
),
1409 TYPE_MODE (TREE_TYPE (var
)),
1410 TYPE_ALIGN (TREE_TYPE (var
)));
1412 /* If the variable alignment is very large we'll dynamicaly allocate
1413 it, which means that in-frame portion is just a pointer. */
1414 if (align
> MAX_SUPPORTED_STACK_ALIGNMENT
)
1415 align
= GET_MODE_ALIGNMENT (Pmode
);
1417 record_alignment_for_reg_var (align
);
1419 if (!use_register_for_decl (var
))
1421 if (defer_stack_allocation (var
, true))
1422 add_stack_var (var
, true);
1424 expand_one_stack_var_1 (var
);
1428 machine_mode reg_mode
= promote_ssa_mode (var
, NULL
);
1429 rtx x
= gen_reg_rtx (reg_mode
);
1433 /* For a promoted variable, X will not be used directly but wrapped in a
1434 SUBREG with SUBREG_PROMOTED_VAR_P set, which means that the RTL land
1435 will assume that its upper bits can be inferred from its lower bits.
1436 Therefore, if X isn't initialized on every path from the entry, then
1437 we must do it manually in order to fulfill the above assumption. */
1438 if (reg_mode
!= TYPE_MODE (TREE_TYPE (var
))
1439 && bitmap_bit_p (SA
.partitions_for_undefined_values
, part
))
1440 emit_move_insn (x
, CONST0_RTX (reg_mode
));
1443 /* Record the association between the RTL generated for partition PART
1444 and the underlying variable of the SSA_NAME VAR. */
1447 adjust_one_expanded_partition_var (tree var
)
1452 tree decl
= SSA_NAME_VAR (var
);
1454 int part
= var_to_partition (SA
.map
, var
);
1455 if (part
== NO_PARTITION
)
1458 rtx x
= SA
.partition_to_pseudo
[part
];
1467 /* Note if the object is a user variable. */
1468 if (decl
&& !DECL_ARTIFICIAL (decl
))
1471 if (POINTER_TYPE_P (decl
? TREE_TYPE (decl
) : TREE_TYPE (var
)))
1472 mark_reg_pointer (x
, get_pointer_alignment (var
));
1475 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL
1476 that will reside in a pseudo register. */
1479 expand_one_register_var (tree var
)
1481 if (TREE_CODE (var
) == SSA_NAME
)
1483 int part
= var_to_partition (SA
.map
, var
);
1484 if (part
!= NO_PARTITION
)
1486 rtx x
= SA
.partition_to_pseudo
[part
];
1488 gcc_assert (REG_P (x
));
1495 tree type
= TREE_TYPE (decl
);
1496 machine_mode reg_mode
= promote_decl_mode (decl
, NULL
);
1497 rtx x
= gen_reg_rtx (reg_mode
);
1501 /* Note if the object is a user variable. */
1502 if (!DECL_ARTIFICIAL (decl
))
1505 if (POINTER_TYPE_P (type
))
1506 mark_reg_pointer (x
, get_pointer_alignment (var
));
1509 /* A subroutine of expand_one_var. Called to assign rtl to a VAR_DECL that
1510 has some associated error, e.g. its type is error-mark. We just need
1511 to pick something that won't crash the rest of the compiler. */
1514 expand_one_error_var (tree var
)
1516 machine_mode mode
= DECL_MODE (var
);
1519 if (mode
== BLKmode
)
1520 x
= gen_rtx_MEM (BLKmode
, const0_rtx
);
1521 else if (mode
== VOIDmode
)
1524 x
= gen_reg_rtx (mode
);
1526 SET_DECL_RTL (var
, x
);
1529 /* A subroutine of expand_one_var. VAR is a variable that will be
1530 allocated to the local stack frame. Return true if we wish to
1531 add VAR to STACK_VARS so that it will be coalesced with other
1532 variables. Return false to allocate VAR immediately.
1534 This function is used to reduce the number of variables considered
1535 for coalescing, which reduces the size of the quadratic problem. */
1538 defer_stack_allocation (tree var
, bool toplevel
)
1540 tree size_unit
= TREE_CODE (var
) == SSA_NAME
1541 ? TYPE_SIZE_UNIT (TREE_TYPE (var
))
1542 : DECL_SIZE_UNIT (var
);
1545 /* Whether the variable is small enough for immediate allocation not to be
1546 a problem with regard to the frame size. */
1548 = (poly_int_tree_p (size_unit
, &size
)
1549 && (estimated_poly_value (size
)
1550 < param_min_size_for_stack_sharing
));
1552 /* If stack protection is enabled, *all* stack variables must be deferred,
1553 so that we can re-order the strings to the top of the frame.
1554 Similarly for Address Sanitizer. */
1555 if (flag_stack_protect
|| asan_sanitize_stack_p ())
1558 unsigned int align
= TREE_CODE (var
) == SSA_NAME
1559 ? TYPE_ALIGN (TREE_TYPE (var
))
1562 /* We handle "large" alignment via dynamic allocation. We want to handle
1563 this extra complication in only one place, so defer them. */
1564 if (align
> MAX_SUPPORTED_STACK_ALIGNMENT
)
1567 bool ignored
= TREE_CODE (var
) == SSA_NAME
1568 ? !SSAVAR (var
) || DECL_IGNORED_P (SSA_NAME_VAR (var
))
1569 : DECL_IGNORED_P (var
);
1571 /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
1572 might be detached from their block and appear at toplevel when we reach
1573 here. We want to coalesce them with variables from other blocks when
1574 the immediate contribution to the frame size would be noticeable. */
1575 if (toplevel
&& optimize
> 0 && ignored
&& !smallish
)
1578 /* Variables declared in the outermost scope automatically conflict
1579 with every other variable. The only reason to want to defer them
1580 at all is that, after sorting, we can more efficiently pack
1581 small variables in the stack frame. Continue to defer at -O2. */
1582 if (toplevel
&& optimize
< 2)
1585 /* Without optimization, *most* variables are allocated from the
1586 stack, which makes the quadratic problem large exactly when we
1587 want compilation to proceed as quickly as possible. On the
1588 other hand, we don't want the function's stack frame size to
1589 get completely out of hand. So we avoid adding scalars and
1590 "small" aggregates to the list at all. */
1591 if (optimize
== 0 && smallish
)
1597 /* A subroutine of expand_used_vars. Expand one variable according to
1598 its flavor. Variables to be placed on the stack are not actually
1599 expanded yet, merely recorded.
1600 When REALLY_EXPAND is false, only add stack values to be allocated.
1601 Return stack usage this variable is supposed to take.
1605 expand_one_var (tree var
, bool toplevel
, bool really_expand
)
1607 unsigned int align
= BITS_PER_UNIT
;
1612 if (TREE_TYPE (var
) != error_mark_node
&& VAR_P (var
))
1614 if (is_global_var (var
))
1617 /* Because we don't know if VAR will be in register or on stack,
1618 we conservatively assume it will be on stack even if VAR is
1619 eventually put into register after RA pass. For non-automatic
1620 variables, which won't be on stack, we collect alignment of
1621 type and ignore user specified alignment. Similarly for
1622 SSA_NAMEs for which use_register_for_decl returns true. */
1623 if (TREE_STATIC (var
)
1624 || DECL_EXTERNAL (var
)
1625 || (TREE_CODE (origvar
) == SSA_NAME
&& use_register_for_decl (var
)))
1626 align
= MINIMUM_ALIGNMENT (TREE_TYPE (var
),
1627 TYPE_MODE (TREE_TYPE (var
)),
1628 TYPE_ALIGN (TREE_TYPE (var
)));
1629 else if (DECL_HAS_VALUE_EXPR_P (var
)
1630 || (DECL_RTL_SET_P (var
) && MEM_P (DECL_RTL (var
))))
1631 /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
1632 or variables which were assigned a stack slot already by
1633 expand_one_stack_var_at - in the latter case DECL_ALIGN has been
1634 changed from the offset chosen to it. */
1635 align
= crtl
->stack_alignment_estimated
;
1637 align
= MINIMUM_ALIGNMENT (var
, DECL_MODE (var
), DECL_ALIGN (var
));
1639 /* If the variable alignment is very large we'll dynamicaly allocate
1640 it, which means that in-frame portion is just a pointer. */
1641 if (align
> MAX_SUPPORTED_STACK_ALIGNMENT
)
1642 align
= GET_MODE_ALIGNMENT (Pmode
);
1645 record_alignment_for_reg_var (align
);
1648 if (TREE_CODE (origvar
) == SSA_NAME
)
1650 gcc_assert (!VAR_P (var
)
1651 || (!DECL_EXTERNAL (var
)
1652 && !DECL_HAS_VALUE_EXPR_P (var
)
1653 && !TREE_STATIC (var
)
1654 && TREE_TYPE (var
) != error_mark_node
1655 && !DECL_HARD_REGISTER (var
)
1658 if (!VAR_P (var
) && TREE_CODE (origvar
) != SSA_NAME
)
1660 else if (DECL_EXTERNAL (var
))
1662 else if (DECL_HAS_VALUE_EXPR_P (var
))
1664 else if (TREE_STATIC (var
))
1666 else if (TREE_CODE (origvar
) != SSA_NAME
&& DECL_RTL_SET_P (var
))
1668 else if (TREE_TYPE (var
) == error_mark_node
)
1671 expand_one_error_var (var
);
1673 else if (VAR_P (var
) && DECL_HARD_REGISTER (var
))
1677 expand_one_hard_reg_var (var
);
1678 if (!DECL_HARD_REGISTER (var
))
1679 /* Invalid register specification. */
1680 expand_one_error_var (var
);
1683 else if (use_register_for_decl (var
))
1686 expand_one_register_var (origvar
);
1688 else if (!poly_int_tree_p (DECL_SIZE_UNIT (var
), &size
)
1689 || !valid_constant_size_p (DECL_SIZE_UNIT (var
)))
1691 /* Reject variables which cover more than half of the address-space. */
1694 if (DECL_NONLOCAL_FRAME (var
))
1695 error_at (DECL_SOURCE_LOCATION (current_function_decl
),
1696 "total size of local objects is too large");
1698 error_at (DECL_SOURCE_LOCATION (var
),
1699 "size of variable %q+D is too large", var
);
1700 expand_one_error_var (var
);
1703 else if (defer_stack_allocation (var
, toplevel
))
1704 add_stack_var (origvar
, really_expand
);
1709 if (lookup_attribute ("naked",
1710 DECL_ATTRIBUTES (current_function_decl
)))
1711 error ("cannot allocate stack for variable %q+D, naked function",
1714 expand_one_stack_var (origvar
);
1721 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1722 expanding variables. Those variables that can be put into registers
1723 are allocated pseudos; those that can't are put on the stack.
1725 TOPLEVEL is true if this is the outermost BLOCK. */
1728 expand_used_vars_for_block (tree block
, bool toplevel
)
1732 /* Expand all variables at this level. */
1733 for (t
= BLOCK_VARS (block
); t
; t
= DECL_CHAIN (t
))
1735 && ((!VAR_P (t
) && TREE_CODE (t
) != RESULT_DECL
)
1736 || !DECL_NONSHAREABLE (t
)))
1737 expand_one_var (t
, toplevel
, true);
1739 /* Expand all variables at containing levels. */
1740 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
1741 expand_used_vars_for_block (t
, false);
1744 /* A subroutine of expand_used_vars. Walk down through the BLOCK tree
1745 and clear TREE_USED on all local variables. */
1748 clear_tree_used (tree block
)
1752 for (t
= BLOCK_VARS (block
); t
; t
= DECL_CHAIN (t
))
1753 /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
1754 if ((!VAR_P (t
) && TREE_CODE (t
) != RESULT_DECL
)
1755 || !DECL_NONSHAREABLE (t
))
1758 for (t
= BLOCK_SUBBLOCKS (block
); t
; t
= BLOCK_CHAIN (t
))
1759 clear_tree_used (t
);
1763 SPCT_FLAG_DEFAULT
= 1,
1765 SPCT_FLAG_STRONG
= 3,
1766 SPCT_FLAG_EXPLICIT
= 4
1769 /* Examine TYPE and determine a bit mask of the following features. */
1771 #define SPCT_HAS_LARGE_CHAR_ARRAY 1
1772 #define SPCT_HAS_SMALL_CHAR_ARRAY 2
1773 #define SPCT_HAS_ARRAY 4
1774 #define SPCT_HAS_AGGREGATE 8
1777 stack_protect_classify_type (tree type
)
1779 unsigned int ret
= 0;
1782 switch (TREE_CODE (type
))
1785 t
= TYPE_MAIN_VARIANT (TREE_TYPE (type
));
1786 if (t
== char_type_node
1787 || t
== signed_char_type_node
1788 || t
== unsigned_char_type_node
)
1790 unsigned HOST_WIDE_INT max
= param_ssp_buffer_size
;
1791 unsigned HOST_WIDE_INT len
;
1793 if (!TYPE_SIZE_UNIT (type
)
1794 || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type
)))
1797 len
= tree_to_uhwi (TYPE_SIZE_UNIT (type
));
1800 ret
= SPCT_HAS_SMALL_CHAR_ARRAY
| SPCT_HAS_ARRAY
;
1802 ret
= SPCT_HAS_LARGE_CHAR_ARRAY
| SPCT_HAS_ARRAY
;
1805 ret
= SPCT_HAS_ARRAY
;
1809 case QUAL_UNION_TYPE
:
1811 ret
= SPCT_HAS_AGGREGATE
;
1812 for (t
= TYPE_FIELDS (type
); t
; t
= TREE_CHAIN (t
))
1813 if (TREE_CODE (t
) == FIELD_DECL
)
1814 ret
|= stack_protect_classify_type (TREE_TYPE (t
));
1824 /* Return nonzero if DECL should be segregated into the "vulnerable" upper
1825 part of the local stack frame. Remember if we ever return nonzero for
1826 any variable in this function. The return value is the phase number in
1827 which the variable should be allocated. */
1830 stack_protect_decl_phase (tree decl
)
1832 unsigned int bits
= stack_protect_classify_type (TREE_TYPE (decl
));
1835 if (bits
& SPCT_HAS_SMALL_CHAR_ARRAY
)
1836 has_short_buffer
= true;
1838 if (flag_stack_protect
== SPCT_FLAG_ALL
1839 || flag_stack_protect
== SPCT_FLAG_STRONG
1840 || (flag_stack_protect
== SPCT_FLAG_EXPLICIT
1841 && lookup_attribute ("stack_protect",
1842 DECL_ATTRIBUTES (current_function_decl
))))
1844 if ((bits
& (SPCT_HAS_SMALL_CHAR_ARRAY
| SPCT_HAS_LARGE_CHAR_ARRAY
))
1845 && !(bits
& SPCT_HAS_AGGREGATE
))
1847 else if (bits
& SPCT_HAS_ARRAY
)
1851 ret
= (bits
& SPCT_HAS_LARGE_CHAR_ARRAY
) != 0;
1854 has_protected_decls
= true;
1859 /* Two helper routines that check for phase 1 and phase 2. These are used
1860 as callbacks for expand_stack_vars. */
1863 stack_protect_decl_phase_1 (size_t i
)
1865 return stack_protect_decl_phase (stack_vars
[i
].decl
) == 1;
1869 stack_protect_decl_phase_2 (size_t i
)
1871 return stack_protect_decl_phase (stack_vars
[i
].decl
) == 2;
1874 /* And helper function that checks for asan phase (with stack protector
1875 it is phase 3). This is used as callback for expand_stack_vars.
1876 Returns true if any of the vars in the partition need to be protected. */
1879 asan_decl_phase_3 (size_t i
)
1883 if (asan_protect_stack_decl (stack_vars
[i
].decl
))
1885 i
= stack_vars
[i
].next
;
1890 /* Ensure that variables in different stack protection phases conflict
1891 so that they are not merged and share the same stack slot. */
1894 add_stack_protection_conflicts (void)
1896 size_t i
, j
, n
= stack_vars_num
;
1897 unsigned char *phase
;
1899 phase
= XNEWVEC (unsigned char, n
);
1900 for (i
= 0; i
< n
; ++i
)
1901 phase
[i
] = stack_protect_decl_phase (stack_vars
[i
].decl
);
1903 for (i
= 0; i
< n
; ++i
)
1905 unsigned char ph_i
= phase
[i
];
1906 for (j
= i
+ 1; j
< n
; ++j
)
1907 if (ph_i
!= phase
[j
])
1908 add_stack_var_conflict (i
, j
);
1914 /* Create a decl for the guard at the top of the stack frame. */
1917 create_stack_guard (void)
1919 tree guard
= build_decl (DECL_SOURCE_LOCATION (current_function_decl
),
1920 VAR_DECL
, NULL
, ptr_type_node
);
1921 TREE_THIS_VOLATILE (guard
) = 1;
1922 TREE_USED (guard
) = 1;
1923 expand_one_stack_var (guard
);
1924 crtl
->stack_protect_guard
= guard
;
1927 /* Prepare for expanding variables. */
1929 init_vars_expansion (void)
1931 /* Conflict bitmaps, and a few related temporary bitmaps, go here. */
1932 bitmap_obstack_initialize (&stack_var_bitmap_obstack
);
1934 /* A map from decl to stack partition. */
1935 decl_to_stack_part
= new hash_map
<tree
, size_t>;
1937 /* Initialize local stack smashing state. */
1938 has_protected_decls
= false;
1939 has_short_buffer
= false;
1942 /* Free up stack variable graph data. */
1944 fini_vars_expansion (void)
1946 bitmap_obstack_release (&stack_var_bitmap_obstack
);
1948 XDELETEVEC (stack_vars
);
1949 if (stack_vars_sorted
)
1950 XDELETEVEC (stack_vars_sorted
);
1952 stack_vars_sorted
= NULL
;
1953 stack_vars_alloc
= stack_vars_num
= 0;
1954 delete decl_to_stack_part
;
1955 decl_to_stack_part
= NULL
;
1958 /* Make a fair guess for the size of the stack frame of the function
1959 in NODE. This doesn't have to be exact, the result is only used in
1960 the inline heuristics. So we don't want to run the full stack var
1961 packing algorithm (which is quadratic in the number of stack vars).
1962 Instead, we calculate the total size of all stack vars. This turns
1963 out to be a pretty fair estimate -- packing of stack vars doesn't
1964 happen very often. */
1967 estimated_stack_frame_size (struct cgraph_node
*node
)
1969 poly_int64 size
= 0;
1972 struct function
*fn
= DECL_STRUCT_FUNCTION (node
->decl
);
1976 init_vars_expansion ();
1978 FOR_EACH_LOCAL_DECL (fn
, i
, var
)
1979 if (auto_var_in_fn_p (var
, fn
->decl
))
1980 size
+= expand_one_var (var
, true, false);
1982 if (stack_vars_num
> 0)
1984 /* Fake sorting the stack vars for account_stack_vars (). */
1985 stack_vars_sorted
= XNEWVEC (size_t, stack_vars_num
);
1986 for (i
= 0; i
< stack_vars_num
; ++i
)
1987 stack_vars_sorted
[i
] = i
;
1988 size
+= account_stack_vars ();
1991 fini_vars_expansion ();
1993 return estimated_poly_value (size
);
1996 /* Helper routine to check if a record or union contains an array field. */
1999 record_or_union_type_has_array_p (const_tree tree_type
)
2001 tree fields
= TYPE_FIELDS (tree_type
);
2004 for (f
= fields
; f
; f
= DECL_CHAIN (f
))
2005 if (TREE_CODE (f
) == FIELD_DECL
)
2007 tree field_type
= TREE_TYPE (f
);
2008 if (RECORD_OR_UNION_TYPE_P (field_type
)
2009 && record_or_union_type_has_array_p (field_type
))
2011 if (TREE_CODE (field_type
) == ARRAY_TYPE
)
2017 /* Check if the current function has local referenced variables that
2018 have their addresses taken, contain an array, or are arrays. */
2021 stack_protect_decl_p ()
2026 FOR_EACH_LOCAL_DECL (cfun
, i
, var
)
2027 if (!is_global_var (var
))
2029 tree var_type
= TREE_TYPE (var
);
2031 && (TREE_CODE (var_type
) == ARRAY_TYPE
2032 || TREE_ADDRESSABLE (var
)
2033 || (RECORD_OR_UNION_TYPE_P (var_type
)
2034 && record_or_union_type_has_array_p (var_type
))))
2040 /* Check if the current function has calls that use a return slot. */
2043 stack_protect_return_slot_p ()
2047 FOR_ALL_BB_FN (bb
, cfun
)
2048 for (gimple_stmt_iterator gsi
= gsi_start_bb (bb
);
2049 !gsi_end_p (gsi
); gsi_next (&gsi
))
2051 gimple
*stmt
= gsi_stmt (gsi
);
2052 /* This assumes that calls to internal-only functions never
2053 use a return slot. */
2054 if (is_gimple_call (stmt
)
2055 && !gimple_call_internal_p (stmt
)
2056 && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt
)),
2057 gimple_call_fndecl (stmt
)))
2063 /* Expand all variables used in the function. */
2066 expand_used_vars (void)
2068 tree var
, outer_block
= DECL_INITIAL (current_function_decl
);
2069 auto_vec
<tree
> maybe_local_decls
;
2070 rtx_insn
*var_end_seq
= NULL
;
2073 bool gen_stack_protect_signal
= false;
2075 /* Compute the phase of the stack frame for this function. */
2077 int align
= PREFERRED_STACK_BOUNDARY
/ BITS_PER_UNIT
;
2078 int off
= targetm
.starting_frame_offset () % align
;
2079 frame_phase
= off
? align
- off
: 0;
2082 /* Set TREE_USED on all variables in the local_decls. */
2083 FOR_EACH_LOCAL_DECL (cfun
, i
, var
)
2084 TREE_USED (var
) = 1;
2085 /* Clear TREE_USED on all variables associated with a block scope. */
2086 clear_tree_used (DECL_INITIAL (current_function_decl
));
2088 init_vars_expansion ();
2090 if (targetm
.use_pseudo_pic_reg ())
2091 pic_offset_table_rtx
= gen_reg_rtx (Pmode
);
2093 for (i
= 0; i
< SA
.map
->num_partitions
; i
++)
2095 if (bitmap_bit_p (SA
.partitions_for_parm_default_defs
, i
))
2098 tree var
= partition_to_var (SA
.map
, i
);
2100 gcc_assert (!virtual_operand_p (var
));
2102 expand_one_ssa_partition (var
);
2105 if (flag_stack_protect
== SPCT_FLAG_STRONG
)
2106 gen_stack_protect_signal
2107 = stack_protect_decl_p () || stack_protect_return_slot_p ();
  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */

  len = vec_safe_length (cfun->local_decls);
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      bool expand_now = false;

      /* Expanded above already.  */
      if (is_gimple_reg (var))
	{
	  TREE_USED (var) = 0;
	  goto next;
	}
      /* We didn't set a block for static or extern because it's hard
	 to tell the difference between a global variable (re)declared
	 in a local scope, and one that's really declared there to
	 begin with.  And it doesn't really matter much, since we're
	 not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
	expand_now = true;

      /* Expand variables not associated with any block now.  Those created by
	 the optimizers could be live anywhere in the function.  Those that
	 could possibly have been scoped originally and detached from their
	 block will have their allocation deferred so we coalesce them with
	 others when optimization is enabled.  */
      else if (TREE_USED (var))
	expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
	 this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
	expand_one_var (var, true, true);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
	{
	  rtx rtl = DECL_RTL_IF_SET (var);

	  /* Keep artificial non-ignored vars in cfun->local_decls
	     chain until instantiate_decls.  */
	  if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
	    add_local_decl (cfun, var);
	  else if (rtl == NULL_RTX)
	    /* If rtl isn't set yet, which can happen e.g. with
	       -fstack-protector, retry before returning from this
	       function.  */
	    maybe_local_decls.safe_push (var);
	}
    }

  /* We duplicated some of the decls in CFUN->LOCAL_DECLS.

     +-----------------+-----------------+
     | ...processed... | ...duplicates...|
     +-----------------+-----------------+
		       ^
		       +-- LEN points here.

     We just want the duplicates, as those are the artificial
     non-ignored vars that we want to keep until instantiate_decls.
     Move them down and truncate the array.  */
  if (!vec_safe_is_empty (cfun->local_decls))
    cfun->local_decls->block_remove (0, len);

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);
  if (stack_vars_num > 0)
    {
      add_scope_conflicts ();

      /* If stack protection is enabled, we don't share space between
	 vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect != 0
	  && (flag_stack_protect != SPCT_FLAG_EXPLICIT
	      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
		  && lookup_attribute ("stack_protect",
				       DECL_ATTRIBUTES (current_function_decl)))))
	add_stack_protection_conflicts ();

      /* Now that we have collected all stack variables, and have computed a
	 minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
	dump_stack_var_partition ();
    }

  switch (flag_stack_protect)
    {
    case SPCT_FLAG_ALL:
      create_stack_guard ();
      break;

    case SPCT_FLAG_STRONG:
      if (gen_stack_protect_signal
	  || cfun->calls_alloca || has_protected_decls
	  || lookup_attribute ("stack_protect",
			       DECL_ATTRIBUTES (current_function_decl)))
	create_stack_guard ();
      break;

    case SPCT_FLAG_DEFAULT:
      if (cfun->calls_alloca || has_protected_decls
	  || lookup_attribute ("stack_protect",
			       DECL_ATTRIBUTES (current_function_decl)))
	create_stack_guard ();
      break;

    case SPCT_FLAG_EXPLICIT:
      if (lookup_attribute ("stack_protect",
			    DECL_ATTRIBUTES (current_function_decl)))
	create_stack_guard ();
      break;

    default:
      break;
    }
  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      class stack_vars_data data;

      data.asan_base = NULL_RTX;
      data.asan_alignb = 0;

      /* Reorder decls to be protected by iterating over the variables
	 array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
	 earlier, such that we naturally see these variables first,
	 and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
	{
	  /* Phase 1 contains only character arrays.  */
	  expand_stack_vars (stack_protect_decl_phase_1, &data);

	  /* Phase 2 contains other kinds of arrays.  */
	  if (flag_stack_protect == SPCT_FLAG_ALL
	      || flag_stack_protect == SPCT_FLAG_STRONG
	      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
		  && lookup_attribute ("stack_protect",
				       DECL_ATTRIBUTES (current_function_decl))))
	    expand_stack_vars (stack_protect_decl_phase_2, &data);
	}

      if (asan_sanitize_stack_p ())
	/* Phase 3, any partitions that need asan protection
	   in addition to phase 1 and 2.  */
	expand_stack_vars (asan_decl_phase_3, &data);

      /* ASAN description strings don't yet have a syntax for expressing
	 polynomial offsets.  */
      HOST_WIDE_INT prev_offset;
      if (!data.asan_vec.is_empty ()
	  && frame_offset.is_constant (&prev_offset))
	{
	  HOST_WIDE_INT offset, sz, redzonesz;
	  redzonesz = ASAN_RED_ZONE_SIZE;
	  sz = data.asan_vec[0] - prev_offset;
	  if (data.asan_alignb > ASAN_RED_ZONE_SIZE
	      && data.asan_alignb <= 4096
	      && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
	    redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
			 & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
	  /* Allocating a constant amount of space from a constant
	     starting offset must give a constant result.  */
	  offset = (alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE)
		    .to_constant ());
	  data.asan_vec.safe_push (prev_offset);
	  data.asan_vec.safe_push (offset);
	  /* Leave space for alignment if STRICT_ALIGNMENT.  */
	  if (STRICT_ALIGNMENT)
	    alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
				      << ASAN_SHADOW_SHIFT)
				     / BITS_PER_UNIT, 1);

	  var_end_seq
	    = asan_emit_stack_protection (virtual_stack_vars_rtx,
					  data.asan_base,
					  data.asan_alignb,
					  data.asan_vec.address (),
					  data.asan_decl_vec.address (),
					  data.asan_vec.length ());
	}

      expand_stack_vars (NULL, &data);
    }

  if (asan_sanitize_allocas_p () && cfun->calls_alloca)
    var_end_seq = asan_emit_allocas_unpoison (virtual_stack_dynamic_rtx,
					      virtual_stack_vars_rtx,
					      var_end_seq);

  fini_vars_expansion ();
  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation hasn't assigned
     rtl to them.  */
  FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
    {
      rtx rtl = DECL_RTL_IF_SET (var);

      /* Keep artificial non-ignored vars in cfun->local_decls
	 chain until instantiate_decls.  */
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
	add_local_decl (cfun, var);
    }

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (FRAME_GROWS_DOWNWARD)
	frame_offset = aligned_lower_bound (frame_offset, align);
      else
	frame_offset = aligned_upper_bound (frame_offset, align);
    }

  return var_end_seq;
}
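/* Note: the instruction sequence returned above (VAR_END_SEQ) is not
   emitted here; the caller (the expand pass) emits it near the end of
   the function, so that ASAN can unpoison the protected stack area and
   any alloca'd memory on exit.  */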
/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
			 TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}
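/* With -fdump-rtl-expand-details the output produced here looks roughly
   like (illustrative only, exact insn numbers and notes vary):
     ;; a_2 = b_1 + 1;
     (insn 12 11 13 (set (reg:SI 87) (plus:SI (reg:SI 86)
	     (const_int 1))) -1 (nil))
   i.e. the GIMPLE statement followed by the RTL emitted for it.  */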
/* Maps the blocks that do not contain tree labels to rtx labels.  */

static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx_code_label *
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  rtx_code_label **elt = lab_rtx_for_bb->get (bb);
  if (elt)
    return *elt;

  /* Find the tree label if it is present.  */
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      glabel *lab_stmt;

      lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!lab_stmt)
	break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
	break;

      return jump_target_rtx (lab);
    }

  rtx_code_label *l = gen_label_rtx ();
  lab_rtx_for_bb->put (bb, l);
  return l;
}
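/* The map above guarantees that each not-yet-expanded block gets at most
   one RTL label, so forward branches (e.g. those emitted by
   expand_gimple_cond) can be generated before the target block itself
   has been expanded.  */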
/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  LAST is the
   last instruction before the just emitted jump sequence.  */

static void
maybe_cleanup_end_of_block (edge e, rtx_insn *last)
{
  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))
    {
      rtx_insn *insn;
      remove_edge (e);
      /* Now, we have a single successor block, if we have insns to
	 insert on the remaining edge we potentially will insert
	 it at the end of this block (if the dest block isn't feasible)
	 in order to avoid splitting the edge.  This insertion will take
	 place in front of the last jump.  But we might have emitted
	 multiple jumps (conditional and one unconditional) to the
	 same destination.  Inserting in front of the last one then
	 is a problem.  See PR 40021.  We fix this by deleting all
	 jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
	 confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != last;)
	{
	  insn = PREV_INSN (insn);
	  if (JUMP_P (NEXT_INSN (insn)))
	    {
	      if (!any_condjump_p (NEXT_INSN (insn)))
		{
		  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
		  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
		}
	      delete_insn (NEXT_INSN (insn));
	    }
	}
    }
}
2447 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
2448 Returns a new basic block if we've terminated the current basic
2449 block and created a new one. */
2452 expand_gimple_cond (basic_block bb
, gcond
*stmt
)
2454 basic_block new_bb
, dest
;
2457 rtx_insn
*last2
, *last
;
2458 enum tree_code code
;
2461 code
= gimple_cond_code (stmt
);
2462 op0
= gimple_cond_lhs (stmt
);
2463 op1
= gimple_cond_rhs (stmt
);
2464 /* We're sometimes presented with such code:
2468 This would expand to two comparisons which then later might
2469 be cleaned up by combine. But some pattern matchers like if-conversion
2470 work better when there's only one compare, so make up for this
2471 here as special exception if TER would have made the same change. */
2473 && TREE_CODE (op0
) == SSA_NAME
2474 && TREE_CODE (TREE_TYPE (op0
)) == BOOLEAN_TYPE
2475 && TREE_CODE (op1
) == INTEGER_CST
2476 && ((gimple_cond_code (stmt
) == NE_EXPR
2477 && integer_zerop (op1
))
2478 || (gimple_cond_code (stmt
) == EQ_EXPR
2479 && integer_onep (op1
)))
2480 && bitmap_bit_p (SA
.values
, SSA_NAME_VERSION (op0
)))
2482 gimple
*second
= SSA_NAME_DEF_STMT (op0
);
2483 if (gimple_code (second
) == GIMPLE_ASSIGN
)
2485 enum tree_code code2
= gimple_assign_rhs_code (second
);
2486 if (TREE_CODE_CLASS (code2
) == tcc_comparison
)
2489 op0
= gimple_assign_rhs1 (second
);
2490 op1
= gimple_assign_rhs2 (second
);
2492 /* If jumps are cheap and the target does not support conditional
2493 compare, turn some more codes into jumpy sequences. */
2494 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2495 && targetm
.gen_ccmp_first
== NULL
)
2497 if ((code2
== BIT_AND_EXPR
2498 && TYPE_PRECISION (TREE_TYPE (op0
)) == 1
2499 && TREE_CODE (gimple_assign_rhs2 (second
)) != INTEGER_CST
)
2500 || code2
== TRUTH_AND_EXPR
)
2502 code
= TRUTH_ANDIF_EXPR
;
2503 op0
= gimple_assign_rhs1 (second
);
2504 op1
= gimple_assign_rhs2 (second
);
2506 else if (code2
== BIT_IOR_EXPR
|| code2
== TRUTH_OR_EXPR
)
2508 code
= TRUTH_ORIF_EXPR
;
2509 op0
= gimple_assign_rhs1 (second
);
2510 op1
= gimple_assign_rhs2 (second
);
2516 /* Optimize (x % C1) == C2 or (x % C1) != C2 if it is beneficial
2517 into (x - C2) * C3 < C4. */
2518 if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
2519 && TREE_CODE (op0
) == SSA_NAME
2520 && TREE_CODE (op1
) == INTEGER_CST
)
2521 code
= maybe_optimize_mod_cmp (code
, &op0
, &op1
);
2523 last2
= last
= get_last_insn ();
2525 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
2526 set_curr_insn_location (gimple_location (stmt
));
2528 /* These flags have no purpose in RTL land. */
2529 true_edge
->flags
&= ~EDGE_TRUE_VALUE
;
2530 false_edge
->flags
&= ~EDGE_FALSE_VALUE
;
2532 /* We can either have a pure conditional jump with one fallthru edge or
2533 two-way jump that needs to be decomposed into two basic blocks. */
2534 if (false_edge
->dest
== bb
->next_bb
)
2536 jumpif_1 (code
, op0
, op1
, label_rtx_for_bb (true_edge
->dest
),
2537 true_edge
->probability
);
2538 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
2539 if (true_edge
->goto_locus
!= UNKNOWN_LOCATION
)
2540 set_curr_insn_location (true_edge
->goto_locus
);
2541 false_edge
->flags
|= EDGE_FALLTHRU
;
2542 maybe_cleanup_end_of_block (false_edge
, last
);
2545 if (true_edge
->dest
== bb
->next_bb
)
2547 jumpifnot_1 (code
, op0
, op1
, label_rtx_for_bb (false_edge
->dest
),
2548 false_edge
->probability
);
2549 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
2550 if (false_edge
->goto_locus
!= UNKNOWN_LOCATION
)
2551 set_curr_insn_location (false_edge
->goto_locus
);
2552 true_edge
->flags
|= EDGE_FALLTHRU
;
2553 maybe_cleanup_end_of_block (true_edge
, last
);
2557 jumpif_1 (code
, op0
, op1
, label_rtx_for_bb (true_edge
->dest
),
2558 true_edge
->probability
);
2559 last
= get_last_insn ();
2560 if (false_edge
->goto_locus
!= UNKNOWN_LOCATION
)
2561 set_curr_insn_location (false_edge
->goto_locus
);
2562 emit_jump (label_rtx_for_bb (false_edge
->dest
));
2565 if (BARRIER_P (BB_END (bb
)))
2566 BB_END (bb
) = PREV_INSN (BB_END (bb
));
2567 update_bb_for_insn (bb
);
2569 new_bb
= create_basic_block (NEXT_INSN (last
), get_last_insn (), bb
);
2570 dest
= false_edge
->dest
;
2571 redirect_edge_succ (false_edge
, new_bb
);
2572 false_edge
->flags
|= EDGE_FALLTHRU
;
2573 new_bb
->count
= false_edge
->count ();
2574 loop_p loop
= find_common_loop (bb
->loop_father
, dest
->loop_father
);
2575 add_bb_to_loop (new_bb
, loop
);
2576 if (loop
->latch
== bb
2577 && loop
->header
== dest
)
2578 loop
->latch
= new_bb
;
2579 make_single_succ_edge (new_bb
, dest
, 0);
2580 if (BARRIER_P (BB_END (new_bb
)))
2581 BB_END (new_bb
) = PREV_INSN (BB_END (new_bb
));
2582 update_bb_for_insn (new_bb
);
2584 maybe_dump_rtl_for_gimple_stmt (stmt
, last2
);
2586 if (true_edge
->goto_locus
!= UNKNOWN_LOCATION
)
2588 set_curr_insn_location (true_edge
->goto_locus
);
2589 true_edge
->goto_locus
= curr_insn_location ();
/* Mark all calls that can have a transaction restart.  */

static void
mark_transaction_restart_calls (gimple *stmt)
{
  struct tm_restart_node dummy;
  tm_restart_node **slot;

  if (!cfun->gimple_df->tm_restart)
    return;

  dummy.stmt = stmt;
  slot = cfun->gimple_df->tm_restart->find_slot (&dummy, NO_INSERT);
  if (slot)
    {
      struct tm_restart_node *n = *slot;
      tree list = n->label_or_list;
      rtx_insn *insn;

      for (insn = next_real_insn (get_last_insn ());
	   !CALL_P (insn);
	   insn = next_real_insn (insn))
	continue;

      if (TREE_CODE (list) == LABEL_DECL)
	add_reg_note (insn, REG_TM, label_rtx (list));
      else
	for (; list ; list = TREE_CHAIN (list))
	  add_reg_note (insn, REG_TM, label_rtx (TREE_VALUE (list)));
    }
}
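/* The REG_TM notes attached above record, for each call that may restart
   a transaction, the label (or list of labels) the call can effectively
   branch to, so that this control flow is not lost once the IL is pure
   RTL.  */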
2627 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2631 expand_call_stmt (gcall
*stmt
)
2633 tree exp
, decl
, lhs
;
2637 if (gimple_call_internal_p (stmt
))
2639 expand_internal_call (stmt
);
2643 /* If this is a call to a built-in function and it has no effect other
2644 than setting the lhs, try to implement it using an internal function
2646 decl
= gimple_call_fndecl (stmt
);
2647 if (gimple_call_lhs (stmt
)
2648 && !gimple_has_side_effects (stmt
)
2649 && (optimize
|| (decl
&& called_as_built_in (decl
))))
2651 internal_fn ifn
= replacement_internal_fn (stmt
);
2652 if (ifn
!= IFN_LAST
)
2654 expand_internal_call (ifn
, stmt
);
2659 exp
= build_vl_exp (CALL_EXPR
, gimple_call_num_args (stmt
) + 3);
2661 CALL_EXPR_FN (exp
) = gimple_call_fn (stmt
);
2662 builtin_p
= decl
&& fndecl_built_in_p (decl
);
2664 /* If this is not a builtin function, the function type through which the
2665 call is made may be different from the type of the function. */
2668 = fold_convert (build_pointer_type (gimple_call_fntype (stmt
)),
2669 CALL_EXPR_FN (exp
));
2671 TREE_TYPE (exp
) = gimple_call_return_type (stmt
);
2672 CALL_EXPR_STATIC_CHAIN (exp
) = gimple_call_chain (stmt
);
2674 for (i
= 0; i
< gimple_call_num_args (stmt
); i
++)
2676 tree arg
= gimple_call_arg (stmt
, i
);
2678 /* TER addresses into arguments of builtin functions so we have a
2679 chance to infer more correct alignment information. See PR39954. */
2681 && TREE_CODE (arg
) == SSA_NAME
2682 && (def
= get_gimple_for_ssa_name (arg
))
2683 && gimple_assign_rhs_code (def
) == ADDR_EXPR
)
2684 arg
= gimple_assign_rhs1 (def
);
2685 CALL_EXPR_ARG (exp
, i
) = arg
;
2688 if (gimple_has_side_effects (stmt
))
2689 TREE_SIDE_EFFECTS (exp
) = 1;
2691 if (gimple_call_nothrow_p (stmt
))
2692 TREE_NOTHROW (exp
) = 1;
2694 if (gimple_no_warning_p (stmt
))
2695 TREE_NO_WARNING (exp
) = 1;
2697 CALL_EXPR_TAILCALL (exp
) = gimple_call_tail_p (stmt
);
2698 CALL_EXPR_MUST_TAIL_CALL (exp
) = gimple_call_must_tail_p (stmt
);
2699 CALL_EXPR_RETURN_SLOT_OPT (exp
) = gimple_call_return_slot_opt_p (stmt
);
2701 && fndecl_built_in_p (decl
, BUILT_IN_NORMAL
)
2702 && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl
)))
2703 CALL_ALLOCA_FOR_VAR_P (exp
) = gimple_call_alloca_for_var_p (stmt
);
2705 CALL_FROM_THUNK_P (exp
) = gimple_call_from_thunk_p (stmt
);
2706 CALL_EXPR_VA_ARG_PACK (exp
) = gimple_call_va_arg_pack_p (stmt
);
2707 CALL_EXPR_BY_DESCRIPTOR (exp
) = gimple_call_by_descriptor_p (stmt
);
2708 SET_EXPR_LOCATION (exp
, gimple_location (stmt
));
2710 /* Ensure RTL is created for debug args. */
2711 if (decl
&& DECL_HAS_DEBUG_ARGS_P (decl
))
2713 vec
<tree
, va_gc
> **debug_args
= decl_debug_args_lookup (decl
);
2718 for (ix
= 1; (*debug_args
)->iterate (ix
, &dtemp
); ix
+= 2)
2720 gcc_assert (TREE_CODE (dtemp
) == DEBUG_EXPR_DECL
);
2721 expand_debug_expr (dtemp
);
2725 rtx_insn
*before_call
= get_last_insn ();
2726 lhs
= gimple_call_lhs (stmt
);
2728 expand_assignment (lhs
, exp
, false);
2730 expand_expr (exp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2732 /* If the gimple call is an indirect call and has 'nocf_check'
2733 attribute find a generated CALL insn to mark it as no
2734 control-flow verification is needed. */
2735 if (gimple_call_nocf_check_p (stmt
)
2736 && !gimple_call_fndecl (stmt
))
2738 rtx_insn
*last
= get_last_insn ();
2739 while (!CALL_P (last
)
2740 && last
!= before_call
)
2741 last
= PREV_INSN (last
);
2743 if (last
!= before_call
)
2744 add_reg_note (last
, REG_CALL_NOCF_CHECK
, const0_rtx
);
2747 mark_transaction_restart_calls (stmt
);
/* Generate RTL for an asm statement (explicit assembler code).
   STRING is a STRING_CST node containing the assembler code text,
   or an ADDR_EXPR containing a STRING_CST.  VOL nonzero means the
   insn is volatile; don't optimize it.  */

static void
expand_asm_loc (tree string, int vol, location_t locus)
{
  rtx body;

  body = gen_rtx_ASM_INPUT_loc (VOIDmode,
				ggc_strdup (TREE_STRING_POINTER (string)),
				locus);

  MEM_VOLATILE_P (body) = vol;

  /* Non-empty basic ASM implicitly clobbers memory.  */
  if (TREE_STRING_LENGTH (string) != 0)
    {
      rtx asm_op, clob;
      unsigned i, nclobbers;
      auto_vec<rtx> input_rvec, output_rvec;
      auto_vec<const char *> constraints;
      auto_vec<rtx> clobber_rvec;
      HARD_REG_SET clobbered_regs;
      CLEAR_HARD_REG_SET (clobbered_regs);

      clob = gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode));
      clobber_rvec.safe_push (clob);

      if (targetm.md_asm_adjust)
	targetm.md_asm_adjust (output_rvec, input_rvec,
			       constraints, clobber_rvec,
			       clobbered_regs);

      asm_op = body;
      nclobbers = clobber_rvec.length ();
      body = gen_rtx_PARALLEL (VOIDmode, rtvec_alloc (1 + nclobbers));

      XVECEXP (body, 0, 0) = asm_op;
      for (i = 0; i < nclobbers; i++)
	XVECEXP (body, 0, i + 1) = gen_rtx_CLOBBER (VOIDmode, clobber_rvec[i]);
    }

  emit_insn (body);
}
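/* For a non-empty basic asm the code above therefore emits something of
   the shape (illustrative):
     (parallel [(asm_input ("...template..."))
		(clobber (mem:BLK (scratch)))])
   i.e. the template plus an implicit memory clobber (and whatever extra
   clobbers the target hook added), while an empty template stays a bare
   ASM_INPUT.  */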
/* Return the number of times character C occurs in string S.  */

static int
n_occurrences (int c, const char *s)
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}

/* A subroutine of expand_asm_operands.  Check that all operands have
   the same number of alternatives.  Return true if so.  */

static bool
check_operand_nalternatives (const vec<const char *> &constraints)
{
  unsigned len = constraints.length();
  if (len > 0)
    {
      int nalternatives = n_occurrences (',', constraints[0]);

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
	{
	  error ("too many alternatives in %<asm%>");
	  return false;
	}

      for (unsigned i = 1; i < len; ++i)
	if (n_occurrences (',', constraints[i]) != nalternatives)
	  {
	    error ("operand constraints for %<asm%> differ "
		   "in number of alternatives");
	    return false;
	  }
    }
  return true;
}
/* Check for overlap between registers marked in CLOBBERED_REGS and
   anything inappropriate in T.  Emit error and return the register
   variable definition for error, NULL_TREE for ok.  */

static tree
tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
{
  /* Conflicts between asm-declared register variables and the clobber
     list are not allowed.  */
  tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);

  if (overlap)
    {
      error ("%<asm%> specifier for variable %qE conflicts with "
	     "%<asm%> clobber list",
	     DECL_NAME (overlap));

      /* Reset registerness to stop multiple errors emitted for a single
	 variable.  */
      DECL_REGISTER (overlap) = 0;
      return overlap;
    }

  return NULL_TREE;
}
/* Check that the given REGNO spanning NREGS is a valid
   asm clobber operand.  Some HW registers cannot be
   saved/restored, hence they should not be clobbered by
   asm statements.  */

static bool
asm_clobber_reg_is_valid (int regno, int nregs, const char *regname)
{
  bool is_valid = true;
  HARD_REG_SET regset;

  CLEAR_HARD_REG_SET (regset);

  add_range_to_hard_reg_set (&regset, regno, nregs);

  /* Clobbering the PIC register is an error.  */
  if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
      && overlaps_hard_reg_set_p (regset, Pmode, PIC_OFFSET_TABLE_REGNUM))
    {
      /* ??? Diagnose during gimplification?  */
      error ("PIC register clobbered by %qs in %<asm%>", regname);
      is_valid = false;
    }
  else if (!in_hard_reg_set_p
	   (accessible_reg_set, reg_raw_mode[regno], regno))
    {
      /* ??? Diagnose during gimplification?  */
      error ("the register %qs cannot be clobbered in %<asm%>"
	     " for the current target", regname);
      is_valid = false;
    }

  /* Clobbering the stack pointer register is deprecated.  GCC expects
     the value of the stack pointer after an asm statement to be the same
     as it was before, so no asm can validly clobber the stack pointer in
     the usual sense.  Adding the stack pointer to the clobber list has
     traditionally had some undocumented and somewhat obscure side-effects.  */
  if (overlaps_hard_reg_set_p (regset, Pmode, STACK_POINTER_REGNUM)
      && warning (OPT_Wdeprecated, "listing the stack pointer register"
		  " %qs in a clobber list is deprecated", regname))
    inform (input_location, "the value of the stack pointer after an %<asm%>"
	    " statement must be the same as it was before the statement");

  return is_valid;
}
2907 /* Generate RTL for an asm statement with arguments.
2908 STRING is the instruction template.
2909 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2910 Each output or input has an expression in the TREE_VALUE and
2911 a tree list in TREE_PURPOSE which in turn contains a constraint
2912 name in TREE_VALUE (or NULL_TREE) and a constraint string
2914 CLOBBERS is a list of STRING_CST nodes each naming a hard register
2915 that is clobbered by this insn.
2917 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2918 should be the fallthru basic block of the asm goto.
2920 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2921 Some elements of OUTPUTS may be replaced with trees representing temporary
2922 values. The caller should copy those temporary values to the originally
2925 VOL nonzero means the insn is volatile; don't optimize it. */
2928 expand_asm_stmt (gasm
*stmt
)
2930 class save_input_location
2935 explicit save_input_location(location_t where
)
2937 old
= input_location
;
2938 input_location
= where
;
2941 ~save_input_location()
2943 input_location
= old
;
2947 location_t locus
= gimple_location (stmt
);
2949 if (gimple_asm_input_p (stmt
))
2951 const char *s
= gimple_asm_string (stmt
);
2952 tree string
= build_string (strlen (s
), s
);
2953 expand_asm_loc (string
, gimple_asm_volatile_p (stmt
), locus
);
  /* There are some legacy diagnostics in here, and this also avoids a
     sixth parameter to targetm.md_asm_adjust.  */
2959 save_input_location
s_i_l(locus
);
2961 unsigned noutputs
= gimple_asm_noutputs (stmt
);
2962 unsigned ninputs
= gimple_asm_ninputs (stmt
);
2963 unsigned nlabels
= gimple_asm_nlabels (stmt
);
2966 /* ??? Diagnose during gimplification? */
2967 if (ninputs
+ noutputs
+ nlabels
> MAX_RECOG_OPERANDS
)
2969 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS
);
2973 auto_vec
<tree
, MAX_RECOG_OPERANDS
> output_tvec
;
2974 auto_vec
<tree
, MAX_RECOG_OPERANDS
> input_tvec
;
2975 auto_vec
<const char *, MAX_RECOG_OPERANDS
> constraints
;
2977 /* Copy the gimple vectors into new vectors that we can manipulate. */
2979 output_tvec
.safe_grow (noutputs
);
2980 input_tvec
.safe_grow (ninputs
);
2981 constraints
.safe_grow (noutputs
+ ninputs
);
2983 for (i
= 0; i
< noutputs
; ++i
)
2985 tree t
= gimple_asm_output_op (stmt
, i
);
2986 output_tvec
[i
] = TREE_VALUE (t
);
2987 constraints
[i
] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t
)));
2989 for (i
= 0; i
< ninputs
; i
++)
2991 tree t
= gimple_asm_input_op (stmt
, i
);
2992 input_tvec
[i
] = TREE_VALUE (t
);
2993 constraints
[i
+ noutputs
]
2994 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t
)));
2997 /* ??? Diagnose during gimplification? */
2998 if (! check_operand_nalternatives (constraints
))
3001 /* Count the number of meaningful clobbered registers, ignoring what
3002 we would ignore later. */
3003 auto_vec
<rtx
> clobber_rvec
;
3004 HARD_REG_SET clobbered_regs
;
3005 CLEAR_HARD_REG_SET (clobbered_regs
);
3007 if (unsigned n
= gimple_asm_nclobbers (stmt
))
3009 clobber_rvec
.reserve (n
);
3010 for (i
= 0; i
< n
; i
++)
3012 tree t
= gimple_asm_clobber_op (stmt
, i
);
3013 const char *regname
= TREE_STRING_POINTER (TREE_VALUE (t
));
3016 j
= decode_reg_name_and_count (regname
, &nregs
);
3021 /* ??? Diagnose during gimplification? */
3022 error ("unknown register name %qs in %<asm%>", regname
);
3026 rtx x
= gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
));
3027 clobber_rvec
.safe_push (x
);
3031 /* Otherwise we should have -1 == empty string
3032 or -3 == cc, which is not a register. */
3033 gcc_assert (j
== -1 || j
== -3);
3037 for (int reg
= j
; reg
< j
+ nregs
; reg
++)
3039 if (!asm_clobber_reg_is_valid (reg
, nregs
, regname
))
3042 SET_HARD_REG_BIT (clobbered_regs
, reg
);
3043 rtx x
= gen_rtx_REG (reg_raw_mode
[reg
], reg
);
3044 clobber_rvec
.safe_push (x
);
3049 /* First pass over inputs and outputs checks validity and sets
3050 mark_addressable if needed. */
3051 /* ??? Diagnose during gimplification? */
3053 for (i
= 0; i
< noutputs
; ++i
)
3055 tree val
= output_tvec
[i
];
3056 tree type
= TREE_TYPE (val
);
3057 const char *constraint
;
3062 /* Try to parse the output constraint. If that fails, there's
3063 no point in going further. */
3064 constraint
= constraints
[i
];
3065 if (!parse_output_constraint (&constraint
, i
, ninputs
, noutputs
,
3066 &allows_mem
, &allows_reg
, &is_inout
))
3069 /* If the output is a hard register, verify it doesn't conflict with
3070 any other operand's possible hard register use. */
3072 && REG_P (DECL_RTL (val
))
3073 && HARD_REGISTER_P (DECL_RTL (val
)))
3075 unsigned j
, output_hregno
= REGNO (DECL_RTL (val
));
3076 bool early_clobber_p
= strchr (constraints
[i
], '&') != NULL
;
3077 unsigned long match
;
3079 /* Verify the other outputs do not use the same hard register. */
3080 for (j
= i
+ 1; j
< noutputs
; ++j
)
3081 if (DECL_P (output_tvec
[j
])
3082 && REG_P (DECL_RTL (output_tvec
[j
]))
3083 && HARD_REGISTER_P (DECL_RTL (output_tvec
[j
]))
3084 && output_hregno
== REGNO (DECL_RTL (output_tvec
[j
])))
3085 error ("invalid hard register usage between output operands");
3087 /* Verify matching constraint operands use the same hard register
3088 and that the non-matching constraint operands do not use the same
3089 hard register if the output is an early clobber operand. */
3090 for (j
= 0; j
< ninputs
; ++j
)
3091 if (DECL_P (input_tvec
[j
])
3092 && REG_P (DECL_RTL (input_tvec
[j
]))
3093 && HARD_REGISTER_P (DECL_RTL (input_tvec
[j
])))
3095 unsigned input_hregno
= REGNO (DECL_RTL (input_tvec
[j
]));
3096 switch (*constraints
[j
+ noutputs
])
3098 case '0': case '1': case '2': case '3': case '4':
3099 case '5': case '6': case '7': case '8': case '9':
3100 match
= strtoul (constraints
[j
+ noutputs
], NULL
, 10);
3107 && output_hregno
!= input_hregno
)
3108 error ("invalid hard register usage between output operand "
3109 "and matching constraint operand");
3110 else if (early_clobber_p
3112 && output_hregno
== input_hregno
)
3113 error ("invalid hard register usage between earlyclobber "
3114 "operand and input operand");
3122 && REG_P (DECL_RTL (val
))
3123 && GET_MODE (DECL_RTL (val
)) != TYPE_MODE (type
))))
3124 mark_addressable (val
);
3127 for (i
= 0; i
< ninputs
; ++i
)
3129 bool allows_reg
, allows_mem
;
3130 const char *constraint
;
3132 constraint
= constraints
[i
+ noutputs
];
3133 if (! parse_input_constraint (&constraint
, i
, ninputs
, noutputs
, 0,
3134 constraints
.address (),
3135 &allows_mem
, &allows_reg
))
3138 if (! allows_reg
&& allows_mem
)
3139 mark_addressable (input_tvec
[i
]);
3142 /* Second pass evaluates arguments. */
3144 /* Make sure stack is consistent for asm goto. */
3146 do_pending_stack_adjust ();
3147 int old_generating_concat_p
= generating_concat_p
;
3149 /* Vector of RTX's of evaluated output operands. */
3150 auto_vec
<rtx
, MAX_RECOG_OPERANDS
> output_rvec
;
3151 auto_vec
<int, MAX_RECOG_OPERANDS
> inout_opnum
;
3152 rtx_insn
*after_rtl_seq
= NULL
, *after_rtl_end
= NULL
;
3154 output_rvec
.safe_grow (noutputs
);
3156 for (i
= 0; i
< noutputs
; ++i
)
3158 tree val
= output_tvec
[i
];
3159 tree type
= TREE_TYPE (val
);
3160 bool is_inout
, allows_reg
, allows_mem
, ok
;
3163 ok
= parse_output_constraint (&constraints
[i
], i
, ninputs
,
3164 noutputs
, &allows_mem
, &allows_reg
,
3168 /* If an output operand is not a decl or indirect ref and our constraint
3169 allows a register, make a temporary to act as an intermediate.
3170 Make the asm insn write into that, then we will copy it to
3171 the real output operand. Likewise for promoted variables. */
3173 generating_concat_p
= 0;
3175 if ((TREE_CODE (val
) == INDIRECT_REF
&& allows_mem
)
3177 && (allows_mem
|| REG_P (DECL_RTL (val
)))
3178 && ! (REG_P (DECL_RTL (val
))
3179 && GET_MODE (DECL_RTL (val
)) != TYPE_MODE (type
)))
3182 || TREE_ADDRESSABLE (type
))
3184 op
= expand_expr (val
, NULL_RTX
, VOIDmode
,
3185 !allows_reg
? EXPAND_MEMORY
: EXPAND_WRITE
);
3187 op
= validize_mem (op
);
3189 if (! allows_reg
&& !MEM_P (op
))
3190 error ("output number %d not directly addressable", i
);
3191 if ((! allows_mem
&& MEM_P (op
) && GET_MODE (op
) != BLKmode
)
3192 || GET_CODE (op
) == CONCAT
)
3195 op
= gen_reg_rtx (GET_MODE (op
));
3197 generating_concat_p
= old_generating_concat_p
;
3200 emit_move_insn (op
, old_op
);
3202 push_to_sequence2 (after_rtl_seq
, after_rtl_end
);
3203 emit_move_insn (old_op
, op
);
3204 after_rtl_seq
= get_insns ();
3205 after_rtl_end
= get_last_insn ();
3211 op
= assign_temp (type
, 0, 1);
3212 op
= validize_mem (op
);
3213 if (!MEM_P (op
) && TREE_CODE (val
) == SSA_NAME
)
3214 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val
), op
);
3216 generating_concat_p
= old_generating_concat_p
;
3218 push_to_sequence2 (after_rtl_seq
, after_rtl_end
);
3219 expand_assignment (val
, make_tree (type
, op
), false);
3220 after_rtl_seq
= get_insns ();
3221 after_rtl_end
= get_last_insn ();
3224 output_rvec
[i
] = op
;
3227 inout_opnum
.safe_push (i
);
3230 auto_vec
<rtx
, MAX_RECOG_OPERANDS
> input_rvec
;
3231 auto_vec
<machine_mode
, MAX_RECOG_OPERANDS
> input_mode
;
3233 input_rvec
.safe_grow (ninputs
);
3234 input_mode
.safe_grow (ninputs
);
3236 generating_concat_p
= 0;
3238 for (i
= 0; i
< ninputs
; ++i
)
3240 tree val
= input_tvec
[i
];
3241 tree type
= TREE_TYPE (val
);
3242 bool allows_reg
, allows_mem
, ok
;
3243 const char *constraint
;
3246 constraint
= constraints
[i
+ noutputs
];
3247 ok
= parse_input_constraint (&constraint
, i
, ninputs
, noutputs
, 0,
3248 constraints
.address (),
3249 &allows_mem
, &allows_reg
);
3252 /* EXPAND_INITIALIZER will not generate code for valid initializer
3253 constants, but will still generate code for other types of operand.
3254 This is the behavior we want for constant constraints. */
3255 op
= expand_expr (val
, NULL_RTX
, VOIDmode
,
3256 allows_reg
? EXPAND_NORMAL
3257 : allows_mem
? EXPAND_MEMORY
3258 : EXPAND_INITIALIZER
);
3260 /* Never pass a CONCAT to an ASM. */
3261 if (GET_CODE (op
) == CONCAT
)
3262 op
= force_reg (GET_MODE (op
), op
);
3263 else if (MEM_P (op
))
3264 op
= validize_mem (op
);
3266 if (asm_operand_ok (op
, constraint
, NULL
) <= 0)
3268 if (allows_reg
&& TYPE_MODE (type
) != BLKmode
)
3269 op
= force_reg (TYPE_MODE (type
), op
);
3270 else if (!allows_mem
)
3271 warning (0, "%<asm%> operand %d probably does not match "
3274 else if (MEM_P (op
))
3276 /* We won't recognize either volatile memory or memory
3277 with a queued address as available a memory_operand
3278 at this point. Ignore it: clearly this *is* a memory. */
3284 input_mode
[i
] = TYPE_MODE (type
);
3287 /* For in-out operands, copy output rtx to input rtx. */
3288 unsigned ninout
= inout_opnum
.length();
3289 for (i
= 0; i
< ninout
; i
++)
3291 int j
= inout_opnum
[i
];
3292 rtx o
= output_rvec
[j
];
3294 input_rvec
.safe_push (o
);
3295 input_mode
.safe_push (GET_MODE (o
));
3298 sprintf (buffer
, "%d", j
);
3299 constraints
.safe_push (ggc_strdup (buffer
));
3303 /* Sometimes we wish to automatically clobber registers across an asm.
3304 Case in point is when the i386 backend moved from cc0 to a hard reg --
3305 maintaining source-level compatibility means automatically clobbering
3306 the flags register. */
3307 rtx_insn
*after_md_seq
= NULL
;
3308 if (targetm
.md_asm_adjust
)
3309 after_md_seq
= targetm
.md_asm_adjust (output_rvec
, input_rvec
,
3310 constraints
, clobber_rvec
,
3313 /* Do not allow the hook to change the output and input count,
3314 lest it mess up the operand numbering. */
3315 gcc_assert (output_rvec
.length() == noutputs
);
3316 gcc_assert (input_rvec
.length() == ninputs
);
3317 gcc_assert (constraints
.length() == noutputs
+ ninputs
);
3319 /* But it certainly can adjust the clobbers. */
3320 unsigned nclobbers
= clobber_rvec
.length ();
3322 /* Third pass checks for easy conflicts. */
3323 /* ??? Why are we doing this on trees instead of rtx. */
3325 bool clobber_conflict_found
= 0;
3326 for (i
= 0; i
< noutputs
; ++i
)
3327 if (tree_conflicts_with_clobbers_p (output_tvec
[i
], &clobbered_regs
))
3328 clobber_conflict_found
= 1;
3329 for (i
= 0; i
< ninputs
- ninout
; ++i
)
3330 if (tree_conflicts_with_clobbers_p (input_tvec
[i
], &clobbered_regs
))
3331 clobber_conflict_found
= 1;
3333 /* Make vectors for the expression-rtx, constraint strings,
3334 and named operands. */
3336 rtvec argvec
= rtvec_alloc (ninputs
);
3337 rtvec constraintvec
= rtvec_alloc (ninputs
);
3338 rtvec labelvec
= rtvec_alloc (nlabels
);
3340 rtx body
= gen_rtx_ASM_OPERANDS ((noutputs
== 0 ? VOIDmode
3341 : GET_MODE (output_rvec
[0])),
3342 ggc_strdup (gimple_asm_string (stmt
)),
3343 "", 0, argvec
, constraintvec
,
3345 MEM_VOLATILE_P (body
) = gimple_asm_volatile_p (stmt
);
3347 for (i
= 0; i
< ninputs
; ++i
)
3349 ASM_OPERANDS_INPUT (body
, i
) = input_rvec
[i
];
3350 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body
, i
)
3351 = gen_rtx_ASM_INPUT_loc (input_mode
[i
],
3352 constraints
[i
+ noutputs
],
3356 /* Copy labels to the vector. */
3357 rtx_code_label
*fallthru_label
= NULL
;
3360 basic_block fallthru_bb
= NULL
;
3361 edge fallthru
= find_fallthru_edge (gimple_bb (stmt
)->succs
);
3363 fallthru_bb
= fallthru
->dest
;
3365 for (i
= 0; i
< nlabels
; ++i
)
3367 tree label
= TREE_VALUE (gimple_asm_label_op (stmt
, i
));
3369 /* If asm goto has any labels in the fallthru basic block, use
3370 a label that we emit immediately after the asm goto. Expansion
3371 may insert further instructions into the same basic block after
3372 asm goto and if we don't do this, insertion of instructions on
3373 the fallthru edge might misbehave. See PR58670. */
3374 if (fallthru_bb
&& label_to_block (cfun
, label
) == fallthru_bb
)
3376 if (fallthru_label
== NULL_RTX
)
3377 fallthru_label
= gen_label_rtx ();
3381 r
= label_rtx (label
);
3382 ASM_OPERANDS_LABEL (body
, i
) = gen_rtx_LABEL_REF (Pmode
, r
);
3386 /* Now, for each output, construct an rtx
3387 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
3388 ARGVEC CONSTRAINTS OPNAMES))
3389 If there is more than one, put them inside a PARALLEL. */
3391 if (nlabels
> 0 && nclobbers
== 0)
3393 gcc_assert (noutputs
== 0);
3394 emit_jump_insn (body
);
3396 else if (noutputs
== 0 && nclobbers
== 0)
3398 /* No output operands: put in a raw ASM_OPERANDS rtx. */
3401 else if (noutputs
== 1 && nclobbers
== 0)
3403 ASM_OPERANDS_OUTPUT_CONSTRAINT (body
) = constraints
[0];
3404 emit_insn (gen_rtx_SET (output_rvec
[0], body
));
3414 body
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (num
+ nclobbers
));
3416 /* For each output operand, store a SET. */
3417 for (i
= 0; i
< noutputs
; ++i
)
3419 rtx src
, o
= output_rvec
[i
];
3422 ASM_OPERANDS_OUTPUT_CONSTRAINT (obody
) = constraints
[0];
3427 src
= gen_rtx_ASM_OPERANDS (GET_MODE (o
),
3428 ASM_OPERANDS_TEMPLATE (obody
),
3429 constraints
[i
], i
, argvec
,
3430 constraintvec
, labelvec
, locus
);
3431 MEM_VOLATILE_P (src
) = gimple_asm_volatile_p (stmt
);
3433 XVECEXP (body
, 0, i
) = gen_rtx_SET (o
, src
);
3436 /* If there are no outputs (but there are some clobbers)
3437 store the bare ASM_OPERANDS into the PARALLEL. */
3439 XVECEXP (body
, 0, i
++) = obody
;
3441 /* Store (clobber REG) for each clobbered register specified. */
3442 for (unsigned j
= 0; j
< nclobbers
; ++j
)
3444 rtx clobbered_reg
= clobber_rvec
[j
];
3446 /* Do sanity check for overlap between clobbers and respectively
3447 input and outputs that hasn't been handled. Such overlap
3448 should have been detected and reported above. */
3449 if (!clobber_conflict_found
&& REG_P (clobbered_reg
))
3451 /* We test the old body (obody) contents to avoid
3452 tripping over the under-construction body. */
3453 for (unsigned k
= 0; k
< noutputs
; ++k
)
3454 if (reg_overlap_mentioned_p (clobbered_reg
, output_rvec
[k
]))
3455 internal_error ("%<asm%> clobber conflict with "
3458 for (unsigned k
= 0; k
< ninputs
- ninout
; ++k
)
3459 if (reg_overlap_mentioned_p (clobbered_reg
, input_rvec
[k
]))
3460 internal_error ("%<asm%> clobber conflict with "
3464 XVECEXP (body
, 0, i
++) = gen_rtx_CLOBBER (VOIDmode
, clobbered_reg
);
3468 emit_jump_insn (body
);
3473 generating_concat_p
= old_generating_concat_p
;
3476 emit_label (fallthru_label
);
3479 emit_insn (after_md_seq
);
3481 emit_insn (after_rtl_seq
);
3484 crtl
->has_asm_statement
= 1;
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

static void
expand_computed_goto (tree exp)
{
  rtx x = expand_normal (exp);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

static void
expand_goto (tree label)
{
  if (flag_checking)
    {
      /* Check for a nonlocal goto to a containing function.  Should have
	 gotten translated to __builtin_nonlocal_goto.  */
      tree context = decl_function_context (label);
      gcc_assert (!context || context == current_function_decl);
    }

  emit_jump (jump_target_rtx (label));
}

/* Output a return with no value.  */

static void
expand_null_return_1 (void)
{
  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();
  emit_jump (return_label);
}

/* Generate RTL to return from the current function, with no value.
   (That is, we do not do anything about returning any value.)  */

void
expand_null_return (void)
{
  /* If this function was declared to return a value, but we
     didn't, clobber the return registers so that they are not
     propagated live to the rest of the function.  */
  clobber_return_register ();

  expand_null_return_1 ();
}
/* Generate RTL to return from the current function, with value VAL.  */

static void
expand_value_return (rtx val)
{
  /* Copy the value to the return location unless it's already there.  */

  tree decl = DECL_RESULT (current_function_decl);
  rtx return_reg = DECL_RTL (decl);
  if (return_reg != val)
    {
      tree funtype = TREE_TYPE (current_function_decl);
      tree type = TREE_TYPE (decl);
      int unsignedp = TYPE_UNSIGNED (type);
      machine_mode old_mode = DECL_MODE (decl);
      machine_mode mode;
      if (DECL_BY_REFERENCE (decl))
	mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
      else
	mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);

      if (mode != old_mode)
	val = convert_modes (mode, old_mode, val, unsignedp);

      if (GET_CODE (return_reg) == PARALLEL)
	emit_group_load (return_reg, val, type, int_size_in_bytes (type));
      else
	emit_move_insn (return_reg, val);
    }

  expand_null_return_1 ();
}
/* Generate RTL to evaluate the expression RETVAL and return it
   from the current function.  */

static void
expand_return (tree retval)
{
  rtx result_rtl;
  rtx val = 0;
  tree retval_rhs;

  /* If function wants no value, give it none.  */
  if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl))) == VOID_TYPE)
    {
      expand_normal (retval);
      expand_null_return ();
      return;
    }

  if (retval == error_mark_node)
    {
      /* Treat this like a return of no value from a function that
	 returns a value.  */
      expand_null_return ();
      return;
    }
  else if ((TREE_CODE (retval) == MODIFY_EXPR
	    || TREE_CODE (retval) == INIT_EXPR)
	   && TREE_CODE (TREE_OPERAND (retval, 0)) == RESULT_DECL)
    retval_rhs = TREE_OPERAND (retval, 1);
  else
    retval_rhs = retval;

  result_rtl = DECL_RTL (DECL_RESULT (current_function_decl));

  /* If we are returning the RESULT_DECL, then the value has already
     been stored into it, so we don't have to do anything special.  */
  if (TREE_CODE (retval_rhs) == RESULT_DECL)
    expand_value_return (result_rtl);

  /* If the result is an aggregate that is being returned in one (or more)
     registers, load the registers here.  */
  else if (retval_rhs != 0
	   && TYPE_MODE (TREE_TYPE (retval_rhs)) == BLKmode
	   && REG_P (result_rtl))
    {
      val = copy_blkmode_to_reg (GET_MODE (result_rtl), retval_rhs);
      if (val)
	{
	  /* Use the mode of the result value on the return register.  */
	  PUT_MODE (result_rtl, GET_MODE (val));
	  expand_value_return (val);
	}
      else
	expand_null_return ();
    }
  else if (retval_rhs != 0
	   && !VOID_TYPE_P (TREE_TYPE (retval_rhs))
	   && (REG_P (result_rtl)
	       || (GET_CODE (result_rtl) == PARALLEL)))
    {
      /* Compute the return value into a temporary (usually a pseudo reg).  */
      val
	= assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl)), 0, 1);
      val = expand_expr (retval_rhs, val, GET_MODE (val), EXPAND_NORMAL);
      val = force_not_mem (val);
      expand_value_return (val);
    }
  else
    {
      /* No hard reg used; calculate value into hard return reg.  */
      expand_expr (retval, const0_rtx, VOIDmode, EXPAND_NORMAL);
      expand_value_return (result_rtl);
    }
}
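/* The copy_blkmode_to_reg path above handles e.g. a small struct that
   the ABI returns in registers even though its type has BLKmode: the
   value is assembled into a register of the right mode before being
   moved to the return location.  */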
/* Expand a clobber of LHS.  If LHS is stored in a multi-part
   register, tell the rtl optimizers that its value is no longer
   needed.  */

static void
expand_clobber (tree lhs)
{
  if (DECL_P (lhs))
    {
      rtx decl_rtl = DECL_RTL_IF_SET (lhs);
      if (decl_rtl && REG_P (decl_rtl))
	{
	  machine_mode decl_mode = GET_MODE (decl_rtl);
	  if (maybe_gt (GET_MODE_SIZE (decl_mode),
			REGMODE_NATURAL_SIZE (decl_mode)))
	    emit_clobber (decl_rtl);
	}
    }
}
3670 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3671 STMT that doesn't require special handling for outgoing edges. That
3672 is no tailcalls and no GIMPLE_COND. */
3675 expand_gimple_stmt_1 (gimple
*stmt
)
3679 set_curr_insn_location (gimple_location (stmt
));
3681 switch (gimple_code (stmt
))
3684 op0
= gimple_goto_dest (stmt
);
3685 if (TREE_CODE (op0
) == LABEL_DECL
)
3688 expand_computed_goto (op0
);
3691 expand_label (gimple_label_label (as_a
<glabel
*> (stmt
)));
3694 case GIMPLE_PREDICT
:
3698 gswitch
*swtch
= as_a
<gswitch
*> (stmt
);
3699 if (gimple_switch_num_labels (swtch
) == 1)
3700 expand_goto (CASE_LABEL (gimple_switch_default_label (swtch
)));
3702 expand_case (swtch
);
3706 expand_asm_stmt (as_a
<gasm
*> (stmt
));
3709 expand_call_stmt (as_a
<gcall
*> (stmt
));
3714 op0
= gimple_return_retval (as_a
<greturn
*> (stmt
));
3716 /* If a return doesn't have a location, it very likely represents
3717 multiple user returns so we cannot let it inherit the location
3718 of the last statement of the previous basic block in RTL. */
3719 if (!gimple_has_location (stmt
))
3720 set_curr_insn_location (cfun
->function_end_locus
);
3722 if (op0
&& op0
!= error_mark_node
)
3724 tree result
= DECL_RESULT (current_function_decl
);
3726 /* If we are not returning the current function's RESULT_DECL,
3727 build an assignment to it. */
3730 /* I believe that a function's RESULT_DECL is unique. */
3731 gcc_assert (TREE_CODE (op0
) != RESULT_DECL
);
3733 /* ??? We'd like to use simply expand_assignment here,
3734 but this fails if the value is of BLKmode but the return
3735 decl is a register. expand_return has special handling
3736 for this combination, which eventually should move
3737 to common code. See comments there. Until then, let's
3738 build a modify expression :-/ */
3739 op0
= build2 (MODIFY_EXPR
, TREE_TYPE (result
),
3745 expand_null_return ();
3747 expand_return (op0
);
3753 gassign
*assign_stmt
= as_a
<gassign
*> (stmt
);
3754 tree lhs
= gimple_assign_lhs (assign_stmt
);
3756 /* Tree expand used to fiddle with |= and &= of two bitfield
3757 COMPONENT_REFs here. This can't happen with gimple, the LHS
3758 of binary assigns must be a gimple reg. */
3760 if (TREE_CODE (lhs
) != SSA_NAME
3761 || get_gimple_rhs_class (gimple_expr_code (stmt
))
3762 == GIMPLE_SINGLE_RHS
)
3764 tree rhs
= gimple_assign_rhs1 (assign_stmt
);
3765 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt
))
3766 == GIMPLE_SINGLE_RHS
);
3767 if (gimple_has_location (stmt
) && CAN_HAVE_LOCATION_P (rhs
)
3768 /* Do not put locations on possibly shared trees. */
3769 && !is_gimple_min_invariant (rhs
))
3770 SET_EXPR_LOCATION (rhs
, gimple_location (stmt
));
3771 if (TREE_CLOBBER_P (rhs
))
3772 /* This is a clobber to mark the going out of scope for
3774 expand_clobber (lhs
);
3776 expand_assignment (lhs
, rhs
,
3777 gimple_assign_nontemporal_move_p (
3783 bool nontemporal
= gimple_assign_nontemporal_move_p (assign_stmt
);
3784 struct separate_ops ops
;
3785 bool promoted
= false;
3787 target
= expand_expr (lhs
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
3788 if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
3791 ops
.code
= gimple_assign_rhs_code (assign_stmt
);
3792 ops
.type
= TREE_TYPE (lhs
);
3793 switch (get_gimple_rhs_class (ops
.code
))
3795 case GIMPLE_TERNARY_RHS
:
3796 ops
.op2
= gimple_assign_rhs3 (assign_stmt
);
3798 case GIMPLE_BINARY_RHS
:
3799 ops
.op1
= gimple_assign_rhs2 (assign_stmt
);
3801 case GIMPLE_UNARY_RHS
:
3802 ops
.op0
= gimple_assign_rhs1 (assign_stmt
);
3807 ops
.location
= gimple_location (stmt
);
3809 /* If we want to use a nontemporal store, force the value to
3810 register first. If we store into a promoted register,
3811 don't directly expand to target. */
3812 temp
= nontemporal
|| promoted
? NULL_RTX
: target
;
3813 temp
= expand_expr_real_2 (&ops
, temp
, GET_MODE (target
),
3820 int unsignedp
= SUBREG_PROMOTED_SIGN (target
);
3821 /* If TEMP is a VOIDmode constant, use convert_modes to make
3822 sure that we properly convert it. */
3823 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
3825 temp
= convert_modes (GET_MODE (target
),
3826 TYPE_MODE (ops
.type
),
3828 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
3829 GET_MODE (target
), temp
, unsignedp
);
3832 convert_move (SUBREG_REG (target
), temp
, unsignedp
);
3834 else if (nontemporal
&& emit_storent_insn (target
, temp
))
3838 temp
= force_operand (temp
, target
);
3840 emit_move_insn (target
, temp
);
/* Expand one gimple statement STMT and return the last RTL instruction
   before any of the newly generated ones.

   In addition to generating the necessary RTL instructions this also
   sets REG_EH_REGION notes if necessary and sets the current source
   location for diagnostics.  */

static rtx_insn *
expand_gimple_stmt (gimple *stmt)
{
  location_t saved_location = input_location;
  rtx_insn *last = get_last_insn ();
  int lp_nr;

  gcc_assert (cfun);

  /* We need to save and restore the current source location so that errors
     discovered during expansion are emitted with the right location.  But
     it would be better if the diagnostic routines used the source location
     embedded in the tree nodes rather than globals.  */
  if (gimple_has_location (stmt))
    input_location = gimple_location (stmt);

  expand_gimple_stmt_1 (stmt);

  /* Free any temporaries used to evaluate this statement.  */
  free_temp_slots ();

  input_location = saved_location;

  /* Mark all insns that may trap.  */
  lp_nr = lookup_stmt_eh_lp (stmt);
  if (lp_nr)
    {
      rtx_insn *insn;
      for (insn = next_real_insn (last); insn;
	   insn = next_real_insn (insn))
	{
	  if (! find_reg_note (insn, REG_EH_REGION, NULL_RTX)
	      /* If we want exceptions for non-call insns, any
		 may_trap_p instruction may throw.  */
	      && GET_CODE (PATTERN (insn)) != CLOBBER
	      && GET_CODE (PATTERN (insn)) != USE
	      && insn_could_throw_p (insn))
	    make_reg_eh_region_note (insn, 0, lp_nr);
	}
    }

  return last;
}
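/* The REG_EH_REGION notes added above associate each potentially
   throwing insn with landing pad LP_NR, so later RTL passes can rebuild
   the exception edges that belonged to this statement.  */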
3902 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
3903 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
3904 generated a tail call (something that might be denied by the ABI
3905 rules governing the call; see calls.c).
3907 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3908 can still reach the rest of BB. The case here is __builtin_sqrt,
3909 where the NaN result goes through the external function (with a
3910 tailcall) and the normal result happens via a sqrt instruction. */
3913 expand_gimple_tailcall (basic_block bb
, gcall
*stmt
, bool *can_fallthru
)
3915 rtx_insn
*last2
, *last
;
3918 profile_probability probability
;
3920 last2
= last
= expand_gimple_stmt (stmt
);
3922 for (last
= NEXT_INSN (last
); last
; last
= NEXT_INSN (last
))
3923 if (CALL_P (last
) && SIBLING_CALL_P (last
))
3926 maybe_dump_rtl_for_gimple_stmt (stmt
, last2
);
3928 *can_fallthru
= true;
3932 /* ??? Wouldn't it be better to just reset any pending stack adjust?
3933 Any instructions emitted here are about to be deleted. */
3934 do_pending_stack_adjust ();
3936 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
3937 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
3938 EH or abnormal edges, we shouldn't have created a tail call in
3939 the first place. So it seems to me we should just be removing
3940 all edges here, or redirecting the existing fallthru edge to
3943 probability
= profile_probability::never ();
3945 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
3947 if (!(e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
)))
3949 if (e
->dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
3950 e
->dest
->count
-= e
->count ();
3951 probability
+= e
->probability
;
3958 /* This is somewhat ugly: the call_expr expander often emits instructions
3959 after the sibcall (to perform the function return). These confuse the
3960 find_many_sub_basic_blocks code, so we need to get rid of these. */
3961 last
= NEXT_INSN (last
);
3962 gcc_assert (BARRIER_P (last
));
3964 *can_fallthru
= false;
3965 while (NEXT_INSN (last
))
3967 /* For instance an sqrt builtin expander expands if with
3968 sibcall in the then and label for `else`. */
3969 if (LABEL_P (NEXT_INSN (last
)))
3971 *can_fallthru
= true;
3974 delete_insn (NEXT_INSN (last
));
3977 e
= make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), EDGE_ABNORMAL
3979 e
->probability
= probability
;
3981 update_bb_for_insn (bb
);
3983 if (NEXT_INSN (last
))
3985 bb
= create_basic_block (NEXT_INSN (last
), get_last_insn (), bb
);
3988 if (BARRIER_P (last
))
3989 BB_END (bb
) = PREV_INSN (last
);
3992 maybe_dump_rtl_for_gimple_stmt (stmt
, last2
);
/* Return the difference between the floor and the truncated result of
   a signed division by OP1 with remainder MOD.  */

static rtx
floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_LT (BImode,
			gen_rtx_DIV (mode, op1, mod),
			const0_rtx),
      constm1_rtx, const0_rtx),
     const0_rtx);
}

/* Return the difference between the ceil and the truncated result of
   a signed division by OP1 with remainder MOD.  */

static rtx
ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_GT (BImode,
			gen_rtx_DIV (mode, op1, mod),
			const0_rtx),
      const1_rtx, const0_rtx),
     const0_rtx);
}

/* Return the difference between the ceil and the truncated result of
   an unsigned division by OP1 with remainder MOD.  */

static rtx
ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
{
  /* (mod != 0 ? 1 : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     const1_rtx, const0_rtx);
}

/* Return the difference between the rounded and the truncated result
   of a signed division by OP1 with remainder MOD.  Halfway cases are
   rounded away from zero, rather than to the nearest even number.  */

static rtx
round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (abs (mod) >= abs (op1) - abs (mod)
	? (op1 / mod > 0 ? 1 : -1)
	: 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
		       gen_rtx_MINUS (mode,
				      gen_rtx_ABS (mode, op1),
				      gen_rtx_ABS (mode, mod))),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_GT (BImode,
			gen_rtx_DIV (mode, op1, mod),
			const0_rtx),
      const1_rtx, constm1_rtx),
     const0_rtx);
}

/* Return the difference between the rounded and the truncated result
   of an unsigned division by OP1 with remainder MOD.  Halfway cases
   are rounded away from zero, rather than to the nearest even
   number.  */

static rtx
round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (mod >= op1 - mod ? 1 : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_GE (BImode, mod,
		       gen_rtx_MINUS (mode, op1, mod)),
     const1_rtx, const0_rtx);
}
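/* Worked example for floor_sdiv_adjust: with OP1 = 2 and a dividend of
   -7, truncating division gives -3 with MOD = -1.  MOD is nonzero and
   OP1 / MOD = -2 < 0, so the adjustment is -1, turning the truncated -3
   into the floor result -4.  */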
/* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
   any rtl.  */

static rtx
convert_debug_memory_address (scalar_int_mode mode, rtx x,
			      addr_space_t as)
{
#ifndef POINTERS_EXTEND_UNSIGNED
  gcc_assert (mode == Pmode
	      || mode == targetm.addr_space.address_mode (as));
  gcc_assert (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode);
#else
  rtx temp;

  gcc_assert (targetm.addr_space.valid_pointer_mode (mode, as));

  if (GET_MODE (x) == mode || GET_MODE (x) == VOIDmode)
    return x;

  /* X must have some form of address mode already.  */
  scalar_int_mode xmode = as_a <scalar_int_mode> (GET_MODE (x));
  if (GET_MODE_PRECISION (mode) < GET_MODE_PRECISION (xmode))
    x = lowpart_subreg (mode, x, xmode);
  else if (POINTERS_EXTEND_UNSIGNED > 0)
    x = gen_rtx_ZERO_EXTEND (mode, x);
  else if (!POINTERS_EXTEND_UNSIGNED)
    x = gen_rtx_SIGN_EXTEND (mode, x);
  else
    {
      switch (GET_CODE (x))
	{
	case SUBREG:
	  if ((SUBREG_PROMOTED_VAR_P (x)
	       || (REG_P (SUBREG_REG (x)) && REG_POINTER (SUBREG_REG (x)))
	       || (GET_CODE (SUBREG_REG (x)) == PLUS
		   && REG_P (XEXP (SUBREG_REG (x), 0))
		   && REG_POINTER (XEXP (SUBREG_REG (x), 0))
		   && CONST_INT_P (XEXP (SUBREG_REG (x), 1))))
	      && GET_MODE (SUBREG_REG (x)) == mode)
	    return SUBREG_REG (x);
	  break;
	case LABEL_REF:
	  temp = gen_rtx_LABEL_REF (mode, label_ref_label (x));
	  LABEL_REF_NONLOCAL_P (temp) = LABEL_REF_NONLOCAL_P (x);
	  return temp;
	case SYMBOL_REF:
	  temp = shallow_copy_rtx (x);
	  PUT_MODE (temp, mode);
	  return temp;
	case CONST:
	  temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
	  if (temp)
	    temp = gen_rtx_CONST (mode, temp);
	  return temp;
	case PLUS:
	case MINUS:
	  if (CONST_INT_P (XEXP (x, 1)))
	    {
	      temp = convert_debug_memory_address (mode, XEXP (x, 0), as);
	      if (temp)
		return gen_rtx_fmt_ee (GET_CODE (x), mode, temp, XEXP (x, 1));
	    }
	  break;
	default:
	  break;
	}
      /* Don't know how to express ptr_extend as operation in debug info.  */
      return NULL;
    }
#endif /* POINTERS_EXTEND_UNSIGNED */

  return x;
}
/* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
   by avoid_deep_ter_for_debug.  */

static hash_map<tree, tree> *deep_ter_debug_map;
/* Split too deep TER chains for debug stmts using debug temporaries.  */

static void
avoid_deep_ter_for_debug (gimple *stmt, int depth)
{
  use_operand_p use_p;
  ssa_op_iter iter;
  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    {
      tree use = USE_FROM_PTR (use_p);
      if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
	continue;
      gimple *g = get_gimple_for_ssa_name (use);
      if (g == NULL)
	continue;
      if (depth > 6 && !stmt_ends_bb_p (g))
	{
	  if (deep_ter_debug_map == NULL)
	    deep_ter_debug_map = new hash_map<tree, tree>;

	  tree &vexpr = deep_ter_debug_map->get_or_insert (use);
	  if (vexpr != NULL)
	    continue;
	  vexpr = make_node (DEBUG_EXPR_DECL);
	  gimple *def_temp = gimple_build_debug_bind (vexpr, use, g);
	  DECL_ARTIFICIAL (vexpr) = 1;
	  TREE_TYPE (vexpr) = TREE_TYPE (use);
	  SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (use)));
	  gimple_stmt_iterator gsi = gsi_for_stmt (g);
	  gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
	  avoid_deep_ter_for_debug (def_temp, 0);
	}
      else
	avoid_deep_ter_for_debug (g, depth + 1);
    }
}
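
/* Illustrative example (comment only): with TER a debug bind such as
     # DEBUG x => a_1 + b_2
   can expand into arbitrarily deep RTL if a_1, b_2 and their operands are
   themselves single-use (TERed) definitions.  Once the chain feeding a use
   is deeper than 6, a debug temporary is inserted right after the defining
   statement, e.g.
     x_5 = y_4 + z_3;
     # DEBUG D#7 => x_5
   and later debug uses of x_5 are expanded to D#7 (via deep_ter_debug_map)
   instead of re-expanding the whole chain.  D#7 is just an example name.  */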
/* Return an RTX equivalent to the value of the parameter DECL.  */

static rtx
expand_debug_parm_decl (tree decl)
{
  rtx incoming = DECL_INCOMING_RTL (decl);

  if (incoming
      && GET_MODE (incoming) != BLKmode
      && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
	  || (MEM_P (incoming)
	      && REG_P (XEXP (incoming, 0))
	      && HARD_REGISTER_P (XEXP (incoming, 0)))))
    {
      rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));

#ifdef HAVE_window_save
      /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
	 If the target machine has an explicit window save instruction, the
	 actual entry value is the corresponding OUTGOING_REGNO instead.  */
      if (REG_P (incoming)
	  && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
	incoming
	  = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
				OUTGOING_REGNO (REGNO (incoming)), 0);
      else if (MEM_P (incoming))
	{
	  rtx reg = XEXP (incoming, 0);
	  if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
	    {
	      reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
	      incoming = replace_equiv_address_nv (incoming, reg);
	    }
	  else
	    incoming = copy_rtx (incoming);
	}
#else
      incoming = copy_rtx (incoming);
#endif
      ENTRY_VALUE_EXP (rtl) = incoming;
      return rtl;
    }

  if (incoming
      && GET_MODE (incoming) != BLKmode
      && !TREE_ADDRESSABLE (decl)
      && MEM_P (incoming)
      && (XEXP (incoming, 0) == virtual_incoming_args_rtx
	  || (GET_CODE (XEXP (incoming, 0)) == PLUS
	      && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
	      && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
    return copy_rtx (incoming);

  return NULL_RTX;
}
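
/* Illustrative example (comment only): for a parameter whose incoming
   location is a hard register, say %rdi on x86_64, the code above wraps it
   in an ENTRY_VALUE, roughly
     (entry_value:DI (reg:DI 5 di))
   which later allows dwarf2out to describe the value the register held on
   function entry (DW_OP_entry_value / DW_OP_GNU_entry_value).  The exact
   register number is target-specific.  */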
4247 /* Return an RTX equivalent to the value of the tree expression EXP. */
4250 expand_debug_expr (tree exp
)
4252 rtx op0
= NULL_RTX
, op1
= NULL_RTX
, op2
= NULL_RTX
;
4253 machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
4254 machine_mode inner_mode
= VOIDmode
;
4255 int unsignedp
= TYPE_UNSIGNED (TREE_TYPE (exp
));
4257 scalar_int_mode op0_mode
, op1_mode
, addr_mode
;
4259 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
4261 case tcc_expression
:
4262 switch (TREE_CODE (exp
))
4267 case WIDEN_MULT_PLUS_EXPR
:
4268 case WIDEN_MULT_MINUS_EXPR
:
4271 case TRUTH_ANDIF_EXPR
:
4272 case TRUTH_ORIF_EXPR
:
4273 case TRUTH_AND_EXPR
:
4275 case TRUTH_XOR_EXPR
:
4278 case TRUTH_NOT_EXPR
:
4287 op2
= expand_debug_expr (TREE_OPERAND (exp
, 2));
4294 if (mode
== BLKmode
)
4296 op1
= expand_debug_expr (TREE_OPERAND (exp
, 1));
4299 switch (TREE_CODE (exp
))
4305 case WIDEN_LSHIFT_EXPR
:
4306 /* Ensure second operand isn't wider than the first one. */
4307 inner_mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1)));
4308 if (is_a
<scalar_int_mode
> (inner_mode
, &op1_mode
)
4309 && (GET_MODE_UNIT_PRECISION (mode
)
4310 < GET_MODE_PRECISION (op1_mode
)))
4311 op1
= lowpart_subreg (GET_MODE_INNER (mode
), op1
, op1_mode
);
4320 if (mode
== BLKmode
)
4322 inner_mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
4323 op0
= expand_debug_expr (TREE_OPERAND (exp
, 0));
4328 case tcc_comparison
:
4329 unsignedp
= TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0)));
4337 case tcc_exceptional
:
4338 case tcc_declaration
:
4344 switch (TREE_CODE (exp
))
4347 if (!lookup_constant_def (exp
))
4349 if (strlen (TREE_STRING_POINTER (exp
)) + 1
4350 != (size_t) TREE_STRING_LENGTH (exp
))
4352 op0
= gen_rtx_CONST_STRING (Pmode
, TREE_STRING_POINTER (exp
));
4353 op0
= gen_rtx_MEM (BLKmode
, op0
);
4354 set_mem_attributes (op0
, exp
, 0);
4362 op0
= expand_expr (exp
, NULL_RTX
, mode
, EXPAND_INITIALIZER
);
4366 return immed_wide_int_const (poly_int_cst_value (exp
), mode
);
4369 gcc_assert (COMPLEX_MODE_P (mode
));
4370 op0
= expand_debug_expr (TREE_REALPART (exp
));
4371 op1
= expand_debug_expr (TREE_IMAGPART (exp
));
4372 return gen_rtx_CONCAT (mode
, op0
, op1
);
4374 case DEBUG_EXPR_DECL
:
4375 op0
= DECL_RTL_IF_SET (exp
);
4380 op0
= gen_rtx_DEBUG_EXPR (mode
);
4381 DEBUG_EXPR_TREE_DECL (op0
) = exp
;
4382 SET_DECL_RTL (exp
, op0
);
4392 op0
= DECL_RTL_IF_SET (exp
);
4394 /* This decl was probably optimized away. */
4396 /* At least label RTXen are sometimes replaced by
4397 NOTE_INSN_DELETED_LABEL. Any notes here are not
4398 handled by copy_rtx. */
4402 || DECL_EXTERNAL (exp
)
4403 || !TREE_STATIC (exp
)
4405 || DECL_HARD_REGISTER (exp
)
4406 || DECL_IN_CONSTANT_POOL (exp
)
4407 || mode
== VOIDmode
)
4410 op0
= make_decl_rtl_for_debug (exp
);
4412 || GET_CODE (XEXP (op0
, 0)) != SYMBOL_REF
4413 || SYMBOL_REF_DECL (XEXP (op0
, 0)) != exp
)
4417 op0
= copy_rtx (op0
);
4419 if (GET_MODE (op0
) == BLKmode
4420 /* If op0 is not BLKmode, but mode is, adjust_mode
4421 below would ICE. While it is likely a FE bug,
4422 try to be robust here. See PR43166. */
4424 || (mode
== VOIDmode
&& GET_MODE (op0
) != VOIDmode
))
4426 gcc_assert (MEM_P (op0
));
4427 op0
= adjust_address_nv (op0
, mode
, 0);
4437 inner_mode
= GET_MODE (op0
);
4439 if (mode
== inner_mode
)
4442 if (inner_mode
== VOIDmode
)
4444 if (TREE_CODE (exp
) == SSA_NAME
)
4445 inner_mode
= TYPE_MODE (TREE_TYPE (exp
));
4447 inner_mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
4448 if (mode
== inner_mode
)
4452 if (FLOAT_MODE_P (mode
) && FLOAT_MODE_P (inner_mode
))
4454 if (GET_MODE_UNIT_BITSIZE (mode
)
4455 == GET_MODE_UNIT_BITSIZE (inner_mode
))
4456 op0
= simplify_gen_subreg (mode
, op0
, inner_mode
, 0);
4457 else if (GET_MODE_UNIT_BITSIZE (mode
)
4458 < GET_MODE_UNIT_BITSIZE (inner_mode
))
4459 op0
= simplify_gen_unary (FLOAT_TRUNCATE
, mode
, op0
, inner_mode
);
4461 op0
= simplify_gen_unary (FLOAT_EXTEND
, mode
, op0
, inner_mode
);
4463 else if (FLOAT_MODE_P (mode
))
4465 gcc_assert (TREE_CODE (exp
) != SSA_NAME
);
4466 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))))
4467 op0
= simplify_gen_unary (UNSIGNED_FLOAT
, mode
, op0
, inner_mode
);
4469 op0
= simplify_gen_unary (FLOAT
, mode
, op0
, inner_mode
);
4471 else if (FLOAT_MODE_P (inner_mode
))
4474 op0
= simplify_gen_unary (UNSIGNED_FIX
, mode
, op0
, inner_mode
);
4476 op0
= simplify_gen_unary (FIX
, mode
, op0
, inner_mode
);
4478 else if (GET_MODE_UNIT_PRECISION (mode
)
4479 == GET_MODE_UNIT_PRECISION (inner_mode
))
4480 op0
= lowpart_subreg (mode
, op0
, inner_mode
);
4481 else if (GET_MODE_UNIT_PRECISION (mode
)
4482 < GET_MODE_UNIT_PRECISION (inner_mode
))
4483 op0
= simplify_gen_unary (TRUNCATE
, mode
, op0
, inner_mode
);
4484 else if (UNARY_CLASS_P (exp
)
4485 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0)))
4487 op0
= simplify_gen_unary (ZERO_EXTEND
, mode
, op0
, inner_mode
);
4489 op0
= simplify_gen_unary (SIGN_EXTEND
, mode
, op0
, inner_mode
);
4495 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp
, 0)))
4497 tree newexp
= fold_binary (MEM_REF
, TREE_TYPE (exp
),
4498 TREE_OPERAND (exp
, 0),
4499 TREE_OPERAND (exp
, 1));
4501 return expand_debug_expr (newexp
);
4505 inner_mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
4506 op0
= expand_debug_expr (TREE_OPERAND (exp
, 0));
4510 if (TREE_CODE (exp
) == MEM_REF
)
4512 if (GET_CODE (op0
) == DEBUG_IMPLICIT_PTR
4513 || (GET_CODE (op0
) == PLUS
4514 && GET_CODE (XEXP (op0
, 0)) == DEBUG_IMPLICIT_PTR
))
4515 /* (mem (debug_implicit_ptr)) might confuse aliasing.
4516 Instead just use get_inner_reference. */
4519 op1
= expand_debug_expr (TREE_OPERAND (exp
, 1));
4521 if (!op1
|| !poly_int_rtx_p (op1
, &offset
))
4524 op0
= plus_constant (inner_mode
, op0
, offset
);
4527 as
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
4529 op0
= convert_debug_memory_address (targetm
.addr_space
.address_mode (as
),
4531 if (op0
== NULL_RTX
)
4534 op0
= gen_rtx_MEM (mode
, op0
);
4535 set_mem_attributes (op0
, exp
, 0);
4536 if (TREE_CODE (exp
) == MEM_REF
4537 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp
, 0)))
4538 set_mem_expr (op0
, NULL_TREE
);
4539 set_mem_addr_space (op0
, as
);
4543 case TARGET_MEM_REF
:
4544 if (TREE_CODE (TMR_BASE (exp
)) == ADDR_EXPR
4545 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp
), 0)))
4548 op0
= expand_debug_expr
4549 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp
)), exp
));
4553 as
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
4554 op0
= convert_debug_memory_address (targetm
.addr_space
.address_mode (as
),
4556 if (op0
== NULL_RTX
)
4559 op0
= gen_rtx_MEM (mode
, op0
);
4561 set_mem_attributes (op0
, exp
, 0);
4562 set_mem_addr_space (op0
, as
);
4568 case ARRAY_RANGE_REF
:
4573 case VIEW_CONVERT_EXPR
:
4576 poly_int64 bitsize
, bitpos
;
4578 int reversep
, volatilep
= 0;
4580 = get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
, &mode1
,
4581 &unsignedp
, &reversep
, &volatilep
);
4584 if (known_eq (bitsize
, 0))
4587 orig_op0
= op0
= expand_debug_expr (tem
);
4594 machine_mode addrmode
, offmode
;
4599 op0
= XEXP (op0
, 0);
4600 addrmode
= GET_MODE (op0
);
4601 if (addrmode
== VOIDmode
)
4604 op1
= expand_debug_expr (offset
);
4608 offmode
= GET_MODE (op1
);
4609 if (offmode
== VOIDmode
)
4610 offmode
= TYPE_MODE (TREE_TYPE (offset
));
4612 if (addrmode
!= offmode
)
4613 op1
= lowpart_subreg (addrmode
, op1
, offmode
);
4615 /* Don't use offset_address here, we don't need a
4616 recognizable address, and we don't want to generate
4618 op0
= gen_rtx_MEM (mode
, simplify_gen_binary (PLUS
, addrmode
,
4624 if (mode1
== VOIDmode
)
4626 if (maybe_gt (bitsize
, MAX_BITSIZE_MODE_ANY_INT
))
4629 mode1
= smallest_int_mode_for_size (bitsize
);
4631 poly_int64 bytepos
= bits_to_bytes_round_down (bitpos
);
4632 if (maybe_ne (bytepos
, 0))
4634 op0
= adjust_address_nv (op0
, mode1
, bytepos
);
4635 bitpos
= num_trailing_bits (bitpos
);
4637 else if (known_eq (bitpos
, 0)
4638 && known_eq (bitsize
, GET_MODE_BITSIZE (mode
)))
4639 op0
= adjust_address_nv (op0
, mode
, 0);
4640 else if (GET_MODE (op0
) != mode1
)
4641 op0
= adjust_address_nv (op0
, mode1
, 0);
4643 op0
= copy_rtx (op0
);
4644 if (op0
== orig_op0
)
4645 op0
= shallow_copy_rtx (op0
);
4646 set_mem_attributes (op0
, exp
, 0);
4649 if (known_eq (bitpos
, 0) && mode
== GET_MODE (op0
))
4652 if (maybe_lt (bitpos
, 0))
4655 if (GET_MODE (op0
) == BLKmode
|| mode
== BLKmode
)
4659 if (multiple_p (bitpos
, BITS_PER_UNIT
, &bytepos
)
4660 && known_eq (bitsize
, GET_MODE_BITSIZE (mode1
)))
4662 machine_mode opmode
= GET_MODE (op0
);
4664 if (opmode
== VOIDmode
)
4665 opmode
= TYPE_MODE (TREE_TYPE (tem
));
4667 /* This condition may hold if we're expanding the address
4668 right past the end of an array that turned out not to
4669 be addressable (i.e., the address was only computed in
4670 debug stmts). The gen_subreg below would rightfully
4671 crash, and the address doesn't really exist, so just
4673 if (known_ge (bitpos
, GET_MODE_BITSIZE (opmode
)))
4676 if (multiple_p (bitpos
, GET_MODE_BITSIZE (mode
)))
4677 return simplify_gen_subreg (mode
, op0
, opmode
, bytepos
);
4680 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0
))
4681 && TYPE_UNSIGNED (TREE_TYPE (exp
))
4683 : ZERO_EXTRACT
, mode
,
4684 GET_MODE (op0
) != VOIDmode
4686 : TYPE_MODE (TREE_TYPE (tem
)),
4687 op0
, gen_int_mode (bitsize
, word_mode
),
4688 gen_int_mode (bitpos
, word_mode
));
4693 return simplify_gen_unary (ABS
, mode
, op0
, mode
);
4696 return simplify_gen_unary (NEG
, mode
, op0
, mode
);
4699 return simplify_gen_unary (NOT
, mode
, op0
, mode
);
4702 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
,
4704 ? UNSIGNED_FLOAT
: FLOAT
, mode
, op0
,
4707 case FIX_TRUNC_EXPR
:
4708 return simplify_gen_unary (unsignedp
? UNSIGNED_FIX
: FIX
, mode
, op0
,
4711 case POINTER_PLUS_EXPR
:
4712 /* For the rare target where pointers are not the same size as
4713 size_t, we need to check for mis-matched modes and correct
4716 && is_a
<scalar_int_mode
> (GET_MODE (op0
), &op0_mode
)
4717 && is_a
<scalar_int_mode
> (GET_MODE (op1
), &op1_mode
)
4718 && op0_mode
!= op1_mode
)
4720 if (GET_MODE_BITSIZE (op0_mode
) < GET_MODE_BITSIZE (op1_mode
)
4721 /* If OP0 is a partial mode, then we must truncate, even
4722 if it has the same bitsize as OP1 as GCC's
4723 representation of partial modes is opaque. */
4724 || (GET_MODE_CLASS (op0_mode
) == MODE_PARTIAL_INT
4725 && (GET_MODE_BITSIZE (op0_mode
)
4726 == GET_MODE_BITSIZE (op1_mode
))))
4727 op1
= simplify_gen_unary (TRUNCATE
, op0_mode
, op1
, op1_mode
);
4729 /* We always sign-extend, regardless of the signedness of
4730 the operand, because the operand is always unsigned
4731 here even if the original C expression is signed. */
4732 op1
= simplify_gen_unary (SIGN_EXTEND
, op0_mode
, op1
, op1_mode
);
4736 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
4739 case POINTER_DIFF_EXPR
:
4740 return simplify_gen_binary (MINUS
, mode
, op0
, op1
);
4743 return simplify_gen_binary (MULT
, mode
, op0
, op1
);
4746 case TRUNC_DIV_EXPR
:
4747 case EXACT_DIV_EXPR
:
4749 return simplify_gen_binary (UDIV
, mode
, op0
, op1
);
4751 return simplify_gen_binary (DIV
, mode
, op0
, op1
);
4753 case TRUNC_MOD_EXPR
:
4754 return simplify_gen_binary (unsignedp
? UMOD
: MOD
, mode
, op0
, op1
);
4756 case FLOOR_DIV_EXPR
:
4758 return simplify_gen_binary (UDIV
, mode
, op0
, op1
);
4761 rtx div
= simplify_gen_binary (DIV
, mode
, op0
, op1
);
4762 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
4763 rtx adj
= floor_sdiv_adjust (mode
, mod
, op1
);
4764 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
4767 case FLOOR_MOD_EXPR
:
4769 return simplify_gen_binary (UMOD
, mode
, op0
, op1
);
4772 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
4773 rtx adj
= floor_sdiv_adjust (mode
, mod
, op1
);
4774 adj
= simplify_gen_unary (NEG
, mode
,
4775 simplify_gen_binary (MULT
, mode
, adj
, op1
),
4777 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
4783 rtx div
= simplify_gen_binary (UDIV
, mode
, op0
, op1
);
4784 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
4785 rtx adj
= ceil_udiv_adjust (mode
, mod
, op1
);
4786 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
4790 rtx div
= simplify_gen_binary (DIV
, mode
, op0
, op1
);
4791 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
4792 rtx adj
= ceil_sdiv_adjust (mode
, mod
, op1
);
4793 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
4799 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
4800 rtx adj
= ceil_udiv_adjust (mode
, mod
, op1
);
4801 adj
= simplify_gen_unary (NEG
, mode
,
4802 simplify_gen_binary (MULT
, mode
, adj
, op1
),
4804 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
4808 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
4809 rtx adj
= ceil_sdiv_adjust (mode
, mod
, op1
);
4810 adj
= simplify_gen_unary (NEG
, mode
,
4811 simplify_gen_binary (MULT
, mode
, adj
, op1
),
4813 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
4816 case ROUND_DIV_EXPR
:
4819 rtx div
= simplify_gen_binary (UDIV
, mode
, op0
, op1
);
4820 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
4821 rtx adj
= round_udiv_adjust (mode
, mod
, op1
);
4822 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
4826 rtx div
= simplify_gen_binary (DIV
, mode
, op0
, op1
);
4827 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
4828 rtx adj
= round_sdiv_adjust (mode
, mod
, op1
);
4829 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
4832 case ROUND_MOD_EXPR
:
4835 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
4836 rtx adj
= round_udiv_adjust (mode
, mod
, op1
);
4837 adj
= simplify_gen_unary (NEG
, mode
,
4838 simplify_gen_binary (MULT
, mode
, adj
, op1
),
4840 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
4844 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
4845 rtx adj
= round_sdiv_adjust (mode
, mod
, op1
);
4846 adj
= simplify_gen_unary (NEG
, mode
,
4847 simplify_gen_binary (MULT
, mode
, adj
, op1
),
4849 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
4853 return simplify_gen_binary (ASHIFT
, mode
, op0
, op1
);
4857 return simplify_gen_binary (LSHIFTRT
, mode
, op0
, op1
);
4859 return simplify_gen_binary (ASHIFTRT
, mode
, op0
, op1
);
4862 return simplify_gen_binary (ROTATE
, mode
, op0
, op1
);
4865 return simplify_gen_binary (ROTATERT
, mode
, op0
, op1
);
4868 return simplify_gen_binary (unsignedp
? UMIN
: SMIN
, mode
, op0
, op1
);
4871 return simplify_gen_binary (unsignedp
? UMAX
: SMAX
, mode
, op0
, op1
);
4874 case TRUTH_AND_EXPR
:
4875 return simplify_gen_binary (AND
, mode
, op0
, op1
);
4879 return simplify_gen_binary (IOR
, mode
, op0
, op1
);
4882 case TRUTH_XOR_EXPR
:
4883 return simplify_gen_binary (XOR
, mode
, op0
, op1
);
4885 case TRUTH_ANDIF_EXPR
:
4886 return gen_rtx_IF_THEN_ELSE (mode
, op0
, op1
, const0_rtx
);
4888 case TRUTH_ORIF_EXPR
:
4889 return gen_rtx_IF_THEN_ELSE (mode
, op0
, const_true_rtx
, op1
);
4891 case TRUTH_NOT_EXPR
:
4892 return simplify_gen_relational (EQ
, mode
, inner_mode
, op0
, const0_rtx
);
4895 return simplify_gen_relational (unsignedp
? LTU
: LT
, mode
, inner_mode
,
4899 return simplify_gen_relational (unsignedp
? LEU
: LE
, mode
, inner_mode
,
4903 return simplify_gen_relational (unsignedp
? GTU
: GT
, mode
, inner_mode
,
4907 return simplify_gen_relational (unsignedp
? GEU
: GE
, mode
, inner_mode
,
4911 return simplify_gen_relational (EQ
, mode
, inner_mode
, op0
, op1
);
4914 return simplify_gen_relational (NE
, mode
, inner_mode
, op0
, op1
);
4916 case UNORDERED_EXPR
:
4917 return simplify_gen_relational (UNORDERED
, mode
, inner_mode
, op0
, op1
);
4920 return simplify_gen_relational (ORDERED
, mode
, inner_mode
, op0
, op1
);
4923 return simplify_gen_relational (UNLT
, mode
, inner_mode
, op0
, op1
);
4926 return simplify_gen_relational (UNLE
, mode
, inner_mode
, op0
, op1
);
4929 return simplify_gen_relational (UNGT
, mode
, inner_mode
, op0
, op1
);
4932 return simplify_gen_relational (UNGE
, mode
, inner_mode
, op0
, op1
);
4935 return simplify_gen_relational (UNEQ
, mode
, inner_mode
, op0
, op1
);
4938 return simplify_gen_relational (LTGT
, mode
, inner_mode
, op0
, op1
);
4941 return gen_rtx_IF_THEN_ELSE (mode
, op0
, op1
, op2
);
4944 gcc_assert (COMPLEX_MODE_P (mode
));
4945 if (GET_MODE (op0
) == VOIDmode
)
4946 op0
= gen_rtx_CONST (GET_MODE_INNER (mode
), op0
);
4947 if (GET_MODE (op1
) == VOIDmode
)
4948 op1
= gen_rtx_CONST (GET_MODE_INNER (mode
), op1
);
4949 return gen_rtx_CONCAT (mode
, op0
, op1
);
4952 if (GET_CODE (op0
) == CONCAT
)
4953 return gen_rtx_CONCAT (mode
, XEXP (op0
, 0),
4954 simplify_gen_unary (NEG
, GET_MODE_INNER (mode
),
4956 GET_MODE_INNER (mode
)));
4959 scalar_mode imode
= GET_MODE_INNER (mode
);
4964 re
= adjust_address_nv (op0
, imode
, 0);
4965 im
= adjust_address_nv (op0
, imode
, GET_MODE_SIZE (imode
));
4969 scalar_int_mode ifmode
;
4970 scalar_int_mode ihmode
;
4972 if (!int_mode_for_mode (mode
).exists (&ifmode
)
4973 || !int_mode_for_mode (imode
).exists (&ihmode
))
4975 halfsize
= GEN_INT (GET_MODE_BITSIZE (ihmode
));
4978 re
= gen_rtx_SUBREG (ifmode
, re
, 0);
4979 re
= gen_rtx_ZERO_EXTRACT (ihmode
, re
, halfsize
, const0_rtx
);
4980 if (imode
!= ihmode
)
4981 re
= gen_rtx_SUBREG (imode
, re
, 0);
4982 im
= copy_rtx (op0
);
4984 im
= gen_rtx_SUBREG (ifmode
, im
, 0);
4985 im
= gen_rtx_ZERO_EXTRACT (ihmode
, im
, halfsize
, halfsize
);
4986 if (imode
!= ihmode
)
4987 im
= gen_rtx_SUBREG (imode
, im
, 0);
4989 im
= gen_rtx_NEG (imode
, im
);
4990 return gen_rtx_CONCAT (mode
, re
, im
);
4994 op0
= expand_debug_expr (TREE_OPERAND (exp
, 0));
4995 if (!op0
|| !MEM_P (op0
))
4997 if ((TREE_CODE (TREE_OPERAND (exp
, 0)) == VAR_DECL
4998 || TREE_CODE (TREE_OPERAND (exp
, 0)) == PARM_DECL
4999 || TREE_CODE (TREE_OPERAND (exp
, 0)) == RESULT_DECL
)
5000 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp
, 0))
5001 || target_for_debug_bind (TREE_OPERAND (exp
, 0))))
5002 return gen_rtx_DEBUG_IMPLICIT_PTR (mode
, TREE_OPERAND (exp
, 0));
5004 if (handled_component_p (TREE_OPERAND (exp
, 0)))
5006 poly_int64 bitoffset
, bitsize
, maxsize
, byteoffset
;
5009 = get_ref_base_and_extent (TREE_OPERAND (exp
, 0), &bitoffset
,
5010 &bitsize
, &maxsize
, &reverse
);
5012 || TREE_CODE (decl
) == PARM_DECL
5013 || TREE_CODE (decl
) == RESULT_DECL
)
5014 && (!TREE_ADDRESSABLE (decl
)
5015 || target_for_debug_bind (decl
))
5016 && multiple_p (bitoffset
, BITS_PER_UNIT
, &byteoffset
)
5017 && known_gt (bitsize
, 0)
5018 && known_eq (bitsize
, maxsize
))
5020 rtx base
= gen_rtx_DEBUG_IMPLICIT_PTR (mode
, decl
);
5021 return plus_constant (mode
, base
, byteoffset
);
5025 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == MEM_REF
5026 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
5029 op0
= expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0),
5032 && (GET_CODE (op0
) == DEBUG_IMPLICIT_PTR
5033 || (GET_CODE (op0
) == PLUS
5034 && GET_CODE (XEXP (op0
, 0)) == DEBUG_IMPLICIT_PTR
5035 && CONST_INT_P (XEXP (op0
, 1)))))
5037 op1
= expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0),
5040 if (!op1
|| !poly_int_rtx_p (op1
, &offset
))
5043 return plus_constant (mode
, op0
, offset
);
5050 as
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp
)));
5051 addr_mode
= SCALAR_INT_TYPE_MODE (TREE_TYPE (exp
));
5052 op0
= convert_debug_memory_address (addr_mode
, XEXP (op0
, 0), as
);
5058 unsigned HOST_WIDE_INT i
, nelts
;
5060 if (!VECTOR_CST_NELTS (exp
).is_constant (&nelts
))
5063 op0
= gen_rtx_CONCATN (mode
, rtvec_alloc (nelts
));
5065 for (i
= 0; i
< nelts
; ++i
)
5067 op1
= expand_debug_expr (VECTOR_CST_ELT (exp
, i
));
5070 XVECEXP (op0
, 0, i
) = op1
;
5077 if (TREE_CLOBBER_P (exp
))
5079 else if (TREE_CODE (TREE_TYPE (exp
)) == VECTOR_TYPE
)
5082 unsigned HOST_WIDE_INT nelts
;
5085 if (!TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp
)).is_constant (&nelts
))
5086 goto flag_unsupported
;
5088 op0
= gen_rtx_CONCATN (mode
, rtvec_alloc (nelts
));
5090 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp
), i
, val
)
5092 op1
= expand_debug_expr (val
);
5095 XVECEXP (op0
, 0, i
) = op1
;
5100 op1
= expand_debug_expr
5101 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp
))));
5106 for (; i
< nelts
; i
++)
5107 XVECEXP (op0
, 0, i
) = op1
;
5113 goto flag_unsupported
;
5116 /* ??? Maybe handle some builtins? */
5121 gimple
*g
= get_gimple_for_ssa_name (exp
);
5125 if (deep_ter_debug_map
)
5127 tree
*slot
= deep_ter_debug_map
->get (exp
);
5132 t
= gimple_assign_rhs_to_tree (g
);
5133 op0
= expand_debug_expr (t
);
5139 /* If this is a reference to an incoming value of
5140 parameter that is never used in the code or where the
5141 incoming value is never used in the code, use
5142 PARM_DECL's DECL_RTL if set. */
5143 if (SSA_NAME_IS_DEFAULT_DEF (exp
)
5144 && SSA_NAME_VAR (exp
)
5145 && TREE_CODE (SSA_NAME_VAR (exp
)) == PARM_DECL
5146 && has_zero_uses (exp
))
5148 op0
= expand_debug_parm_decl (SSA_NAME_VAR (exp
));
5151 op0
= expand_debug_expr (SSA_NAME_VAR (exp
));
5156 int part
= var_to_partition (SA
.map
, exp
);
5158 if (part
== NO_PARTITION
)
5161 gcc_assert (part
>= 0 && (unsigned)part
< SA
.map
->num_partitions
);
5163 op0
= copy_rtx (SA
.partition_to_pseudo
[part
]);
5171 /* Vector stuff. For most of the codes we don't have rtl codes. */
5172 case REALIGN_LOAD_EXPR
:
5174 case VEC_PACK_FIX_TRUNC_EXPR
:
5175 case VEC_PACK_FLOAT_EXPR
:
5176 case VEC_PACK_SAT_EXPR
:
5177 case VEC_PACK_TRUNC_EXPR
:
5178 case VEC_UNPACK_FIX_TRUNC_HI_EXPR
:
5179 case VEC_UNPACK_FIX_TRUNC_LO_EXPR
:
5180 case VEC_UNPACK_FLOAT_HI_EXPR
:
5181 case VEC_UNPACK_FLOAT_LO_EXPR
:
5182 case VEC_UNPACK_HI_EXPR
:
5183 case VEC_UNPACK_LO_EXPR
:
5184 case VEC_WIDEN_MULT_HI_EXPR
:
5185 case VEC_WIDEN_MULT_LO_EXPR
:
5186 case VEC_WIDEN_MULT_EVEN_EXPR
:
5187 case VEC_WIDEN_MULT_ODD_EXPR
:
5188 case VEC_WIDEN_LSHIFT_HI_EXPR
:
5189 case VEC_WIDEN_LSHIFT_LO_EXPR
:
5191 case VEC_DUPLICATE_EXPR
:
5192 case VEC_SERIES_EXPR
:
5197 case ADDR_SPACE_CONVERT_EXPR
:
5198 case FIXED_CONVERT_EXPR
:
5200 case WITH_SIZE_EXPR
:
5201 case BIT_INSERT_EXPR
:
5205 if (SCALAR_INT_MODE_P (GET_MODE (op0
))
5206 && SCALAR_INT_MODE_P (mode
))
5209 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
,
5211 ? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
,
5214 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
,
5216 ? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op1
,
5218 op0
= simplify_gen_binary (MULT
, mode
, op0
, op1
);
5219 return simplify_gen_binary (PLUS
, mode
, op0
, op2
);
5223 case WIDEN_MULT_EXPR
:
5224 case WIDEN_MULT_PLUS_EXPR
:
5225 case WIDEN_MULT_MINUS_EXPR
:
5226 if (SCALAR_INT_MODE_P (GET_MODE (op0
))
5227 && SCALAR_INT_MODE_P (mode
))
5229 inner_mode
= GET_MODE (op0
);
5230 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))))
5231 op0
= simplify_gen_unary (ZERO_EXTEND
, mode
, op0
, inner_mode
);
5233 op0
= simplify_gen_unary (SIGN_EXTEND
, mode
, op0
, inner_mode
);
5234 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1))))
5235 op1
= simplify_gen_unary (ZERO_EXTEND
, mode
, op1
, inner_mode
);
5237 op1
= simplify_gen_unary (SIGN_EXTEND
, mode
, op1
, inner_mode
);
5238 op0
= simplify_gen_binary (MULT
, mode
, op0
, op1
);
5239 if (TREE_CODE (exp
) == WIDEN_MULT_EXPR
)
5241 else if (TREE_CODE (exp
) == WIDEN_MULT_PLUS_EXPR
)
5242 return simplify_gen_binary (PLUS
, mode
, op0
, op2
);
5244 return simplify_gen_binary (MINUS
, mode
, op2
, op0
);
5248 case MULT_HIGHPART_EXPR
:
5249 /* ??? Similar to the above. */
5252 case WIDEN_SUM_EXPR
:
5253 case WIDEN_LSHIFT_EXPR
:
5254 if (SCALAR_INT_MODE_P (GET_MODE (op0
))
5255 && SCALAR_INT_MODE_P (mode
))
5258 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
,
5260 ? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
,
5262 return simplify_gen_binary (TREE_CODE (exp
) == WIDEN_LSHIFT_EXPR
5263 ? ASHIFT
: PLUS
, mode
, op0
, op1
);
5278 /* Return an RTX equivalent to the source bind value of the tree expression
5282 expand_debug_source_expr (tree exp
)
5285 machine_mode mode
= VOIDmode
, inner_mode
;
5287 switch (TREE_CODE (exp
))
5290 if (DECL_ABSTRACT_ORIGIN (exp
))
5291 return expand_debug_source_expr (DECL_ABSTRACT_ORIGIN (exp
));
5295 mode
= DECL_MODE (exp
);
5296 op0
= expand_debug_parm_decl (exp
);
5299 /* See if this isn't an argument that has been completely
5301 if (!DECL_RTL_SET_P (exp
)
5302 && !DECL_INCOMING_RTL (exp
)
5303 && DECL_ABSTRACT_ORIGIN (current_function_decl
))
5305 tree aexp
= DECL_ORIGIN (exp
);
5306 if (DECL_CONTEXT (aexp
)
5307 == DECL_ABSTRACT_ORIGIN (current_function_decl
))
5309 vec
<tree
, va_gc
> **debug_args
;
5312 debug_args
= decl_debug_args_lookup (current_function_decl
);
5313 if (debug_args
!= NULL
)
5315 for (ix
= 0; vec_safe_iterate (*debug_args
, ix
, &ddecl
);
5318 return gen_rtx_DEBUG_PARAMETER_REF (mode
, aexp
);
5328 if (op0
== NULL_RTX
)
5331 inner_mode
= GET_MODE (op0
);
5332 if (mode
== inner_mode
)
5335 if (FLOAT_MODE_P (mode
) && FLOAT_MODE_P (inner_mode
))
5337 if (GET_MODE_UNIT_BITSIZE (mode
)
5338 == GET_MODE_UNIT_BITSIZE (inner_mode
))
5339 op0
= simplify_gen_subreg (mode
, op0
, inner_mode
, 0);
5340 else if (GET_MODE_UNIT_BITSIZE (mode
)
5341 < GET_MODE_UNIT_BITSIZE (inner_mode
))
5342 op0
= simplify_gen_unary (FLOAT_TRUNCATE
, mode
, op0
, inner_mode
);
5344 op0
= simplify_gen_unary (FLOAT_EXTEND
, mode
, op0
, inner_mode
);
5346 else if (FLOAT_MODE_P (mode
))
5348 else if (FLOAT_MODE_P (inner_mode
))
5350 if (TYPE_UNSIGNED (TREE_TYPE (exp
)))
5351 op0
= simplify_gen_unary (UNSIGNED_FIX
, mode
, op0
, inner_mode
);
5353 op0
= simplify_gen_unary (FIX
, mode
, op0
, inner_mode
);
5355 else if (GET_MODE_UNIT_PRECISION (mode
)
5356 == GET_MODE_UNIT_PRECISION (inner_mode
))
5357 op0
= lowpart_subreg (mode
, op0
, inner_mode
);
5358 else if (GET_MODE_UNIT_PRECISION (mode
)
5359 < GET_MODE_UNIT_PRECISION (inner_mode
))
5360 op0
= simplify_gen_unary (TRUNCATE
, mode
, op0
, inner_mode
);
5361 else if (TYPE_UNSIGNED (TREE_TYPE (exp
)))
5362 op0
= simplify_gen_unary (ZERO_EXTEND
, mode
, op0
, inner_mode
);
5364 op0
= simplify_gen_unary (SIGN_EXTEND
, mode
, op0
, inner_mode
);
/* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbound complexity.
   Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
   deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN.  */

static void
avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
{
  rtx exp = *exp_p;

  if (exp == NULL_RTX)
    return;

  if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
    return;

  if (depth == 4)
    {
      /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
      rtx dval = make_debug_expr_from_rtl (exp);

      /* Emit a debug bind insn before INSN.  */
      rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
				       DEBUG_EXPR_TREE_DECL (dval), exp,
				       VAR_INIT_STATUS_INITIALIZED);

      emit_debug_insn_before (bind, insn);
      *exp_p = dval;
      return;
    }

  const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
  int i, j;
  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
    switch (*format_ptr++)
      {
      case 'e':
	avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
	break;

      case 'E':
      case 'V':
	for (j = 0; j < XVECLEN (exp, i); j++)
	  avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
	break;

      default:
	break;
      }
}
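
/* Illustrative example (comment only): if a debug insn's location is
   something like
     (plus (mult (plus (mult (plus ...) ...) ...) ...) ...)
   the subexpression found at nesting depth 4 is moved into a fresh
   DEBUG_EXPR, a bind such as
     # DEBUG D#n => <subexpression>
   is emitted before INSN, and the subexpression is replaced by the
   DEBUG_EXPR rtx, keeping INSN_VAR_LOCATION_LOC bounded.  D#n is just an
   example name.  */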
/* Expand the _LOCs in debug insns.  We run this after expanding all
   regular insns, so that any variables referenced in the function
   will have their DECL_RTLs set.  */

static void
expand_debug_locations (void)
{
  rtx_insn *insn;
  rtx_insn *last = get_last_insn ();
  int save_strict_alias = flag_strict_aliasing;

  /* New alias sets while setting up memory attributes cause
     -fcompare-debug failures, even though it doesn't bring about any
     codegen changes.  */
  flag_strict_aliasing = 0;

  for (insn = get_insns (); insn; insn = NEXT_INSN (insn))
    if (DEBUG_BIND_INSN_P (insn))
      {
	tree value = (tree)INSN_VAR_LOCATION_LOC (insn);
	rtx val;
	rtx_insn *prev_insn, *insn2;
	machine_mode mode;

	if (value == NULL_TREE)
	  val = NULL_RTX;
	else
	  {
	    if (INSN_VAR_LOCATION_STATUS (insn)
		== VAR_INIT_STATUS_UNINITIALIZED)
	      val = expand_debug_source_expr (value);
	    /* The avoid_deep_ter_for_debug function inserts
	       debug bind stmts after SSA_NAME definition, with the
	       SSA_NAME as the whole bind location.  Disable temporarily
	       expansion of that SSA_NAME into the DEBUG_EXPR_DECL
	       being defined in this DEBUG_INSN.  */
	    else if (deep_ter_debug_map && TREE_CODE (value) == SSA_NAME)
	      {
		tree *slot = deep_ter_debug_map->get (value);
		if (slot)
		  {
		    if (*slot == INSN_VAR_LOCATION_DECL (insn))
		      *slot = NULL_TREE;
		    else
		      slot = NULL;
		  }
		val = expand_debug_expr (value);
		if (slot)
		  *slot = INSN_VAR_LOCATION_DECL (insn);
	      }
	    else
	      val = expand_debug_expr (value);
	    gcc_assert (last == get_last_insn ());
	  }

	if (!val)
	  val = gen_rtx_UNKNOWN_VAR_LOC ();
	else
	  {
	    mode = GET_MODE (INSN_VAR_LOCATION (insn));

	    gcc_assert (mode == GET_MODE (val)
			|| (GET_MODE (val) == VOIDmode
			    && (CONST_SCALAR_INT_P (val)
				|| GET_CODE (val) == CONST_FIXED
				|| GET_CODE (val) == LABEL_REF)));
	  }

	INSN_VAR_LOCATION_LOC (insn) = val;
	prev_insn = PREV_INSN (insn);
	for (insn2 = insn; insn2 != prev_insn; insn2 = PREV_INSN (insn2))
	  avoid_complex_debug_insns (insn2, &INSN_VAR_LOCATION_LOC (insn2), 0);
      }

  flag_strict_aliasing = save_strict_alias;
}
/* Performs swapping operands of commutative operations to expand
   the expensive one first.  */

static void
reorder_operands (basic_block bb)
{
  unsigned int *lattice;  /* Hold cost of each statement.  */
  unsigned int i = 0, n = 0;
  gimple_stmt_iterator gsi;
  gimple_seq stmts;
  gimple *stmt;
  bool swap;
  tree op0, op1;
  ssa_op_iter iter;
  use_operand_p use_p;
  gimple *def0, *def1;

  /* Compute cost of each statement using estimate_num_insns.  */
  stmts = bb_seq (bb);
  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (!is_gimple_debug (stmt))
	gimple_set_uid (stmt, n++);
    }
  lattice = XNEWVEC (unsigned int, n);
  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      unsigned cost;
      stmt = gsi_stmt (gsi);
      if (is_gimple_debug (stmt))
	continue;
      cost = estimate_num_insns (stmt, &eni_size_weights);
      lattice[i] = cost;
      FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
	{
	  tree use = USE_FROM_PTR (use_p);
	  gimple *def_stmt;
	  if (TREE_CODE (use) != SSA_NAME)
	    continue;
	  def_stmt = get_gimple_for_ssa_name (use);
	  if (!def_stmt)
	    continue;
	  lattice[i] += lattice[gimple_uid (def_stmt)];
	}
      i++;
      if (!is_gimple_assign (stmt)
	  || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
	continue;
      op0 = gimple_op (stmt, 1);
      op1 = gimple_op (stmt, 2);
      if (TREE_CODE (op0) != SSA_NAME
	  || TREE_CODE (op1) != SSA_NAME)
	continue;
      /* Swap operands if the second one is more expensive.  */
      def0 = get_gimple_for_ssa_name (op0);
      def1 = get_gimple_for_ssa_name (op1);
      if (!def1)
	continue;
      swap = false;
      if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
	swap = true;
      if (swap)
	{
	  if (dump_file && (dump_flags & TDF_DETAILS))
	    {
	      fprintf (dump_file, "Swap operands in stmt:\n");
	      print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
	      fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
		       def0 ? lattice[gimple_uid (def0)] : 0,
		       lattice[gimple_uid (def1)]);
	    }
	  swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
			     gimple_assign_rhs2_ptr (stmt));
	}
    }
  XDELETE (lattice);
}
5575 /* Expand basic block BB from GIMPLE trees to RTL. */
5578 expand_gimple_basic_block (basic_block bb
, bool disable_tail_calls
)
5580 gimple_stmt_iterator gsi
;
5582 gimple
*stmt
= NULL
;
5583 rtx_note
*note
= NULL
;
5589 fprintf (dump_file
, "\n;; Generating RTL for gimple basic block %d\n",
5592 /* Note that since we are now transitioning from GIMPLE to RTL, we
5593 cannot use the gsi_*_bb() routines because they expect the basic
5594 block to be in GIMPLE, instead of RTL. Therefore, we need to
5595 access the BB sequence directly. */
5597 reorder_operands (bb
);
5598 stmts
= bb_seq (bb
);
5599 bb
->il
.gimple
.seq
= NULL
;
5600 bb
->il
.gimple
.phi_nodes
= NULL
;
5601 rtl_profile_for_bb (bb
);
5602 init_rtl_bb_info (bb
);
5603 bb
->flags
|= BB_RTL
;
5605 /* Remove the RETURN_EXPR if we may fall though to the exit
5607 gsi
= gsi_last (stmts
);
5608 if (!gsi_end_p (gsi
)
5609 && gimple_code (gsi_stmt (gsi
)) == GIMPLE_RETURN
)
5611 greturn
*ret_stmt
= as_a
<greturn
*> (gsi_stmt (gsi
));
5613 gcc_assert (single_succ_p (bb
));
5614 gcc_assert (single_succ (bb
) == EXIT_BLOCK_PTR_FOR_FN (cfun
));
5616 if (bb
->next_bb
== EXIT_BLOCK_PTR_FOR_FN (cfun
)
5617 && !gimple_return_retval (ret_stmt
))
5619 gsi_remove (&gsi
, false);
5620 single_succ_edge (bb
)->flags
|= EDGE_FALLTHRU
;
5624 gsi
= gsi_start (stmts
);
5625 if (!gsi_end_p (gsi
))
5627 stmt
= gsi_stmt (gsi
);
5628 if (gimple_code (stmt
) != GIMPLE_LABEL
)
5632 rtx_code_label
**elt
= lab_rtx_for_bb
->get (bb
);
5636 gcc_checking_assert (!note
);
5637 last
= get_last_insn ();
5641 expand_gimple_stmt (stmt
);
5648 BB_HEAD (bb
) = NEXT_INSN (last
);
5649 if (NOTE_P (BB_HEAD (bb
)))
5650 BB_HEAD (bb
) = NEXT_INSN (BB_HEAD (bb
));
5651 gcc_assert (LABEL_P (BB_HEAD (bb
)));
5652 note
= emit_note_after (NOTE_INSN_BASIC_BLOCK
, BB_HEAD (bb
));
5654 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
5657 BB_HEAD (bb
) = note
= emit_note (NOTE_INSN_BASIC_BLOCK
);
5660 NOTE_BASIC_BLOCK (note
) = bb
;
5662 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
5666 stmt
= gsi_stmt (gsi
);
5668 /* If this statement is a non-debug one, and we generate debug
5669 insns, then this one might be the last real use of a TERed
5670 SSA_NAME, but where there are still some debug uses further
5671 down. Expanding the current SSA name in such further debug
5672 uses by their RHS might lead to wrong debug info, as coalescing
5673 might make the operands of such RHS be placed into the same
5674 pseudo as something else. Like so:
5675 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
5679 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
	 If we now would expand a_1 by its RHS (a_0 + 1) in the debug use,
5681 the write to a_2 would actually have clobbered the place which
5684 So, instead of that, we recognize the situation, and generate
5685 debug temporaries at the last real use of TERed SSA names:
5692 if (MAY_HAVE_DEBUG_BIND_INSNS
5694 && !is_gimple_debug (stmt
))
5700 location_t sloc
= curr_insn_location ();
5702 /* Look for SSA names that have their last use here (TERed
5703 names always have only one real use). */
5704 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_USE
)
5705 if ((def
= get_gimple_for_ssa_name (op
)))
5707 imm_use_iterator imm_iter
;
5708 use_operand_p use_p
;
5709 bool have_debug_uses
= false;
5711 FOR_EACH_IMM_USE_FAST (use_p
, imm_iter
, op
)
5713 if (gimple_debug_bind_p (USE_STMT (use_p
)))
5715 have_debug_uses
= true;
5720 if (have_debug_uses
)
5722 /* OP is a TERed SSA name, with DEF its defining
5723 statement, and where OP is used in further debug
5724 instructions. Generate a debug temporary, and
5725 replace all uses of OP in debug insns with that
5728 tree value
= gimple_assign_rhs_to_tree (def
);
5729 tree vexpr
= make_node (DEBUG_EXPR_DECL
);
5733 set_curr_insn_location (gimple_location (def
));
5735 DECL_ARTIFICIAL (vexpr
) = 1;
5736 TREE_TYPE (vexpr
) = TREE_TYPE (value
);
5738 mode
= DECL_MODE (value
);
5740 mode
= TYPE_MODE (TREE_TYPE (value
));
5741 SET_DECL_MODE (vexpr
, mode
);
5743 val
= gen_rtx_VAR_LOCATION
5744 (mode
, vexpr
, (rtx
)value
, VAR_INIT_STATUS_INITIALIZED
);
5746 emit_debug_insn (val
);
5748 FOR_EACH_IMM_USE_STMT (debugstmt
, imm_iter
, op
)
5750 if (!gimple_debug_bind_p (debugstmt
))
5753 FOR_EACH_IMM_USE_ON_STMT (use_p
, imm_iter
)
5754 SET_USE (use_p
, vexpr
);
5756 update_stmt (debugstmt
);
5760 set_curr_insn_location (sloc
);
5763 currently_expanding_gimple_stmt
= stmt
;
5765 /* Expand this statement, then evaluate the resulting RTL and
5766 fixup the CFG accordingly. */
5767 if (gimple_code (stmt
) == GIMPLE_COND
)
5769 new_bb
= expand_gimple_cond (bb
, as_a
<gcond
*> (stmt
));
5773 else if (is_gimple_debug (stmt
))
5775 location_t sloc
= curr_insn_location ();
5776 gimple_stmt_iterator nsi
= gsi
;
5781 tree value
= NULL_TREE
;
5785 if (!gimple_debug_nonbind_marker_p (stmt
))
5787 if (gimple_debug_bind_p (stmt
))
5789 var
= gimple_debug_bind_get_var (stmt
);
5791 if (TREE_CODE (var
) != DEBUG_EXPR_DECL
5792 && TREE_CODE (var
) != LABEL_DECL
5793 && !target_for_debug_bind (var
))
5794 goto delink_debug_stmt
;
5797 mode
= DECL_MODE (var
);
5799 mode
= TYPE_MODE (TREE_TYPE (var
));
5801 if (gimple_debug_bind_has_value_p (stmt
))
5802 value
= gimple_debug_bind_get_value (stmt
);
5804 val
= gen_rtx_VAR_LOCATION
5805 (mode
, var
, (rtx
)value
, VAR_INIT_STATUS_INITIALIZED
);
5807 else if (gimple_debug_source_bind_p (stmt
))
5809 var
= gimple_debug_source_bind_get_var (stmt
);
5811 value
= gimple_debug_source_bind_get_value (stmt
);
5813 mode
= DECL_MODE (var
);
5815 val
= gen_rtx_VAR_LOCATION (mode
, var
, (rtx
)value
,
5816 VAR_INIT_STATUS_UNINITIALIZED
);
5821 /* If this function was first compiled with markers
5822 enabled, but they're now disable (e.g. LTO), drop
5823 them on the floor. */
5824 else if (gimple_debug_nonbind_marker_p (stmt
)
5825 && !MAY_HAVE_DEBUG_MARKER_INSNS
)
5826 goto delink_debug_stmt
;
5827 else if (gimple_debug_begin_stmt_p (stmt
))
5828 val
= GEN_RTX_DEBUG_MARKER_BEGIN_STMT_PAT ();
5829 else if (gimple_debug_inline_entry_p (stmt
))
5831 tree block
= gimple_block (stmt
);
5834 val
= GEN_RTX_DEBUG_MARKER_INLINE_ENTRY_PAT ();
5836 goto delink_debug_stmt
;
5841 last
= get_last_insn ();
5843 set_curr_insn_location (gimple_location (stmt
));
5845 emit_debug_insn (val
);
5847 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5849 /* We can't dump the insn with a TREE where an RTX
5851 if (GET_CODE (val
) == VAR_LOCATION
)
5853 gcc_checking_assert (PAT_VAR_LOCATION_LOC (val
) == (rtx
)value
);
5854 PAT_VAR_LOCATION_LOC (val
) = const0_rtx
;
5856 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
5857 if (GET_CODE (val
) == VAR_LOCATION
)
5858 PAT_VAR_LOCATION_LOC (val
) = (rtx
)value
;
5862 /* In order not to generate too many debug temporaries,
5863 we delink all uses of debug statements we already expanded.
5864 Therefore debug statements between definition and real
5865 use of TERed SSA names will continue to use the SSA name,
5866 and not be replaced with debug temps. */
5867 delink_stmt_imm_use (stmt
);
5871 if (gsi_end_p (nsi
))
5873 stmt
= gsi_stmt (nsi
);
5874 if (!is_gimple_debug (stmt
))
5878 set_curr_insn_location (sloc
);
5882 gcall
*call_stmt
= dyn_cast
<gcall
*> (stmt
);
5884 && gimple_call_tail_p (call_stmt
)
5885 && disable_tail_calls
)
5886 gimple_call_set_tail (call_stmt
, false);
5888 if (call_stmt
&& gimple_call_tail_p (call_stmt
))
5891 new_bb
= expand_gimple_tailcall (bb
, call_stmt
, &can_fallthru
);
5902 def_operand_p def_p
;
5903 def_p
= SINGLE_SSA_DEF_OPERAND (stmt
, SSA_OP_DEF
);
5907 /* Ignore this stmt if it is in the list of
5908 replaceable expressions. */
5910 && bitmap_bit_p (SA
.values
,
5911 SSA_NAME_VERSION (DEF_FROM_PTR (def_p
))))
5914 last
= expand_gimple_stmt (stmt
);
5915 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
5920 currently_expanding_gimple_stmt
= NULL
;
5922 /* Expand implicit goto and convert goto_locus. */
5923 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5925 if (e
->goto_locus
!= UNKNOWN_LOCATION
)
5926 set_curr_insn_location (e
->goto_locus
);
5927 if ((e
->flags
& EDGE_FALLTHRU
) && e
->dest
!= bb
->next_bb
)
5929 emit_jump (label_rtx_for_bb (e
->dest
));
5930 e
->flags
&= ~EDGE_FALLTHRU
;
5934 /* Expanded RTL can create a jump in the last instruction of block.
5935 This later might be assumed to be a jump to successor and break edge insertion.
5936 We need to insert dummy move to prevent this. PR41440. */
5937 if (single_succ_p (bb
)
5938 && (single_succ_edge (bb
)->flags
& EDGE_FALLTHRU
)
5939 && (last
= get_last_insn ())
5941 || (DEBUG_INSN_P (last
)
5942 && JUMP_P (prev_nondebug_insn (last
)))))
5944 rtx dummy
= gen_reg_rtx (SImode
);
5945 emit_insn_after_noloc (gen_move_insn (dummy
, dummy
), last
, NULL
);
5948 do_pending_stack_adjust ();
5950 /* Find the block tail. The last insn in the block is the insn
5951 before a barrier and/or table jump insn. */
5952 last
= get_last_insn ();
5953 if (BARRIER_P (last
))
5954 last
= PREV_INSN (last
);
5955 if (JUMP_TABLE_DATA_P (last
))
5956 last
= PREV_INSN (PREV_INSN (last
));
5957 if (BARRIER_P (last
))
5958 last
= PREV_INSN (last
);
5961 update_bb_for_insn (bb
);
5967 /* Create a basic block for initialization code. */
5970 construct_init_block (void)
5972 basic_block init_block
, first_block
;
5976 /* Multiple entry points not supported yet. */
5977 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun
)->succs
) == 1);
5978 init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
5979 init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun
));
5980 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->flags
|= BB_RTL
;
5981 EXIT_BLOCK_PTR_FOR_FN (cfun
)->flags
|= BB_RTL
;
5983 e
= EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun
), 0);
5985 /* When entry edge points to first basic block, we don't need jump,
5986 otherwise we have to jump into proper target. */
5987 if (e
&& e
->dest
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->next_bb
)
5989 tree label
= gimple_block_label (e
->dest
);
5991 emit_jump (jump_target_rtx (label
));
5995 flags
= EDGE_FALLTHRU
;
5997 init_block
= create_basic_block (NEXT_INSN (get_insns ()),
5999 ENTRY_BLOCK_PTR_FOR_FN (cfun
));
6000 init_block
->count
= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
;
6001 add_bb_to_loop (init_block
, ENTRY_BLOCK_PTR_FOR_FN (cfun
)->loop_father
);
6004 first_block
= e
->dest
;
6005 redirect_edge_succ (e
, init_block
);
6006 make_single_succ_edge (init_block
, first_block
, flags
);
6009 make_single_succ_edge (init_block
, EXIT_BLOCK_PTR_FOR_FN (cfun
),
6012 update_bb_for_insn (init_block
);
6016 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
6017 found in the block tree. */
6020 set_block_levels (tree block
, int level
)
6024 BLOCK_NUMBER (block
) = level
;
6025 set_block_levels (BLOCK_SUBBLOCKS (block
), level
+ 1);
6026 block
= BLOCK_CHAIN (block
);
6030 /* Create a block containing landing pads and similar stuff. */
6033 construct_exit_block (void)
6035 rtx_insn
*head
= get_last_insn ();
6037 basic_block exit_block
;
6041 basic_block prev_bb
= EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
;
6042 rtx_insn
*orig_end
= BB_END (prev_bb
);
6044 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun
));
6046 /* Make sure the locus is set to the end of the function, so that
6047 epilogue line numbers and warnings are set properly. */
6048 if (LOCATION_LOCUS (cfun
->function_end_locus
) != UNKNOWN_LOCATION
)
6049 input_location
= cfun
->function_end_locus
;
6051 /* Generate rtl for function exit. */
6052 expand_function_end ();
6054 end
= get_last_insn ();
6057 /* While emitting the function end we could move end of the last basic
6059 BB_END (prev_bb
) = orig_end
;
6060 while (NEXT_INSN (head
) && NOTE_P (NEXT_INSN (head
)))
6061 head
= NEXT_INSN (head
);
6062 /* But make sure exit_block starts with RETURN_LABEL, otherwise the
6063 bb count counting will be confused. Any instructions before that
6064 label are emitted for the case where PREV_BB falls through into the
6065 exit block, so append those instructions to prev_bb in that case. */
6066 if (NEXT_INSN (head
) != return_label
)
6068 while (NEXT_INSN (head
) != return_label
)
6070 if (!NOTE_P (NEXT_INSN (head
)))
6071 BB_END (prev_bb
) = NEXT_INSN (head
);
6072 head
= NEXT_INSN (head
);
6075 exit_block
= create_basic_block (NEXT_INSN (head
), end
, prev_bb
);
6076 exit_block
->count
= EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
;
6077 add_bb_to_loop (exit_block
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->loop_father
);
6080 while (ix
< EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
))
6082 e
= EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun
), ix
);
6083 if (!(e
->flags
& EDGE_ABNORMAL
))
6084 redirect_edge_succ (e
, exit_block
);
6089 e
= make_single_succ_edge (exit_block
, EXIT_BLOCK_PTR_FOR_FN (cfun
),
6091 FOR_EACH_EDGE (e2
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
6094 exit_block
->count
-= e2
->count ();
6096 update_bb_for_insn (exit_block
);
6099 /* Helper function for discover_nonconstant_array_refs.
6100 Look for ARRAY_REF nodes with non-constant indexes and mark them
6104 discover_nonconstant_array_refs_r (tree
* tp
, int *walk_subtrees
,
6105 void *data ATTRIBUTE_UNUSED
)
6109 if (IS_TYPE_OR_DECL_P (t
))
6111 else if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
6113 while (((TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
6114 && is_gimple_min_invariant (TREE_OPERAND (t
, 1))
6115 && (!TREE_OPERAND (t
, 2)
6116 || is_gimple_min_invariant (TREE_OPERAND (t
, 2))))
6117 || (TREE_CODE (t
) == COMPONENT_REF
6118 && (!TREE_OPERAND (t
,2)
6119 || is_gimple_min_invariant (TREE_OPERAND (t
, 2))))
6120 || TREE_CODE (t
) == BIT_FIELD_REF
6121 || TREE_CODE (t
) == REALPART_EXPR
6122 || TREE_CODE (t
) == IMAGPART_EXPR
6123 || TREE_CODE (t
) == VIEW_CONVERT_EXPR
6124 || CONVERT_EXPR_P (t
))
6125 t
= TREE_OPERAND (t
, 0);
6127 if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
6129 t
= get_base_address (t
);
6131 && DECL_MODE (t
) != BLKmode
)
6132 TREE_ADDRESSABLE (t
) = 1;
6137 /* References of size POLY_INT_CST to a fixed-size object must go
6138 through memory. It's more efficient to force that here than
6139 to create temporary slots on the fly. */
6140 else if ((TREE_CODE (t
) == MEM_REF
|| TREE_CODE (t
) == TARGET_MEM_REF
)
6141 && TYPE_SIZE (TREE_TYPE (t
))
6142 && POLY_INT_CST_P (TYPE_SIZE (TREE_TYPE (t
))))
6144 tree base
= get_base_address (t
);
6147 && DECL_MODE (base
) != BLKmode
6148 && GET_MODE_SIZE (DECL_MODE (base
)).is_constant ())
6149 TREE_ADDRESSABLE (base
) = 1;
6156 /* RTL expansion is not able to compile array references with variable
6157 offsets for arrays stored in single register. Discover such
6158 expressions and mark variables as addressable to avoid this
6162 discover_nonconstant_array_refs (void)
6165 gimple_stmt_iterator gsi
;
6167 FOR_EACH_BB_FN (bb
, cfun
)
6168 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
6170 gimple
*stmt
= gsi_stmt (gsi
);
6171 if (!is_gimple_debug (stmt
))
6173 walk_gimple_op (stmt
, discover_nonconstant_array_refs_r
, NULL
);
6174 gcall
*call
= dyn_cast
<gcall
*> (stmt
);
6175 if (call
&& gimple_call_internal_p (call
))
6176 switch (gimple_call_internal_fn (call
))
6178 case IFN_LOAD_LANES
:
6179 /* The source must be a MEM. */
6180 mark_addressable (gimple_call_arg (call
, 0));
6182 case IFN_STORE_LANES
:
6183 /* The destination must be a MEM. */
6184 mark_addressable (gimple_call_lhs (call
));
6193 /* This function sets crtl->args.internal_arg_pointer to a virtual
6194 register if DRAP is needed. Local register allocator will replace
6195 virtual_incoming_args_rtx with the virtual register. */
6198 expand_stack_alignment (void)
6201 unsigned int preferred_stack_boundary
;
6203 if (! SUPPORTS_STACK_ALIGNMENT
)
6206 if (cfun
->calls_alloca
6207 || cfun
->has_nonlocal_label
6208 || crtl
->has_nonlocal_goto
)
6209 crtl
->need_drap
= true;
6211 /* Call update_stack_boundary here again to update incoming stack
6212 boundary. It may set incoming stack alignment to a different
6213 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
6214 use the minimum incoming stack alignment to check if it is OK
6215 to perform sibcall optimization since sibcall optimization will
6216 only align the outgoing stack to incoming stack boundary. */
6217 if (targetm
.calls
.update_stack_boundary
)
6218 targetm
.calls
.update_stack_boundary ();
6220 /* The incoming stack frame has to be aligned at least at
6221 parm_stack_boundary. */
6222 gcc_assert (crtl
->parm_stack_boundary
<= INCOMING_STACK_BOUNDARY
);
6224 /* Update crtl->stack_alignment_estimated and use it later to align
6225 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
6226 exceptions since callgraph doesn't collect incoming stack alignment
6228 if (cfun
->can_throw_non_call_exceptions
6229 && PREFERRED_STACK_BOUNDARY
> crtl
->preferred_stack_boundary
)
6230 preferred_stack_boundary
= PREFERRED_STACK_BOUNDARY
;
6232 preferred_stack_boundary
= crtl
->preferred_stack_boundary
;
6233 if (preferred_stack_boundary
> crtl
->stack_alignment_estimated
)
6234 crtl
->stack_alignment_estimated
= preferred_stack_boundary
;
6235 if (preferred_stack_boundary
> crtl
->stack_alignment_needed
)
6236 crtl
->stack_alignment_needed
= preferred_stack_boundary
;
6238 gcc_assert (crtl
->stack_alignment_needed
6239 <= crtl
->stack_alignment_estimated
);
6241 crtl
->stack_realign_needed
6242 = INCOMING_STACK_BOUNDARY
< crtl
->stack_alignment_estimated
;
6243 crtl
->stack_realign_tried
= crtl
->stack_realign_needed
;
6245 crtl
->stack_realign_processed
= true;
6247 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
6249 gcc_assert (targetm
.calls
.get_drap_rtx
!= NULL
);
6250 drap_rtx
= targetm
.calls
.get_drap_rtx ();
6252 /* stack_realign_drap and drap_rtx must match. */
6253 gcc_assert ((stack_realign_drap
!= 0) == (drap_rtx
!= NULL
));
6255 /* Do nothing if NULL is returned, which means DRAP is not needed. */
6256 if (drap_rtx
!= NULL
)
6258 crtl
->args
.internal_arg_pointer
= drap_rtx
;
6260 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
6262 fixup_tail_calls ();
6268 expand_main_function (void)
6270 #if (defined(INVOKE__main) \
6271 || (!defined(HAS_INIT_SECTION) \
6272 && !defined(INIT_SECTION_ASM_OP) \
6273 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
6274 emit_library_call (init_one_libfunc (NAME__MAIN
), LCT_NORMAL
, VOIDmode
);
6279 /* Expand code to initialize the stack_protect_guard. This is invoked at
6280 the beginning of a function to be protected. */
6283 stack_protect_prologue (void)
6285 tree guard_decl
= targetm
.stack_protect_guard ();
6288 crtl
->stack_protect_guard_decl
= guard_decl
;
6289 x
= expand_normal (crtl
->stack_protect_guard
);
6291 if (targetm
.have_stack_protect_combined_set () && guard_decl
)
6293 gcc_assert (DECL_P (guard_decl
));
6294 y
= DECL_RTL (guard_decl
);
6296 /* Allow the target to compute address of Y and copy it to X without
6297 leaking Y into a register. This combined address + copy pattern
6298 allows the target to prevent spilling of any intermediate results by
6299 splitting it after register allocator. */
6300 if (rtx_insn
*insn
= targetm
.gen_stack_protect_combined_set (x
, y
))
6308 y
= expand_normal (guard_decl
);
6312 /* Allow the target to copy from Y to X without leaking Y into a
6314 if (targetm
.have_stack_protect_set ())
6315 if (rtx_insn
*insn
= targetm
.gen_stack_protect_set (x
, y
))
6321 /* Otherwise do a straight move. */
6322 emit_move_insn (x
, y
);
6325 /* Translate the intermediate representation contained in the CFG
6326 from GIMPLE trees to RTL.
6328 We do conversion per basic block and preserve/update the tree CFG.
6329 This implies we have to do some magic as the CFG can simultaneously
6330 consist of basic blocks containing RTL and GIMPLE trees. This can
6331 confuse the CFG hooks, so be careful to not manipulate CFG during
6336 const pass_data pass_data_expand
=
6338 RTL_PASS
, /* type */
6339 "expand", /* name */
6340 OPTGROUP_NONE
, /* optinfo_flags */
6341 TV_EXPAND
, /* tv_id */
6342 ( PROP_ssa
| PROP_gimple_leh
| PROP_cfg
6345 | PROP_gimple_lva
), /* properties_required */
6346 PROP_rtl
, /* properties_provided */
6347 ( PROP_ssa
| PROP_trees
), /* properties_destroyed */
6348 0, /* todo_flags_start */
6349 0, /* todo_flags_finish */
6352 class pass_expand
: public rtl_opt_pass
6355 pass_expand (gcc::context
*ctxt
)
6356 : rtl_opt_pass (pass_data_expand
, ctxt
)
6359 /* opt_pass methods: */
6360 virtual unsigned int execute (function
*);
6362 }; // class pass_expand
6365 pass_expand::execute (function
*fun
)
6367 basic_block bb
, init_block
;
6370 rtx_insn
*var_seq
, *var_ret_seq
;
6373 timevar_push (TV_OUT_OF_SSA
);
6374 rewrite_out_of_ssa (&SA
);
6375 timevar_pop (TV_OUT_OF_SSA
);
6376 SA
.partition_to_pseudo
= XCNEWVEC (rtx
, SA
.map
->num_partitions
);
6378 if (MAY_HAVE_DEBUG_BIND_STMTS
&& flag_tree_ter
)
6380 gimple_stmt_iterator gsi
;
6381 FOR_EACH_BB_FN (bb
, cfun
)
6382 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
6383 if (gimple_debug_bind_p (gsi_stmt (gsi
)))
6384 avoid_deep_ter_for_debug (gsi_stmt (gsi
), 0);
6387 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
6388 discover_nonconstant_array_refs ();
6390 /* Make sure all values used by the optimization passes have sane
6394 /* Some backends want to know that we are expanding to RTL. */
6395 currently_expanding_to_rtl
= 1;
6396 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
6397 free_dominance_info (CDI_DOMINATORS
);
6399 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun
));
6401 insn_locations_init ();
6402 if (!DECL_IS_BUILTIN (current_function_decl
))
6404 /* Eventually, all FEs should explicitly set function_start_locus. */
6405 if (LOCATION_LOCUS (fun
->function_start_locus
) == UNKNOWN_LOCATION
)
6406 set_curr_insn_location
6407 (DECL_SOURCE_LOCATION (current_function_decl
));
6409 set_curr_insn_location (fun
->function_start_locus
);
6412 set_curr_insn_location (UNKNOWN_LOCATION
);
6413 prologue_location
= curr_insn_location ();
6415 #ifdef INSN_SCHEDULING
6416 init_sched_attrs ();
6419 /* Make sure first insn is a note even if we don't want linenums.
6420 This makes sure the first insn will never be deleted.
6421 Also, final expects a note to appear there. */
6422 emit_note (NOTE_INSN_DELETED
);
6424 targetm
.expand_to_rtl_hook ();
6425 crtl
->init_stack_alignment ();
6426 fun
->cfg
->max_jumptable_ents
= 0;
  /* Resolve the function section.  Some targets, like ARM EABI rely on knowledge
     of the function section at expansion time to predict distance of calls.  */
6430 resolve_unique_section (current_function_decl
, 0, flag_function_sections
);
  /* Expand the variables recorded during gimple lowering.  */
  timevar_push (TV_VAR_EXPAND);
  start_sequence ();

  var_ret_seq = expand_used_vars ();

  var_seq = get_insns ();
  end_sequence ();
  timevar_pop (TV_VAR_EXPAND);
  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
    {
      if (fun->calls_alloca)
	warning (OPT_Wstack_protector,
		 "stack protector not protecting local variables: "
		 "variable length buffer");
      if (has_short_buffer && !crtl->stack_protect_guard)
	warning (OPT_Wstack_protector,
		 "stack protector not protecting function: "
		 "all local arrays are less than %d bytes long",
		 (int) param_ssp_buffer_size);
    }
  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);
  /* If we emitted any instructions for setting up the variables,
     emit them before the FUNCTION_START note.  */
  if (var_seq)
    {
      emit_insn_before (var_seq, parm_birth_insn);

      /* In expand_function_end we'll insert the alloca save/restore
	 before parm_birth_insn.  We've just inserted an alloca call.
	 Adjust the pointer to match.  */
      parm_birth_insn = var_seq;
    }
  /* Now propagate the RTL assignment of each partition to the
     underlying var of each SSA_NAME.  */
  tree name;

  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      /* We might have generated new SSA names in
	 update_alias_info_with_stack_vars.  They will have a NULL
	 defining statement, and won't be part of the partitioning,
	 so ignore those.  */
      if (!SSA_NAME_DEF_STMT (name))
	continue;

      adjust_one_expanded_partition_var (name);
    }
  /* Clean up the RTL of variables that straddle multiple partitions, and
     check that the rtl of any PARM_DECLs that are not cleaned up is that
     of their default defs.  */
  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      int part;

      /* We might have generated new SSA names in
	 update_alias_info_with_stack_vars.  They will have a NULL
	 defining statement, and won't be part of the partitioning,
	 so ignore those.  */
      if (!SSA_NAME_DEF_STMT (name))
	continue;
      part = var_to_partition (SA.map, name);
      if (part == NO_PARTITION)
	continue;

      /* If this decl was marked as living in multiple places, reset
	 this now to NULL.  */
      tree var = SSA_NAME_VAR (name);
      if (var && DECL_RTL_IF_SET (var) == pc_rtx)
	SET_DECL_RTL (var, NULL);
      /* Check that the pseudos chosen by assign_parms are those of
	 the corresponding default defs.  */
      else if (SSA_NAME_IS_DEFAULT_DEF (name)
	       && (TREE_CODE (var) == PARM_DECL
		   || TREE_CODE (var) == RESULT_DECL))
	{
	  rtx in = DECL_RTL_IF_SET (var);
	  gcc_assert (in);
	  rtx out = SA.partition_to_pseudo[part];
	  gcc_assert (in == out);

	  /* Now reset VAR's RTL to IN, so that the _EXPR attrs match
	     those expected by debug backends for each parm and for
	     the result.  This is particularly important for stabs,
	     whose register elimination from parm's DECL_RTL may cause
	     -fcompare-debug differences as SET_DECL_RTL changes reg's
	     attrs.  So, make sure the RTL already has the parm as the
	     EXPR, so that it won't change.  */
	  SET_DECL_RTL (var, NULL_RTX);
	  if (MEM_P (in))
	    set_mem_attributes (in, var, true);
	  SET_DECL_RTL (var, in);
	}
    }
  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();
  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (crtl->stack_protect_guard && targetm.stack_protect_runtime_enabled_p ())
    stack_protect_prologue ();

  expand_phi_nodes (&SA);

  /* Release any stale SSA redirection data.  */
  redirect_edge_var_map_empty ();

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();
  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
     remaining edges later.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
    e->flags &= ~EDGE_EXECUTABLE;
  /* If the function has too many markers, drop them while expanding.  */
  if (cfun->debug_marker_count
      >= param_max_debug_marker_count)
    cfun->debug_nonbind_markers = false;
  lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
		  next_bb)
    bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
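/* Note (added commentary): expand_gimple_basic_block may split the block it
   expands (e.g. around tail calls or conditional branches it handles
   specially) and returns the last block it produced, which is why the
   iteration variable is reassigned above.  */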
  if (MAY_HAVE_DEBUG_BIND_INSNS)
    expand_debug_locations ();

  if (deep_ter_debug_map)
    {
      delete deep_ter_debug_map;
      deep_ter_debug_map = NULL;
    }
  /* Free stuff we no longer need after GIMPLE optimizations.  */
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);
  delete_tree_cfg_annotations (fun);

  timevar_push (TV_OUT_OF_SSA);
  finish_out_of_ssa (&SA);
  timevar_pop (TV_OUT_OF_SSA);

  timevar_push (TV_POST_EXPAND);
  /* We are no longer in SSA form.  */
  fun->gimple_df->in_ssa_p = false;
  loops_state_clear (LOOP_CLOSED_SSA);
  /* Expansion is used by optimization passes too, set maybe_hot_insn_p
     conservatively to true until they are all profile aware.  */
  delete lab_rtx_for_bb;
  free_histograms (fun);

  construct_exit_block ();
  insn_locations_finalize ();
  if (var_ret_seq)
    {
      rtx_insn *after = return_label;
      rtx_insn *next = NEXT_INSN (after);
      if (next && NOTE_INSN_BASIC_BLOCK_P (next))
	after = next;
      emit_insn_after (var_ret_seq, after);
    }
  /* Zap the tree EH table.  */
  set_eh_throw_stmt_table (fun, NULL);

  /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
     split edges which edge insertions might do.  */
  rebuild_jump_labels (get_insns ());
  /* If we have a single successor to the entry block, put the pending insns
     after parm birth, but before NOTE_INSNS_FUNCTION_BEG.  */
  if (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
    {
      edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fun));

      if (e->insns.r)
	{
	  rtx_insn *insns = e->insns.r;
	  e->insns.r = NULL;
	  rebuild_jump_labels_chain (insns);
	  if (NOTE_P (parm_birth_insn)
	      && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
	    emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
	  else
	    emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
	}
    }
  /* Otherwise, as well as for other edges, take the usual way.  */
  commit_edge_insertions ();

  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  flush_mark_addressable_queue ();
  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
		  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
	  e->flags &= ~EDGE_EXECUTABLE;

	  /* At the moment not all abnormal edges match the RTL
	     representation.  It is safe to remove them here as
	     find_many_sub_basic_blocks will rediscover them.
	     In the future we should get this fixed properly.  */
	  if ((e->flags & EDGE_ABNORMAL)
	      && !(e->flags & EDGE_SIBCALL))
	    remove_edge (e);
	  else
	    ei_next (&ei);
	}
    }
  auto_sbitmap blocks (last_basic_block_for_fn (fun));
  bitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  purge_all_dead_edges ();
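/* Note (added commentary): expansion can introduce control flow in the
   middle of what used to be a single GIMPLE block (throwing calls, sibcalls
   and the like), so find_many_sub_basic_blocks above re-scans every block
   and splits it where needed before the dead edges are purged.  */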
  /* After initial rtl generation, call back to finish generating
     exception support code.  We need to do this before cleaning up
     the CFG as the code does not expect dead landing pads.  */
  if (fun->eh->region_tree != NULL)
    finish_eh_generation ();

  /* Call expand_stack_alignment after finishing all
     updates to crtl->preferred_stack_boundary.  */
  expand_stack_alignment ();
  /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
     function.  */
  if (crtl->tail_call_emit)
    fixup_tail_calls ();

  /* BB subdivision may have created basic blocks that are only reachable
     from unlikely bbs but not marked as such in the profile.  */
  if (optimize)
    propagate_unlikely_bbs_forward ();
  /* Remove unreachable blocks, otherwise we cannot compute dominators
     which are needed for loop state verification.  As a side-effect
     this also compacts blocks.
     ??? We cannot remove trivially dead insns here as for example
     the DRAP reg on i?86 is not magically live at this point.
     gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise.  */
  cleanup_cfg (CLEANUP_NO_INSN_DEL);

  checking_verify_flow_info ();

  /* Initialize pseudos allocated for hard registers.  */
  emit_initial_value_sets ();

  /* And finally unshare all RTL.  */
  unshare_all_rtl ();
  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  if (dump_file)
    {
      fprintf (dump_file,
	       "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }
  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
	 parent != NULL_TREE;
	 parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
	TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  TREE_ASM_WRITTEN (current_function_decl) = 1;
  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;

  /* After expanding, the tm_restart map is no longer needed.  */
  if (fun->gimple_df->tm_restart)
    fun->gimple_df->tm_restart = NULL;

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (fun->decl), 0);
  default_rtl_profile ();
  /* For -dx discard loops now, otherwise IL verify in clean_state will
     ICE.  */
  if (rtl_dump_and_exit)
    {
      cfun->curr_properties &= ~PROP_loops;
      loop_optimizer_finalize ();
    }

  timevar_pop (TV_POST_EXPAND);

  return 0;
}

} // anon namespace
rtl_opt_pass *
make_pass_expand (gcc::context *ctxt)
{
  return new pass_expand (ctxt);
}
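/* Usage sketch (added commentary, not part of the original file): the pass
   manager wires this factory up from passes.def, roughly as

       NEXT_PASS (pass_expand);

   and calls make_pass_expand (ctxt) to obtain the instance it schedules at
   the boundary between the GIMPLE and RTL pipelines.  */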