/* A pass for lowering trees to RTL.
   Copyright (C) 2004-2020 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "tree-pass.h"
#include "regs.h" /* For reg_renumber.  */
#include "diagnostic.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfgcleanup.h"
#include "internal-fn.h"
#include "gimple-iterator.h"
#include "gimple-expr.h"
#include "gimple-walk.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
#include "stringpool.h"
#include "tree-ssa-address.h"
/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#define NAME__MAIN "__main"

/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple *currently_expanding_gimple_stmt;
static rtx expand_debug_expr (tree);

static bool defer_stack_allocation (tree, bool);
static void record_alignment_for_reg_var (unsigned int);
/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple *stmt)
{
  tree t;
  switch (get_gimple_rhs_class (gimple_expr_code (stmt)))
    {
    case GIMPLE_TERNARY_RHS:
      t = build3 (gimple_assign_rhs_code (stmt),
                  TREE_TYPE (gimple_assign_lhs (stmt)),
                  gimple_assign_rhs1 (stmt), gimple_assign_rhs2 (stmt),
                  gimple_assign_rhs3 (stmt));
      break;
    case GIMPLE_BINARY_RHS:
      t = build2 (gimple_assign_rhs_code (stmt),
                  TREE_TYPE (gimple_assign_lhs (stmt)),
                  gimple_assign_rhs1 (stmt), gimple_assign_rhs2 (stmt));
      break;
    case GIMPLE_UNARY_RHS:
      t = build1 (gimple_assign_rhs_code (stmt),
                  TREE_TYPE (gimple_assign_lhs (stmt)),
                  gimple_assign_rhs1 (stmt));
      break;
    case GIMPLE_SINGLE_RHS:
      {
        t = gimple_assign_rhs1 (stmt);
        /* Avoid modifying this tree in place below.  */
        if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
             && gimple_location (stmt) != EXPR_LOCATION (t))
            || (gimple_block (stmt) && currently_expanding_to_rtl
                && EXPR_P (t)))
          t = copy_node (t);
        break;
      }
    default:
      gcc_unreachable ();
    }

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));

  return t;
}
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
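/* For example, for an SSA_NAME such as i_3 whose SSA_NAME_VAR is the user
   variable i, SSAVAR yields i; for a plain VAR_DECL or PARM_DECL it simply
   yields the decl itself.  */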
/* Choose either CUR or NEXT as the leader DECL for a partition.
   Prefer ignored decls, to simplify debug dumps and reduce ambiguity
   out of the same user variable being in multiple partitions (this is
   less likely for compiler-introduced temps).  */

static tree
leader_merge (tree cur, tree next)
{
  if (cur == NULL || cur == next)
    return next;

  if (DECL_P (cur) && DECL_IGNORED_P (cur))
    return cur;

  if (DECL_P (next) && DECL_IGNORED_P (next))
    return next;

  return cur;
}
/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */
static inline void
set_rtl (tree t, rtx x)
{
  gcc_checking_assert (!x
                       || !(TREE_CODE (t) == SSA_NAME || is_gimple_reg (t))
                       || (use_register_for_decl (t)
                           ? (REG_P (x)
                              || (GET_CODE (x) == CONCAT
                                  && (REG_P (XEXP (x, 0))
                                      || SUBREG_P (XEXP (x, 0)))
                                  && (REG_P (XEXP (x, 1))
                                      || SUBREG_P (XEXP (x, 1))))
                              /* We need to accept PARALLELs for RESULT_DECLs
                                 because of vector types with BLKmode returned
                                 in multiple registers, but they are supposed
                                 to be uncoalesced.  */
                              || (GET_CODE (x) == PARALLEL
                                  && SSAVAR (t)
                                  && TREE_CODE (SSAVAR (t)) == RESULT_DECL
                                  && (GET_MODE (x) == BLKmode
                                      || !flag_tree_coalesce_vars)))
                           : (MEM_P (x) || x == pc_rtx
                              || (GET_CODE (x) == CONCAT
                                  && MEM_P (XEXP (x, 0))
                                  && MEM_P (XEXP (x, 1))))));
  /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and
     RESULT_DECLs has the expected mode.  For memory, we accept
     unpromoted modes, since that's what we're likely to get.  For
     PARM_DECLs and RESULT_DECLs, we'll have been called by
     set_parm_rtl, which will give us the default def, so we don't
     have to compute it ourselves.  For RESULT_DECLs, we accept mode
     mismatches too, as long as we have BLKmode or are not coalescing
     across variables, so that we don't reject BLKmode PARALLELs or
     unpromoted REGs.  */
  gcc_checking_assert (!x || x == pc_rtx || TREE_CODE (t) != SSA_NAME
                       || (SSAVAR (t)
                           && TREE_CODE (SSAVAR (t)) == RESULT_DECL
                           && (promote_ssa_mode (t, NULL) == BLKmode
                               || !flag_tree_coalesce_vars))
                       || !use_register_for_decl (t)
                       || GET_MODE (x) == promote_ssa_mode (t, NULL));
  if (x && x != pc_rtx)
    {
      bool skip = false;
      tree cur = NULL_TREE;
      rtx xm = x;

    retry:
      if (MEM_P (xm))
        cur = MEM_EXPR (xm);
      else if (REG_P (xm))
        cur = REG_EXPR (xm);
      else if (SUBREG_P (xm))
        {
          gcc_assert (subreg_lowpart_p (xm));
          xm = SUBREG_REG (xm);
          goto retry;
        }
      else if (GET_CODE (xm) == CONCAT)
        {
          xm = XEXP (xm, 0);
          goto retry;
        }
      else if (GET_CODE (xm) == PARALLEL)
        {
          xm = XVECEXP (xm, 0, 0);
          gcc_assert (GET_CODE (xm) == EXPR_LIST);
          xm = XEXP (xm, 0);
          goto retry;
        }
      else if (xm == pc_rtx)
        skip = true;
      else
        gcc_unreachable ();

      tree next = skip ? cur : leader_merge (cur, SSAVAR (t) ? SSAVAR (t) : t);

      if (cur != next)
        {
          if (MEM_P (x))
            set_mem_attributes (x,
                                next && TREE_CODE (next) == SSA_NAME
                                ? TREE_TYPE (next)
                                : next, true);
          else
            set_reg_attrs_for_decl_rtl (next, x);
        }
    }

  if (TREE_CODE (t) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, t);
      if (part != NO_PARTITION)
        {
          if (SA.partition_to_pseudo[part])
            gcc_assert (SA.partition_to_pseudo[part] == x);
          else if (x != pc_rtx)
            SA.partition_to_pseudo[part] = x;
        }
      /* For the benefit of debug information at -O0 (where
         vartracking doesn't run) record the place also in the base
         DECL.  For PARMs and RESULTs, do so only when setting the
         default def.  */
      if (x && x != pc_rtx && SSA_NAME_VAR (t)
          && (VAR_P (SSA_NAME_VAR (t))
              || SSA_NAME_IS_DEFAULT_DEF (t)))
        {
          tree var = SSA_NAME_VAR (t);
          /* If we don't yet have something recorded, just record it now.  */
          if (!DECL_RTL_SET_P (var))
            SET_DECL_RTL (var, x);
          /* If we have it set already to "multiple places" don't
             change this.  */
          else if (DECL_RTL (var) == pc_rtx)
            ;
          /* If we have something recorded and it's not the same place
             as we want to record now, we have multiple partitions for the
             same base variable, with different places.  We can't just
             randomly choose one, hence we have to say that we don't know.
             This only happens with optimization, and there var-tracking
             will figure out the right thing.  */
          else if (DECL_RTL (var) != x)
            SET_DECL_RTL (var, pc_rtx);
        }
    }
  else
    SET_DECL_RTL (t, x);
}
/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
class stack_var
{
public:
  /* The Variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  poly_uint64 size;

  /* The *byte* alignment required for this variable.  Or as, with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  size_t representative;

  /* The next stack variable in the partition, or EOC.  */
  size_t next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((size_t)-1)
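/* For example, a partition whose representative is stack_vars[3] and whose
   other members are indices 7 and 9 is chained as 3 -> 7 -> 9 -> EOC through
   the `next' fields (the indices here are purely illustrative).  */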
/* We have an array of such objects while deciding allocation.  */
static class stack_var *stack_vars;
static size_t stack_vars_alloc;
static size_t stack_vars_num;
static hash_map<tree, size_t> *decl_to_stack_part;
/* Conflict bitmaps go on this obstack.  This allows us to destroy
   all of them in one big sweep.  */
static bitmap_obstack stack_var_bitmap_obstack;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static size_t *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;
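/* Illustrative example (values assumed for concreteness): if
   targetm.starting_frame_offset () were 8 and PREFERRED_STACK_BOUNDARY
   were 128 bits (16 bytes), the computation in expand_used_vars gives
   off = 8 % 16 = 8 and frame_phase = 16 - 8 = 8, so frame offsets are
   biased by 8 bytes before rounding to the stack boundary.  */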
/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;
/* Compute the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl, bool really_expand)
{
  unsigned int align;

  if (TREE_CODE (decl) == SSA_NAME)
    align = TYPE_ALIGN (TREE_TYPE (decl));
  else
    {
      align = LOCAL_DECL_ALIGNMENT (decl);
      /* Don't change DECL_ALIGN when called from estimated_stack_frame_size.
         That is done before IPA and could bump alignment based on host
         backend even for offloaded code which wants different
         LOCAL_DECL_ALIGNMENT.  */
      if (really_expand)
        SET_DECL_ALIGN (decl, align);
    }
  return align / BITS_PER_UNIT;
}
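/* For example, a local whose alignment is 128 bits yields a byte alignment
   of 128 / BITS_PER_UNIT = 16 on the usual 8-bit-byte targets.  */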
/* Align given offset BASE with ALIGN.  Truncate up if ALIGN_UP is true,
   down otherwise.  Return truncated BASE value.  */

static inline unsigned HOST_WIDE_INT
align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
{
  return align_up ? (base + align - 1) & -align : base & -align;
}
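/* Worked example: align_base (37, 16, true) rounds up to 48, while
   align_base (37, 16, false) truncates down to 32.  */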
/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static poly_int64
alloc_stack_frame_space (poly_int64 size, unsigned HOST_WIDE_INT align)
{
  poly_int64 offset, new_frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset
        = aligned_lower_bound (frame_offset - frame_phase - size,
                               align) + frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset
        = aligned_upper_bound (frame_offset - frame_phase,
                               align) + frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}
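/* Illustrative example (assumed values): with FRAME_GROWS_DOWNWARD,
   frame_phase == 0, frame_offset == -12 and a request for 8 bytes at
   8-byte alignment, aligned_lower_bound (-20, 8) gives -24, so the new
   variable is placed at offset -24 and frame_offset becomes -24.  */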
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl, bool really_expand)
{
  class stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
        stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
        stack_vars_alloc = 32;
      stack_vars
        = XRESIZEVEC (class stack_var, stack_vars, stack_vars_alloc);
    }
  if (!decl_to_stack_part)
    decl_to_stack_part = new hash_map<tree, size_t>;

  v = &stack_vars[stack_vars_num];
  decl_to_stack_part->put (decl, stack_vars_num);

  v->decl = decl;
  tree size = TREE_CODE (decl) == SSA_NAME
    ? TYPE_SIZE_UNIT (TREE_TYPE (decl))
    : DECL_SIZE_UNIT (decl);
  v->size = tree_to_poly_uint64 (size);
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (known_eq (v->size, 0U))
    v->size = 1;
  v->alignb = align_local_variable (decl, really_expand);
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}
/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (size_t x, size_t y)
{
  class stack_var *a = &stack_vars[x];
  class stack_var *b = &stack_vars[y];
  if (x == y)
    return;
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}
/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (size_t x, size_t y)
{
  class stack_var *a = &stack_vars[x];
  class stack_var *b = &stack_vars[y];
  if (x == y)
    return false;
  /* Partitions containing an SSA name result from gimple registers
     with things like unsupported modes.  They are top-level and
     hence conflict with everything else.  */
  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
    return true;

  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}
/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   enter its partition number into bitmap DATA.  */

static bool
visit_op (gimple *, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v)
        bitmap_set_bit (active, *v);
    }
  return false;
}
/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   record conflicts between it and all currently active other partitions
   from bitmap DATA.  */

static bool
visit_conflict (gimple *, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      size_t *v = decl_to_stack_part->get (op);
      if (v && bitmap_set_bit (active, *v))
        {
          size_t num = *v;
          bitmap_iterator bi;
          unsigned i;
          gcc_assert (num < stack_vars_num);
          EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
            add_stack_var_conflict (num, i);
        }
    }
  return false;
}
/* Helper routine for add_scope_conflicts, calculating the active partitions
   at the end of BB, leaving the result in WORK.  We're called to generate
   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
   liveness.  */

static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  walk_stmt_load_store_addr_fn visit;

  bitmap_clear (work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (work, (bitmap)e->src->aux);

  visit = visit_op;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
    }
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
        {
          tree lhs = gimple_assign_lhs (stmt);
          size_t *v;
          /* Nested function lowering might introduce LHSs
             that are COMPONENT_REFs.  */
          if (!VAR_P (lhs))
            continue;
          if (DECL_RTL_IF_SET (lhs) == pc_rtx
              && (v = decl_to_stack_part->get (lhs)))
            bitmap_clear_bit (work, *v);
        }
      else if (!is_gimple_debug (stmt))
        {
          if (for_conflict
              && visit == visit_op)
            {
              /* If this is the first real instruction in this BB we need
                 to add conflicts for everything live at this point now.
                 Unlike classical liveness for named objects we can't
                 rely on seeing a def/use of the names we're interested in.
                 There might merely be indirect loads/stores.  We'd not add any
                 conflicts for such partitions.  */
              bitmap_iterator bi;
              unsigned i;
              EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
                {
                  class stack_var *a = &stack_vars[i];
                  if (!a->conflicts)
                    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
                  bitmap_ior_into (a->conflicts, work);
                }
              visit = visit_conflict;
            }
          walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
        }
    }
}
/* Generate stack partition conflicts between all partitions that are
   simultaneously live.  */

static void
add_scope_conflicts (void)
{
  basic_block bb;
  bool changed;
  bitmap work = BITMAP_ALLOC (NULL);
  int *rpo;
  int n_bbs;

  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates in the case we for instance moved an address-taken
     operation upward, without also moving a dereference to it upwards.
     But it's conservatively correct as a variable never can hold values
     before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */

  FOR_ALL_BB_FN (bb, cfun)
    bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);

  rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
  n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);

  changed = true;
  while (changed)
    {
      int i;
      changed = false;
      for (i = 0; i < n_bbs; i++)
        {
          bitmap active;
          bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
          active = (bitmap)bb->aux;
          add_scope_conflicts_1 (bb, work, false);
          if (bitmap_ior_into (active, work))
            changed = true;
        }
    }

  FOR_EACH_BB_FN (bb, cfun)
    add_scope_conflicts_1 (bb, work, true);

  free (rpo);
  BITMAP_FREE (work);
  FOR_ALL_BB_FN (bb, cfun)
    BITMAP_FREE (bb->aux);
}
/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  size_t ia = *(const size_t *)a;
  size_t ib = *(const size_t *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  poly_int64 sizea = stack_vars[ia].size;
  poly_int64 sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing  */
  int diff = compare_sizes_for_sort (sizeb, sizea);
  if (diff != 0)
    return diff;

  /* Tertiary compare on true alignment, decreasing.  */

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
        uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
        return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
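/* For instance, under this ordering a 64-byte object sorts before a
   16-byte one, objects needing "large" (unsupported) alignment come before
   everything else, and among equally sized objects the more strictly
   aligned one comes first.  */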
struct part_traits : unbounded_int_hashmap_traits <size_t, bitmap> {};
typedef hash_map<size_t, bitmap, part_traits> part_hashmap;

/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
                               part_hashmap *decls_to_partitions,
                               hash_set<bitmap> *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
         visit it once.  */
      || visited->add (pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
         || !bitmap_bit_p (temp, i))
        && (part = decls_to_partitions->get (i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}
/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.  */

static void
update_alias_info_with_stack_vars (void)
{
  part_hashmap *decls_to_partitions = NULL;
  size_t i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with single variable.  */
      if (stack_vars[i].representative != i
          || stack_vars[i].next == EOC)
        continue;

      if (!decls_to_partitions)
        {
          decls_to_partitions = new part_hashmap;
          cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
        }

      /* Create an SSA_NAME that points to the partition for use
         as base during alias-oracle queries on RTL for bases that
         have been partitioned.  */
      if (var == NULL_TREE)
        var = create_tmp_var (ptr_type_node);
      name = make_ssa_name (var);

      /* Create bitmaps representing partitions.  They will be used for
         points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          tree decl = stack_vars[j].decl;
          unsigned int uid = DECL_PT_UID (decl);
          bitmap_set_bit (part, uid);
          decls_to_partitions->put (uid, part);
          cfun->gimple_df->decls_to_pointers->put (decl, name);
          if (TREE_ADDRESSABLE (decl))
            TREE_ADDRESSABLE (name) = 1;
        }

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      tree name;
      hash_set<bitmap> visited;
      bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);

      FOR_EACH_SSA_NAME (i, name, cfun)
        {
          struct ptr_info_def *pi;

          if (POINTER_TYPE_P (TREE_TYPE (name))
              && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
            add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
                                           &visited, temp);
        }

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
                                     decls_to_partitions, &visited, temp);

      delete decls_to_partitions;
      BITMAP_FREE (temp);
    }
}
/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (size_t a, size_t b)
{
  class stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
        add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}
/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

        Sort the objects by size in descending order.
        For each object A {
          S = size(A)
          O = 0
          loop {
            Look for the largest non-conflicting object B with size <= S.
            UNION (A, B)
          }
        }
*/
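/* Illustrative sketch (sizes are made up): given three mutually
   non-conflicting objects of 32, 16 and 8 bytes, the 32-byte object is
   processed first and the 16- and 8-byte objects are merged into its
   partition, so all three end up sharing one stack slot region sized for
   the largest member instead of occupying three separate slots.  */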
static void
partition_stack_vars (void)
{
  size_t si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (size_t), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      size_t i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;
      poly_int64 isize = stack_vars[i].size;

      /* Ignore objects that aren't partition representatives.  If we
         see a var that is not a partition representative, it must
         have been merged earlier.  */
      if (stack_vars[i].representative != i)
        continue;

      for (sj = si + 1; sj < n; ++sj)
        {
          size_t j = stack_vars_sorted[sj];
          unsigned int jalign = stack_vars[j].alignb;
          poly_int64 jsize = stack_vars[j].size;

          /* Ignore objects that aren't partition representatives.  */
          if (stack_vars[j].representative != j)
            continue;

          /* Do not mix objects of "small" (supported) alignment
             and "large" (unsupported) alignment.  */
          if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
              != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
            break;

          /* For Address Sanitizer do not mix objects with different
             sizes, as the shorter vars wouldn't be adequately protected.
             Don't do that for "large" (unsupported) alignment objects,
             those aren't protected anyway.  */
          if (asan_sanitize_stack_p ()
              && maybe_ne (isize, jsize)
              && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Ignore conflicting objects.  */
          if (stack_var_conflict_p (i, j))
            continue;

          /* UNION the objects, placing J at OFFSET.  */
          union_stack_vars (i, j);
        }
    }

  update_alias_info_with_stack_vars ();
}
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  size_t si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      fprintf (dump_file, "Partition %lu: size ", (unsigned long) i);
      print_dec (stack_vars[i].size, dump_file);
      fprintf (dump_file, " align %u\n", stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
        {
          fputc ('\t', dump_file);
          print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
        }
      fputc ('\n', dump_file);
    }
}
987 expand_one_stack_var_at (tree decl
, rtx base
, unsigned base_align
,
993 /* If this fails, we've overflowed the stack frame. Error nicely? */
994 gcc_assert (known_eq (offset
, trunc_int_for_mode (offset
, Pmode
)));
996 x
= plus_constant (Pmode
, base
, offset
);
997 x
= gen_rtx_MEM (TREE_CODE (decl
) == SSA_NAME
998 ? TYPE_MODE (TREE_TYPE (decl
))
999 : DECL_MODE (SSAVAR (decl
)), x
);
1001 if (TREE_CODE (decl
) != SSA_NAME
)
1003 /* Set alignment we actually gave this decl if it isn't an SSA name.
1004 If it is we generate stack slots only accidentally so it isn't as
1005 important, we'll simply use the alignment that is already set. */
1006 if (base
== virtual_stack_vars_rtx
)
1007 offset
-= frame_phase
;
1008 align
= known_alignment (offset
);
1009 align
*= BITS_PER_UNIT
;
1010 if (align
== 0 || align
> base_align
)
1013 /* One would think that we could assert that we're not decreasing
1014 alignment here, but (at least) the i386 port does exactly this
1015 via the MINIMUM_ALIGNMENT hook. */
1017 SET_DECL_ALIGN (decl
, align
);
1018 DECL_USER_ALIGN (decl
) = 0;
class stack_vars_data
{
public:
  /* Vector of offset pairs, always end of some padding followed
     by start of the padding that needs Address Sanitizer protection.
     The vector is in reversed order: highest offset pairs come first.  */
  auto_vec<HOST_WIDE_INT> asan_vec;

  /* Vector of partition representative decls in between the paddings.  */
  auto_vec<tree> asan_decl_vec;

  /* Base pseudo register for Address Sanitizer protected automatic vars.  */
  rtx asan_base;

  /* Alignment needed for the Address Sanitizer protected automatic vars.  */
  unsigned int asan_alignb;
};
/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (size_t), class stack_vars_data *data)
{
  size_t si, i, j, n = stack_vars_num;
  poly_uint64 large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  unsigned large_align = 0;
  bool large_allocation_done = false;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
        {
          unsigned alignb;

          i = stack_vars_sorted[si];
          alignb = stack_vars[i].alignb;

          /* All "large" alignment decls come before all "small" alignment
             decls, but "large" alignment decls are not sorted based on
             their alignment.  Increase large_align to track the largest
             required alignment.  */
          if ((alignb * BITS_PER_UNIT) > large_align)
            large_align = alignb * BITS_PER_UNIT;

          /* Stop when we get to the first decl with "small" alignment.  */
          if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
            break;

          /* Skip variables that aren't partition representatives.  */
          if (stack_vars[i].representative != i)
            continue;

          /* Skip variables that have already had rtl assigned.  See also
             add_stack_var where we perpetrate this pc_rtx hack.  */
          decl = stack_vars[i].decl;
          if (TREE_CODE (decl) == SSA_NAME
              ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
              : DECL_RTL (decl) != pc_rtx)
            continue;

          large_size = aligned_upper_bound (large_size, alignb);
          large_size += stack_vars[i].size;
        }
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      poly_int64 offset;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
        continue;

      /* Skip variables that have already had rtl assigned.  See also
         add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if (TREE_CODE (decl) == SSA_NAME
          ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
          : DECL_RTL (decl) != pc_rtx)
        continue;

      /* Check the predicate to see whether this variable should be
         allocated in this pass.  */
      if (pred && !pred (i))
        continue;

      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
        {
          base = virtual_stack_vars_rtx;
          /* ASAN description strings don't yet have a syntax for expressing
             polynomial offsets.  */
          HOST_WIDE_INT prev_offset;
          if (asan_sanitize_stack_p ()
              && pred
              && frame_offset.is_constant (&prev_offset)
              && stack_vars[i].size.is_constant ())
            {
              if (data->asan_vec.is_empty ())
                {
                  alloc_stack_frame_space (0, ASAN_RED_ZONE_SIZE);
                  prev_offset = frame_offset.to_constant ();
                }
              prev_offset = align_base (prev_offset,
                                        ASAN_MIN_RED_ZONE_SIZE,
                                        !FRAME_GROWS_DOWNWARD);
              tree repr_decl = NULL_TREE;
              unsigned HOST_WIDE_INT size
                = asan_var_and_redzone_size (stack_vars[i].size.to_constant ());
              if (data->asan_vec.is_empty ())
                size = MAX (size, ASAN_RED_ZONE_SIZE);

              unsigned HOST_WIDE_INT alignment = MAX (alignb,
                                                      ASAN_MIN_RED_ZONE_SIZE);
              offset = alloc_stack_frame_space (size, alignment);

              data->asan_vec.safe_push (prev_offset);
              /* Allocating a constant amount of space from a constant
                 starting offset must give a constant result.  */
              data->asan_vec.safe_push ((offset + stack_vars[i].size)
                                        .to_constant ());
              /* Find best representative of the partition.
                 Prefer those with DECL_NAME, even better
                 satisfying asan_protect_stack_decl predicate.  */
              for (j = i; j != EOC; j = stack_vars[j].next)
                if (asan_protect_stack_decl (stack_vars[j].decl)
                    && DECL_NAME (stack_vars[j].decl))
                  {
                    repr_decl = stack_vars[j].decl;
                    break;
                  }
                else if (repr_decl == NULL_TREE
                         && DECL_P (stack_vars[j].decl)
                         && DECL_NAME (stack_vars[j].decl))
                  repr_decl = stack_vars[j].decl;
              if (repr_decl == NULL_TREE)
                repr_decl = stack_vars[i].decl;
              data->asan_decl_vec.safe_push (repr_decl);

              /* Make sure a representative is unpoisoned if another
                 variable in the partition is handled by
                 use-after-scope sanitization.  */
              if (asan_handled_variables != NULL
                  && !asan_handled_variables->contains (repr_decl))
                {
                  for (j = i; j != EOC; j = stack_vars[j].next)
                    if (asan_handled_variables->contains (stack_vars[j].decl))
                      break;
                  if (j != EOC)
                    asan_handled_variables->add (repr_decl);
                }

              data->asan_alignb = MAX (data->asan_alignb, alignb);
              if (data->asan_base == NULL)
                data->asan_base = gen_reg_rtx (Pmode);
              base = data->asan_base;

              if (!STRICT_ALIGNMENT)
                base_align = crtl->max_used_stack_slot_alignment;
              else
                base_align = MAX (crtl->max_used_stack_slot_alignment,
                                  GET_MODE_ALIGNMENT (SImode)
                                  << ASAN_SHADOW_SHIFT);
            }
          else
            {
              offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
              base_align = crtl->max_used_stack_slot_alignment;
            }
        }
      else
        {
          /* Large alignment is only processed in the last pass.  */
          if (pred)
            continue;

          /* If there were any variables requiring "large" alignment, allocate
             space.  */
          if (maybe_ne (large_size, 0U) && ! large_allocation_done)
            {
              poly_int64 loffset;
              rtx large_allocsize;

              large_allocsize = gen_int_mode (large_size, Pmode);
              get_dynamic_stack_size (&large_allocsize, 0, large_align, NULL);
              loffset = alloc_stack_frame_space
                (rtx_to_poly_int64 (large_allocsize),
                 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT);
              large_base = get_dynamic_stack_base (loffset, large_align);
              large_allocation_done = true;
            }
          gcc_assert (large_base != NULL);

          large_alloc = aligned_upper_bound (large_alloc, alignb);
          offset = large_alloc;
          large_alloc += stack_vars[i].size;

          base = large_base;
          base_align = large_align;
        }

      /* Create rtl for each variable based on their location within the
         partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
        expand_one_stack_var_at (stack_vars[j].decl,
                                 base, base_align,
                                 offset);
    }

  gcc_assert (known_eq (large_alloc, large_size));
}
1253 account_stack_vars (void)
1255 size_t si
, j
, i
, n
= stack_vars_num
;
1256 poly_uint64 size
= 0;
1258 for (si
= 0; si
< n
; ++si
)
1260 i
= stack_vars_sorted
[si
];
1262 /* Skip variables that aren't partition representatives, for now. */
1263 if (stack_vars
[i
].representative
!= i
)
1266 size
+= stack_vars
[i
].size
;
1267 for (j
= i
; j
!= EOC
; j
= stack_vars
[j
].next
)
1268 set_rtl (stack_vars
[j
].decl
, NULL
);
1273 /* Record the RTL assignment X for the default def of PARM. */
1276 set_parm_rtl (tree parm
, rtx x
)
1278 gcc_assert (TREE_CODE (parm
) == PARM_DECL
1279 || TREE_CODE (parm
) == RESULT_DECL
);
1281 if (x
&& !MEM_P (x
))
1283 unsigned int align
= MINIMUM_ALIGNMENT (TREE_TYPE (parm
),
1284 TYPE_MODE (TREE_TYPE (parm
)),
1285 TYPE_ALIGN (TREE_TYPE (parm
)));
1287 /* If the variable alignment is very large we'll dynamicaly
1288 allocate it, which means that in-frame portion is just a
1289 pointer. ??? We've got a pseudo for sure here, do we
1290 actually dynamically allocate its spilling area if needed?
1291 ??? Isn't it a problem when Pmode alignment also exceeds
1292 MAX_SUPPORTED_STACK_ALIGNMENT, as can happen on cris and lm32? */
1293 if (align
> MAX_SUPPORTED_STACK_ALIGNMENT
)
1294 align
= GET_MODE_ALIGNMENT (Pmode
);
1296 record_alignment_for_reg_var (align
);
1299 tree ssa
= ssa_default_def (cfun
, parm
);
1301 return set_rtl (parm
, x
);
1303 int part
= var_to_partition (SA
.map
, ssa
);
1304 gcc_assert (part
!= NO_PARTITION
);
1306 bool changed
= bitmap_bit_p (SA
.partitions_for_parm_default_defs
, part
);
1307 gcc_assert (changed
);
1310 gcc_assert (DECL_RTL (parm
) == x
);
/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var_1 (tree var)
{
  poly_uint64 size;
  poly_int64 offset;
  unsigned byte_align;

  if (TREE_CODE (var) == SSA_NAME)
    {
      tree type = TREE_TYPE (var);
      size = tree_to_poly_uint64 (TYPE_SIZE_UNIT (type));
      byte_align = TYPE_ALIGN_UNIT (type);
    }
  else
    {
      size = tree_to_poly_uint64 (DECL_SIZE_UNIT (var));
      byte_align = align_local_variable (var, true);
    }

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  offset = alloc_stack_frame_space (size, byte_align);

  expand_one_stack_var_at (var, virtual_stack_vars_rtx,
                           crtl->max_used_stack_slot_alignment, offset);
}
/* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are
   already assigned some MEM.  */

static void
expand_one_stack_var (tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, var);
      if (part != NO_PARTITION)
        {
          rtx x = SA.partition_to_pseudo[part];
          gcc_assert (x);
          gcc_assert (MEM_P (x));
          return;
        }
    }

  return expand_one_stack_var_1 (var);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}
/* Record the alignment requirements of some variable assigned to a
   pseudo register.  */

static void
record_alignment_for_reg_var (unsigned int align)
{
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after stack
         realign decision made.  */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;
}
/* Create RTL for an SSA partition.  */

static void
expand_one_ssa_partition (tree var)
{
  int part = var_to_partition (SA.map, var);
  gcc_assert (part != NO_PARTITION);

  if (SA.partition_to_pseudo[part])
    return;

  unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                          TYPE_MODE (TREE_TYPE (var)),
                                          TYPE_ALIGN (TREE_TYPE (var)));

  /* If the variable alignment is very large we'll dynamically allocate
     it, which means that in-frame portion is just a pointer.  */
  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    align = GET_MODE_ALIGNMENT (Pmode);

  record_alignment_for_reg_var (align);

  if (!use_register_for_decl (var))
    {
      if (defer_stack_allocation (var, true))
        add_stack_var (var, true);
      else
        expand_one_stack_var_1 (var);
      return;
    }

  machine_mode reg_mode = promote_ssa_mode (var, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* For a promoted variable, X will not be used directly but wrapped in a
     SUBREG with SUBREG_PROMOTED_VAR_P set, which means that the RTL land
     will assume that its upper bits can be inferred from its lower bits.
     Therefore, if X isn't initialized on every path from the entry, then
     we must do it manually in order to fulfill the above assumption.  */
  if (reg_mode != TYPE_MODE (TREE_TYPE (var))
      && bitmap_bit_p (SA.partitions_for_undefined_values, part))
    emit_move_insn (x, CONST0_RTX (reg_mode));
}
/* Record the association between the RTL generated for partition PART
   and the underlying variable of the SSA_NAME VAR.  */

static void
adjust_one_expanded_partition_var (tree var)
{
  if (!var)
    return;

  tree decl = SSA_NAME_VAR (var);

  int part = var_to_partition (SA.map, var);
  if (part == NO_PARTITION)
    return;

  rtx x = SA.partition_to_pseudo[part];

  gcc_assert (x);

  set_rtl (var, x);

  if (!REG_P (x))
    return;

  /* Note if the object is a user variable.  */
  if (decl && !DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (decl ? TREE_TYPE (decl) : TREE_TYPE (var)))
    mark_reg_pointer (x, get_pointer_alignment (var));
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, var);
      if (part != NO_PARTITION)
        {
          rtx x = SA.partition_to_pseudo[part];
          gcc_assert (x);
          gcc_assert (REG_P (x));
          return;
        }
      gcc_unreachable ();
    }

  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, get_pointer_alignment (var));
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}
/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  tree size_unit = TREE_CODE (var) == SSA_NAME
    ? TYPE_SIZE_UNIT (TREE_TYPE (var))
    : DECL_SIZE_UNIT (var);
  poly_uint64 size;

  /* Whether the variable is small enough for immediate allocation not to be
     a problem with regard to the frame size.  */
  bool smallish
    = (poly_int_tree_p (size_unit, &size)
       && (estimated_poly_value (size)
           < param_min_size_for_stack_sharing));
1550 < param_min_size_for_stack_sharing
));
1552 /* If stack protection is enabled, *all* stack variables must be deferred,
1553 so that we can re-order the strings to the top of the frame.
1554 Similarly for Address Sanitizer. */
1555 if (flag_stack_protect
|| asan_sanitize_stack_p ())
1558 unsigned int align
= TREE_CODE (var
) == SSA_NAME
1559 ? TYPE_ALIGN (TREE_TYPE (var
))
1562 /* We handle "large" alignment via dynamic allocation. We want to handle
1563 this extra complication in only one place, so defer them. */
1564 if (align
> MAX_SUPPORTED_STACK_ALIGNMENT
)
1567 bool ignored
= TREE_CODE (var
) == SSA_NAME
1568 ? !SSAVAR (var
) || DECL_IGNORED_P (SSA_NAME_VAR (var
))
1569 : DECL_IGNORED_P (var
);
1571 /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
1572 might be detached from their block and appear at toplevel when we reach
1573 here. We want to coalesce them with variables from other blocks when
1574 the immediate contribution to the frame size would be noticeable. */
1575 if (toplevel
&& optimize
> 0 && ignored
&& !smallish
)
1578 /* Variables declared in the outermost scope automatically conflict
1579 with every other variable. The only reason to want to defer them
1580 at all is that, after sorting, we can more efficiently pack
1581 small variables in the stack frame. Continue to defer at -O2. */
1582 if (toplevel
&& optimize
< 2)
1585 /* Without optimization, *most* variables are allocated from the
1586 stack, which makes the quadratic problem large exactly when we
1587 want compilation to proceed as quickly as possible. On the
1588 other hand, we don't want the function's stack frame size to
1589 get completely out of hand. So we avoid adding scalars and
1590 "small" aggregates to the list at all. */
1591 if (optimize
== 0 && smallish
)
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return stack usage this variable is supposed to take.  */

static poly_uint64
expand_one_var (tree var, bool toplevel, bool really_expand)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && VAR_P (var))
    {
      if (is_global_var (var))
        return 0;

      /* Because we don't know if VAR will be in register or on stack,
         we conservatively assume it will be on stack even if VAR is
         eventually put into register after RA pass.  For non-automatic
         variables, which won't be on stack, we collect alignment of
         type and ignore user specified alignment.  Similarly for
         SSA_NAMEs for which use_register_for_decl returns true.  */
      if (TREE_STATIC (var)
          || DECL_EXTERNAL (var)
          || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
        align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
                                   TYPE_MODE (TREE_TYPE (var)),
                                   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
               || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
        /* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
           or variables which were assigned a stack slot already by
           expand_one_stack_var_at - in the latter case DECL_ALIGN has been
           changed from the offset chosen to it.  */
        align = crtl->stack_alignment_estimated;
      else
        align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
         it, which means that in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
        align = GET_MODE_ALIGNMENT (Pmode);
    }

  record_alignment_for_reg_var (align);

  poly_uint64 size;
  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (!VAR_P (var)
                  || (!DECL_EXTERNAL (var)
                      && !DECL_HAS_VALUE_EXPR_P (var)
                      && !TREE_STATIC (var)
                      && TREE_TYPE (var) != error_mark_node
                      && !DECL_HARD_REGISTER (var)
                      && really_expand));
    }
  if (!VAR_P (var) && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
        expand_one_error_var (var);
    }
  else if (VAR_P (var) && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
        {
          expand_one_hard_reg_var (var);
          if (!DECL_HARD_REGISTER (var))
            /* Invalid register specification.  */
            expand_one_error_var (var);
        }
    }
  else if (use_register_for_decl (var))
    {
      if (really_expand)
        expand_one_register_var (origvar);
    }
  else if (!poly_int_tree_p (DECL_SIZE_UNIT (var), &size)
           || !valid_constant_size_p (DECL_SIZE_UNIT (var)))
    {
      /* Reject variables which cover more than half of the address-space.  */
      if (really_expand)
        {
          if (DECL_NONLOCAL_FRAME (var))
            error_at (DECL_SOURCE_LOCATION (current_function_decl),
                      "total size of local objects is too large");
          else
            error_at (DECL_SOURCE_LOCATION (var),
                      "size of variable %q+D is too large", var);
          expand_one_error_var (var);
        }
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar, really_expand);
  else
    {
      if (really_expand)
        {
          if (lookup_attribute ("naked",
                                DECL_ATTRIBUTES (current_function_decl)))
            error ("cannot allocate stack for variable %q+D, naked function",
                   var);

          expand_one_stack_var (origvar);
        }
      return size;
    }
  return 0;
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel)
{
  tree t;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t; t = DECL_CHAIN (t))
    if (TREE_USED (t)
        && ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
            || !DECL_NONSHAREABLE (t)))
      expand_one_var (t, toplevel, true);

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false);
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    if ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
        || !DECL_NONSHAREABLE (t))
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}
enum {
  SPCT_FLAG_DEFAULT = 1,
  SPCT_FLAG_ALL = 2,
  SPCT_FLAG_STRONG = 3,
  SPCT_FLAG_EXPLICIT = 4
};
/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8
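/* For example, "char buf[64]" classifies as SPCT_HAS_LARGE_CHAR_ARRAY
   | SPCT_HAS_ARRAY when --param ssp-buffer-size is at its usual default
   of 8 (value assumed here), while "int v[4]" yields just SPCT_HAS_ARRAY
   and a plain struct with no arrays yields SPCT_HAS_AGGREGATE.  */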
static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
          || t == signed_char_type_node
          || t == unsigned_char_type_node)
        {
          unsigned HOST_WIDE_INT max = param_ssp_buffer_size;
          unsigned HOST_WIDE_INT len;

          if (!TYPE_SIZE_UNIT (type)
              || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
            len = max;
          else
            len = tree_to_uhwi (TYPE_SIZE_UNIT (type));

          if (len < max)
            ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
          else
            ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
        }
      else
        ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t; t = TREE_CHAIN (t))
        if (TREE_CODE (t) == FIELD_DECL)
          ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  if (flag_stack_protect == SPCT_FLAG_ALL
      || flag_stack_protect == SPCT_FLAG_STRONG
      || (flag_stack_protect == SPCT_FLAG_EXPLICIT
          && lookup_attribute ("stack_protect",
                               DECL_ATTRIBUTES (current_function_decl))))
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
          && !(bits & SPCT_HAS_AGGREGATE))
        ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
        ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}
/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 1;
}

static bool
stack_protect_decl_phase_2 (size_t i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 2;
}
/* And helper function that checks for asan phase (with stack protector
   it is phase 3).  This is used as callback for expand_stack_vars.
   Returns true if any of the vars in the partition need to be protected.  */

static bool
asan_decl_phase_3 (size_t i)
{
  while (i != EOC)
    {
      if (asan_protect_stack_decl (stack_vars[i].decl))
        return true;
      i = stack_vars[i].next;
    }
  return false;
}
/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.
   Return true if there are any address taken variables.  */

static bool
add_stack_protection_conflicts (void)
{
  size_t i, j, n = stack_vars_num;
  unsigned char *phase;
  bool ret = false;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    {
      phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
      if (TREE_ADDRESSABLE (stack_vars[i].decl))
        ret = true;
    }

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = i + 1; j < n; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
  return ret;
}
1924 create_stack_guard (void)
1926 tree guard
= build_decl (DECL_SOURCE_LOCATION (current_function_decl
),
1927 VAR_DECL
, NULL
, ptr_type_node
);
1928 TREE_THIS_VOLATILE (guard
) = 1;
1929 TREE_USED (guard
) = 1;
1930 expand_one_stack_var (guard
);
1931 crtl
->stack_protect_guard
= guard
;
/* Prepare for expanding variables.  */
static void
init_vars_expansion (void)
{
  /* Conflict bitmaps, and a few related temporary bitmaps, go here.  */
  bitmap_obstack_initialize (&stack_var_bitmap_obstack);

  /* A map from decl to stack partition.  */
  decl_to_stack_part = new hash_map<tree, size_t>;

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
}
/* Free up stack variable graph data.  */
static void
fini_vars_expansion (void)
{
  bitmap_obstack_release (&stack_var_bitmap_obstack);
  if (stack_vars)
    XDELETEVEC (stack_vars);
  if (stack_vars_sorted)
    XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_sorted = NULL;
  stack_vars_alloc = stack_vars_num = 0;
  delete decl_to_stack_part;
  decl_to_stack_part = NULL;
}
/* Make a fair guess for the size of the stack frame of the function
   in NODE.  This doesn't have to be exact, the result is only used in
   the inline heuristics.  So we don't want to run the full stack var
   packing algorithm (which is quadratic in the number of stack vars).
   Instead, we calculate the total size of all stack vars.  This turns
   out to be a pretty fair estimate -- packing of stack vars doesn't
   happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (struct cgraph_node *node)
{
  poly_int64 size = 0;
  size_t i;
  tree var;
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

  push_cfun (fn);

  init_vars_expansion ();

  FOR_EACH_LOCAL_DECL (fn, i, var)
    if (auto_var_in_fn_p (var, fn->decl))
      size += expand_one_var (var, true, false);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (size_t, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
    }

  fini_vars_expansion ();
  pop_cfun ();
  return estimated_poly_value (size);
}
/* Check if the current function has calls that use a return slot.  */

static bool
stack_protect_return_slot_p ()
{
  basic_block bb;

  FOR_ALL_BB_FN (bb, cfun)
    for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
         !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple *stmt = gsi_stmt (gsi);
        /* This assumes that calls to internal-only functions never
           use a return slot.  */
        if (is_gimple_call (stmt)
            && !gimple_call_internal_p (stmt)
            && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
                                  gimple_call_fndecl (stmt)))
          return true;
      }
  return false;
}
/* Expand all variables used in the function.  */

static rtx_insn *
expand_used_vars (void)
{
  tree var, outer_block = DECL_INITIAL (current_function_decl);
  auto_vec<tree> maybe_local_decls;
  rtx_insn *var_end_seq = NULL;
  unsigned i;
  unsigned len;
  bool gen_stack_protect_signal = false;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = targetm.starting_frame_offset () % align;
    frame_phase = off ? align - off : 0;
  }

  /* Set TREE_USED on all variables in the local_decls.  */
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    TREE_USED (var) = 1;
  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  init_vars_expansion ();

  if (targetm.use_pseudo_pic_reg ())
    pic_offset_table_rtx = gen_reg_rtx (Pmode);

  for (i = 0; i < SA.map->num_partitions; i++)
    {
      if (bitmap_bit_p (SA.partitions_for_parm_default_defs, i))
        continue;

      tree var = partition_to_var (SA.map, i);

      gcc_assert (!virtual_operand_p (var));

      expand_one_ssa_partition (var);
    }

  if (flag_stack_protect == SPCT_FLAG_STRONG)
    gen_stack_protect_signal = stack_protect_return_slot_p ();
2072 set are not associated with any block scope. Lay them out. */
2074 len
= vec_safe_length (cfun
->local_decls
);
2075 FOR_EACH_LOCAL_DECL (cfun
, i
, var
)
2077 bool expand_now
= false;
2079 /* Expanded above already. */
2080 if (is_gimple_reg (var
))
2082 TREE_USED (var
) = 0;
2085 /* We didn't set a block for static or extern because it's hard
2086 to tell the difference between a global variable (re)declared
2087 in a local scope, and one that's really declared there to
2088 begin with. And it doesn't really matter much, since we're
2089 not giving them stack space. Expand them now. */
2090 else if (TREE_STATIC (var
) || DECL_EXTERNAL (var
))
2093 /* Expand variables not associated with any block now. Those created by
2094 the optimizers could be live anywhere in the function. Those that
2095 could possibly have been scoped originally and detached from their
2096 block will have their allocation deferred so we coalesce them with
2097 others when optimization is enabled. */
2098 else if (TREE_USED (var
))
2101 /* Finally, mark all variables on the list as used. We'll use
2102 this in a moment when we expand those associated with scopes. */
2103 TREE_USED (var
) = 1;
2106 expand_one_var (var
, true, true);
2109 if (DECL_ARTIFICIAL (var
) && !DECL_IGNORED_P (var
))
2111 rtx rtl
= DECL_RTL_IF_SET (var
);
2113 /* Keep artificial non-ignored vars in cfun->local_decls
2114 chain until instantiate_decls. */
2115 if (rtl
&& (MEM_P (rtl
) || GET_CODE (rtl
) == CONCAT
))
2116 add_local_decl (cfun
, var
);
2117 else if (rtl
== NULL_RTX
)
2118 /* If rtl isn't set yet, which can happen e.g. with
2119 -fstack-protector, retry before returning from this
2121 maybe_local_decls
.safe_push (var
);
  /* We duplicated some of the decls in CFUN->LOCAL_DECLS.

     +-----------------+-----------------+
     | ...processed... | ...duplicates...|
     +-----------------+-----------------+
                       |
                       +-- LEN points here.

     We just want the duplicates, as those are the artificial
     non-ignored vars that we want to keep until instantiate_decls.
     Move them down and truncate the array.  */
  if (!vec_safe_is_empty (cfun->local_decls))
    cfun->local_decls->block_remove (0, len);

  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true);
  if (stack_vars_num > 0)
    {
      bool has_addressable_vars = false;

      add_scope_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect != 0
          && (flag_stack_protect != SPCT_FLAG_EXPLICIT
              || (flag_stack_protect == SPCT_FLAG_EXPLICIT
                  && lookup_attribute ("stack_protect",
                                       DECL_ATTRIBUTES (current_function_decl)))))
        has_addressable_vars = add_stack_protection_conflicts ();

      if (flag_stack_protect == SPCT_FLAG_STRONG && has_addressable_vars)
        gen_stack_protect_signal = true;

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  switch (flag_stack_protect)
    {
    case SPCT_FLAG_ALL:
      create_stack_guard ();
      break;

    case SPCT_FLAG_STRONG:
      if (gen_stack_protect_signal
          || cfun->calls_alloca
          || has_protected_decls
          || lookup_attribute ("stack_protect",
                               DECL_ATTRIBUTES (current_function_decl)))
        create_stack_guard ();
      break;

    case SPCT_FLAG_DEFAULT:
      if (cfun->calls_alloca
          || has_protected_decls
          || lookup_attribute ("stack_protect",
                               DECL_ATTRIBUTES (current_function_decl)))
        create_stack_guard ();
      break;

    case SPCT_FLAG_EXPLICIT:
      if (lookup_attribute ("stack_protect",
                            DECL_ATTRIBUTES (current_function_decl)))
        create_stack_guard ();
      break;

    default:
      break;
    }
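  /* In short: SPCT_FLAG_ALL guards every function; SPCT_FLAG_STRONG also
     honours the return-slot signal computed above; SPCT_FLAG_DEFAULT guards
     only frames with alloca calls or protected (e.g. character-array) decls;
     SPCT_FLAG_EXPLICIT requires the stack_protect attribute.  */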
  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      class stack_vars_data data;

      data.asan_base = NULL_RTX;
      data.asan_alignb = 0;

      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1, &data);

          /* Phase 2 contains other kinds of arrays.  */
          if (flag_stack_protect == SPCT_FLAG_ALL
              || flag_stack_protect == SPCT_FLAG_STRONG
              || (flag_stack_protect == SPCT_FLAG_EXPLICIT
                  && lookup_attribute ("stack_protect",
                                       DECL_ATTRIBUTES (current_function_decl))))
            expand_stack_vars (stack_protect_decl_phase_2, &data);
        }

      if (asan_sanitize_stack_p ())
        /* Phase 3, any partitions that need asan protection
           in addition to phase 1 and 2.  */
        expand_stack_vars (asan_decl_phase_3, &data);

      /* ASAN description strings don't yet have a syntax for expressing
         polynomial offsets.  */
      HOST_WIDE_INT prev_offset;
      if (!data.asan_vec.is_empty ()
          && frame_offset.is_constant (&prev_offset))
        {
          HOST_WIDE_INT offset, sz, redzonesz;
          redzonesz = ASAN_RED_ZONE_SIZE;
          sz = data.asan_vec[0] - prev_offset;
          if (data.asan_alignb > ASAN_RED_ZONE_SIZE
              && data.asan_alignb <= 4096
              && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
            redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
                         & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
          /* Allocating a constant amount of space from a constant
             starting offset must give a constant result.  */
          offset = (alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE)
                    .to_constant ());
          data.asan_vec.safe_push (prev_offset);
          data.asan_vec.safe_push (offset);
          /* Leave space for alignment if STRICT_ALIGNMENT.  */
          if (STRICT_ALIGNMENT)
            alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
                                      << ASAN_SHADOW_SHIFT)
                                     / BITS_PER_UNIT, 1);

          var_end_seq
            = asan_emit_stack_protection (virtual_stack_vars_rtx,
                                          data.asan_base,
                                          data.asan_alignb,
                                          data.asan_vec.address (),
                                          data.asan_decl_vec.address (),
                                          data.asan_vec.length ());
        }

      expand_stack_vars (NULL, &data);
    }

  if (asan_sanitize_allocas_p () && cfun->calls_alloca)
    var_end_seq = asan_emit_allocas_unpoison (virtual_stack_dynamic_rtx,
                                              virtual_stack_vars_rtx,
                                              var_end_seq);
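  /* Note: VAR_END_SEQ is not emitted here; it is returned to the caller,
     which is expected to place it near the function's exit so the shadow
     state set up for this frame is torn down on return.  */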
  fini_vars_expansion ();

  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation hasn't assigned
     rtl to them.  */
  FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
    {
      rtx rtl = DECL_RTL_IF_SET (var);

      /* Keep artificial non-ignored vars in cfun->local_decls
         chain until instantiate_decls.  */
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
        add_local_decl (cfun, var);
    }

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (FRAME_GROWS_DOWNWARD)
        frame_offset = aligned_lower_bound (frame_offset, align);
      else
        frame_offset = aligned_upper_bound (frame_offset, align);
    }

  return var_end_seq;
}
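/* Worked example for the FRAME_OFFSET rounding above (illustrative numbers):
   with a 128-bit preferred boundary ALIGN is 16, so on a downward-growing
   frame an offset of -20 is rounded to -32 by aligned_lower_bound, while an
   upward-growing frame would round 20 up to 32.  */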
/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
                         TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}
/* Maps the blocks that do not contain tree labels to rtx labels.  */

static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx_code_label *
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  gimple_stmt_iterator gsi;
  tree lab;

  if (bb->flags & BB_RTL)
    return block_label (bb);

  rtx_code_label **elt = lab_rtx_for_bb->get (bb);
  if (elt)
    return *elt;

  /* Find the tree label if it is present.  */
  for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      glabel *lab_stmt;

      lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi));
      if (!lab_stmt)
        break;

      lab = gimple_label_label (lab_stmt);
      if (DECL_NONLOCAL (lab))
        break;

      return jump_target_rtx (lab);
    }

  rtx_code_label *l = gen_label_rtx ();
  lab_rtx_for_bb->put (bb, l);
  return l;
}
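/* Two properties worth noting about the lookup above: the hash map ensures a
   block without a usable tree label gets exactly one synthesized code label,
   and a DECL_NONLOCAL label is deliberately skipped, so such a block also
   falls through to a fresh label.  */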
/* A subroutine of expand_gimple_cond.  Given E, a fallthrough edge
   of a basic block where we just expanded the conditional at the end,
   possibly clean up the CFG and instruction sequence.  LAST is the
   last instruction before the just emitted jump sequence.  */

static void
maybe_cleanup_end_of_block (edge e, rtx_insn *last)
{
  /* Special case: when jumpif decides that the condition is
     trivial it emits an unconditional jump (and the necessary
     barrier).  But we still have two edges, the fallthru one is
     wrong.  purge_dead_edges would clean this up later.  Unfortunately
     we have to insert insns (and split edges) before
     find_many_sub_basic_blocks and hence before purge_dead_edges.
     But splitting edges might create new blocks which depend on the
     fact that if there are two edges there's no barrier.  So the
     barrier would get lost and verify_flow_info would ICE.  Instead
     of auditing all edge splitters to care for the barrier (which
     normally isn't there in a cleaned CFG), fix it here.  */
  if (BARRIER_P (get_last_insn ()))
    {
      rtx_insn *insn;
      remove_edge (e);
      /* Now, we have a single successor block, if we have insns to
         insert on the remaining edge we potentially will insert
         it at the end of this block (if the dest block isn't feasible)
         in order to avoid splitting the edge.  This insertion will take
         place in front of the last jump.  But we might have emitted
         multiple jumps (conditional and one unconditional) to the
         same destination.  Inserting in front of the last one then
         is a problem.  See PR 40021.  We fix this by deleting all
         jumps except the last unconditional one.  */
      insn = PREV_INSN (get_last_insn ());
      /* Make sure we have an unconditional jump.  Otherwise we're
         confused.  */
      gcc_assert (JUMP_P (insn) && !any_condjump_p (insn));
      for (insn = PREV_INSN (insn); insn != last;)
        {
          insn = PREV_INSN (insn);
          if (JUMP_P (NEXT_INSN (insn)))
            {
              if (!any_condjump_p (NEXT_INSN (insn)))
                {
                  gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn))));
                  delete_insn (NEXT_INSN (NEXT_INSN (insn)));
                }
              delete_insn (NEXT_INSN (insn));
            }
        }
    }
}
2417 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
2418 Returns a new basic block if we've terminated the current basic
2419 block and created a new one. */
2422 expand_gimple_cond (basic_block bb
, gcond
*stmt
)
2424 basic_block new_bb
, dest
;
2427 rtx_insn
*last2
, *last
;
2428 enum tree_code code
;
2431 code
= gimple_cond_code (stmt
);
2432 op0
= gimple_cond_lhs (stmt
);
2433 op1
= gimple_cond_rhs (stmt
);
  /* We're sometimes presented with such code:
       D.123_1 = x < y;
       if (D.123_1 != 0)
         ...
     This would expand to two comparisons which then later might
     be cleaned up by combine.  But some pattern matchers like if-conversion
     work better when there's only one compare, so make up for this
     here as special exception if TER would have made the same change.  */
  if (SA.values
      && TREE_CODE (op0) == SSA_NAME
      && TREE_CODE (TREE_TYPE (op0)) == BOOLEAN_TYPE
      && TREE_CODE (op1) == INTEGER_CST
      && ((gimple_cond_code (stmt) == NE_EXPR
           && integer_zerop (op1))
          || (gimple_cond_code (stmt) == EQ_EXPR
              && integer_onep (op1)))
      && bitmap_bit_p (SA.values, SSA_NAME_VERSION (op0)))
    {
2452 gimple
*second
= SSA_NAME_DEF_STMT (op0
);
2453 if (gimple_code (second
) == GIMPLE_ASSIGN
)
2455 enum tree_code code2
= gimple_assign_rhs_code (second
);
2456 if (TREE_CODE_CLASS (code2
) == tcc_comparison
)
2459 op0
= gimple_assign_rhs1 (second
);
2460 op1
= gimple_assign_rhs2 (second
);
2462 /* If jumps are cheap and the target does not support conditional
2463 compare, turn some more codes into jumpy sequences. */
2464 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2465 && targetm
.gen_ccmp_first
== NULL
)
2467 if ((code2
== BIT_AND_EXPR
2468 && TYPE_PRECISION (TREE_TYPE (op0
)) == 1
2469 && TREE_CODE (gimple_assign_rhs2 (second
)) != INTEGER_CST
)
2470 || code2
== TRUTH_AND_EXPR
)
2472 code
= TRUTH_ANDIF_EXPR
;
2473 op0
= gimple_assign_rhs1 (second
);
2474 op1
= gimple_assign_rhs2 (second
);
2476 else if (code2
== BIT_IOR_EXPR
|| code2
== TRUTH_OR_EXPR
)
2478 code
= TRUTH_ORIF_EXPR
;
2479 op0
= gimple_assign_rhs1 (second
);
2480 op1
= gimple_assign_rhs2 (second
);
  /* Optimize (x % C1) == C2 or (x % C1) != C2 if it is beneficial
     into (x - C2) * C3 < C4.  */
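  /* An illustrative instance (not taken from this file): for a 32-bit
     unsigned X, "X % 3 == 0" can become "X * 0xaaaaaaab < 0x55555556",
     where 0xaaaaaaab is the multiplicative inverse of 3 modulo 2^32.  */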
  if ((code == EQ_EXPR || code == NE_EXPR)
      && TREE_CODE (op0) == SSA_NAME
      && TREE_CODE (op1) == INTEGER_CST)
    code = maybe_optimize_mod_cmp (code, &op0, &op1);
2493 last2
= last
= get_last_insn ();
2495 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
2496 set_curr_insn_location (gimple_location (stmt
));
2498 /* These flags have no purpose in RTL land. */
2499 true_edge
->flags
&= ~EDGE_TRUE_VALUE
;
2500 false_edge
->flags
&= ~EDGE_FALSE_VALUE
;
2502 /* We can either have a pure conditional jump with one fallthru edge or
2503 two-way jump that needs to be decomposed into two basic blocks. */
2504 if (false_edge
->dest
== bb
->next_bb
)
2506 jumpif_1 (code
, op0
, op1
, label_rtx_for_bb (true_edge
->dest
),
2507 true_edge
->probability
);
2508 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
2509 if (true_edge
->goto_locus
!= UNKNOWN_LOCATION
)
2510 set_curr_insn_location (true_edge
->goto_locus
);
2511 false_edge
->flags
|= EDGE_FALLTHRU
;
2512 maybe_cleanup_end_of_block (false_edge
, last
);
2515 if (true_edge
->dest
== bb
->next_bb
)
2517 jumpifnot_1 (code
, op0
, op1
, label_rtx_for_bb (false_edge
->dest
),
2518 false_edge
->probability
);
2519 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
2520 if (false_edge
->goto_locus
!= UNKNOWN_LOCATION
)
2521 set_curr_insn_location (false_edge
->goto_locus
);
2522 true_edge
->flags
|= EDGE_FALLTHRU
;
2523 maybe_cleanup_end_of_block (true_edge
, last
);
2527 jumpif_1 (code
, op0
, op1
, label_rtx_for_bb (true_edge
->dest
),
2528 true_edge
->probability
);
2529 last
= get_last_insn ();
2530 if (false_edge
->goto_locus
!= UNKNOWN_LOCATION
)
2531 set_curr_insn_location (false_edge
->goto_locus
);
2532 emit_jump (label_rtx_for_bb (false_edge
->dest
));
2535 if (BARRIER_P (BB_END (bb
)))
2536 BB_END (bb
) = PREV_INSN (BB_END (bb
));
2537 update_bb_for_insn (bb
);
2539 new_bb
= create_basic_block (NEXT_INSN (last
), get_last_insn (), bb
);
2540 dest
= false_edge
->dest
;
2541 redirect_edge_succ (false_edge
, new_bb
);
2542 false_edge
->flags
|= EDGE_FALLTHRU
;
2543 new_bb
->count
= false_edge
->count ();
2544 loop_p loop
= find_common_loop (bb
->loop_father
, dest
->loop_father
);
2545 add_bb_to_loop (new_bb
, loop
);
2546 if (loop
->latch
== bb
2547 && loop
->header
== dest
)
2548 loop
->latch
= new_bb
;
2549 make_single_succ_edge (new_bb
, dest
, 0);
2550 if (BARRIER_P (BB_END (new_bb
)))
2551 BB_END (new_bb
) = PREV_INSN (BB_END (new_bb
));
2552 update_bb_for_insn (new_bb
);
2554 maybe_dump_rtl_for_gimple_stmt (stmt
, last2
);
2556 if (true_edge
->goto_locus
!= UNKNOWN_LOCATION
)
2558 set_curr_insn_location (true_edge
->goto_locus
);
2559 true_edge
->goto_locus
= curr_insn_location ();
2565 /* Mark all calls that can have a transaction restart. */
2568 mark_transaction_restart_calls (gimple
*stmt
)
2570 struct tm_restart_node dummy
;
2571 tm_restart_node
**slot
;
2573 if (!cfun
->gimple_df
->tm_restart
)
2577 slot
= cfun
->gimple_df
->tm_restart
->find_slot (&dummy
, NO_INSERT
);
2580 struct tm_restart_node
*n
= *slot
;
2581 tree list
= n
->label_or_list
;
2584 for (insn
= next_real_insn (get_last_insn ());
2586 insn
= next_real_insn (insn
))
2589 if (TREE_CODE (list
) == LABEL_DECL
)
2590 add_reg_note (insn
, REG_TM
, label_rtx (list
));
2592 for (; list
; list
= TREE_CHAIN (list
))
2593 add_reg_note (insn
, REG_TM
, label_rtx (TREE_VALUE (list
)));
2597 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2601 expand_call_stmt (gcall
*stmt
)
2603 tree exp
, decl
, lhs
;
2607 if (gimple_call_internal_p (stmt
))
2609 expand_internal_call (stmt
);
2613 /* If this is a call to a built-in function and it has no effect other
2614 than setting the lhs, try to implement it using an internal function
2616 decl
= gimple_call_fndecl (stmt
);
2617 if (gimple_call_lhs (stmt
)
2618 && !gimple_has_side_effects (stmt
)
2619 && (optimize
|| (decl
&& called_as_built_in (decl
))))
2621 internal_fn ifn
= replacement_internal_fn (stmt
);
2622 if (ifn
!= IFN_LAST
)
2624 expand_internal_call (ifn
, stmt
);
2629 exp
= build_vl_exp (CALL_EXPR
, gimple_call_num_args (stmt
) + 3);
2631 CALL_EXPR_FN (exp
) = gimple_call_fn (stmt
);
2632 builtin_p
= decl
&& fndecl_built_in_p (decl
);
2634 /* If this is not a builtin function, the function type through which the
2635 call is made may be different from the type of the function. */
2638 = fold_convert (build_pointer_type (gimple_call_fntype (stmt
)),
2639 CALL_EXPR_FN (exp
));
2641 TREE_TYPE (exp
) = gimple_call_return_type (stmt
);
2642 CALL_EXPR_STATIC_CHAIN (exp
) = gimple_call_chain (stmt
);
2644 for (i
= 0; i
< gimple_call_num_args (stmt
); i
++)
2646 tree arg
= gimple_call_arg (stmt
, i
);
2648 /* TER addresses into arguments of builtin functions so we have a
2649 chance to infer more correct alignment information. See PR39954. */
2651 && TREE_CODE (arg
) == SSA_NAME
2652 && (def
= get_gimple_for_ssa_name (arg
))
2653 && gimple_assign_rhs_code (def
) == ADDR_EXPR
)
2654 arg
= gimple_assign_rhs1 (def
);
2655 CALL_EXPR_ARG (exp
, i
) = arg
;
2658 if (gimple_has_side_effects (stmt
))
2659 TREE_SIDE_EFFECTS (exp
) = 1;
2661 if (gimple_call_nothrow_p (stmt
))
2662 TREE_NOTHROW (exp
) = 1;
2664 if (gimple_no_warning_p (stmt
))
2665 TREE_NO_WARNING (exp
) = 1;
2667 CALL_EXPR_TAILCALL (exp
) = gimple_call_tail_p (stmt
);
2668 CALL_EXPR_MUST_TAIL_CALL (exp
) = gimple_call_must_tail_p (stmt
);
2669 CALL_EXPR_RETURN_SLOT_OPT (exp
) = gimple_call_return_slot_opt_p (stmt
);
2671 && fndecl_built_in_p (decl
, BUILT_IN_NORMAL
)
2672 && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl
)))
2673 CALL_ALLOCA_FOR_VAR_P (exp
) = gimple_call_alloca_for_var_p (stmt
);
2675 CALL_FROM_THUNK_P (exp
) = gimple_call_from_thunk_p (stmt
);
2676 CALL_EXPR_VA_ARG_PACK (exp
) = gimple_call_va_arg_pack_p (stmt
);
2677 CALL_EXPR_BY_DESCRIPTOR (exp
) = gimple_call_by_descriptor_p (stmt
);
2678 SET_EXPR_LOCATION (exp
, gimple_location (stmt
));
2680 /* Ensure RTL is created for debug args. */
2681 if (decl
&& DECL_HAS_DEBUG_ARGS_P (decl
))
2683 vec
<tree
, va_gc
> **debug_args
= decl_debug_args_lookup (decl
);
2688 for (ix
= 1; (*debug_args
)->iterate (ix
, &dtemp
); ix
+= 2)
2690 gcc_assert (TREE_CODE (dtemp
) == DEBUG_EXPR_DECL
);
2691 expand_debug_expr (dtemp
);
2695 rtx_insn
*before_call
= get_last_insn ();
2696 lhs
= gimple_call_lhs (stmt
);
2698 expand_assignment (lhs
, exp
, false);
2700 expand_expr (exp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2702 /* If the gimple call is an indirect call and has 'nocf_check'
2703 attribute find a generated CALL insn to mark it as no
2704 control-flow verification is needed. */
2705 if (gimple_call_nocf_check_p (stmt
)
2706 && !gimple_call_fndecl (stmt
))
2708 rtx_insn
*last
= get_last_insn ();
2709 while (!CALL_P (last
)
2710 && last
!= before_call
)
2711 last
= PREV_INSN (last
);
2713 if (last
!= before_call
)
2714 add_reg_note (last
, REG_CALL_NOCF_CHECK
, const0_rtx
);
2717 mark_transaction_restart_calls (stmt
);
2721 /* Generate RTL for an asm statement (explicit assembler code).
2722 STRING is a STRING_CST node containing the assembler code text,
2723 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
2724 insn is volatile; don't optimize it. */
2727 expand_asm_loc (tree string
, int vol
, location_t locus
)
2731 body
= gen_rtx_ASM_INPUT_loc (VOIDmode
,
2732 ggc_strdup (TREE_STRING_POINTER (string
)),
2735 MEM_VOLATILE_P (body
) = vol
;
2737 /* Non-empty basic ASM implicitly clobbers memory. */
2738 if (TREE_STRING_LENGTH (string
) != 0)
2741 unsigned i
, nclobbers
;
2742 auto_vec
<rtx
> input_rvec
, output_rvec
;
2743 auto_vec
<const char *> constraints
;
2744 auto_vec
<rtx
> clobber_rvec
;
2745 HARD_REG_SET clobbered_regs
;
2746 CLEAR_HARD_REG_SET (clobbered_regs
);
2748 clob
= gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
));
2749 clobber_rvec
.safe_push (clob
);
2751 if (targetm
.md_asm_adjust
)
2752 targetm
.md_asm_adjust (output_rvec
, input_rvec
,
2753 constraints
, clobber_rvec
,
2757 nclobbers
= clobber_rvec
.length ();
2758 body
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (1 + nclobbers
));
2760 XVECEXP (body
, 0, 0) = asm_op
;
2761 for (i
= 0; i
< nclobbers
; i
++)
2762 XVECEXP (body
, 0, i
+ 1) = gen_rtx_CLOBBER (VOIDmode
, clobber_rvec
[i
]);
/* Return the number of times character C occurs in string S.  */

static int
n_occurrences (int c, const char *s)
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}

/* A subroutine of expand_asm_operands.  Check that all operands have
   the same number of alternatives.  Return true if so.  */

static bool
check_operand_nalternatives (const vec<const char *> &constraints)
{
  unsigned len = constraints.length();
  if (len > 0)
    {
      int nalternatives = n_occurrences (',', constraints[0]);

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
	{
	  error ("too many alternatives in %<asm%>");
	  return false;
	}

      for (unsigned i = 1; i < len; ++i)
	if (n_occurrences (',', constraints[i]) != nalternatives)
	  {
	    error ("operand constraints for %<asm%> differ "
		   "in number of alternatives");
	    return false;
	  }
    }
  return true;
}
/* Check for overlap between registers marked in CLOBBERED_REGS and
   anything inappropriate in T.  Emit an error and return true for a
   conflict, false for ok.  */

static bool
tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs)
{
  /* Conflicts between asm-declared register variables and the clobber
     list are not allowed.  */
  tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);

  if (overlap)
    {
      error ("%<asm%> specifier for variable %qE conflicts with "
	     "%<asm%> clobber list",
	     DECL_NAME (overlap));

      /* Reset registerness to stop multiple errors emitted for a single
	 variable.  */
      DECL_REGISTER (overlap) = 0;
      return true;
    }

  return false;
}
/* Check that the given REGNO spanning NREGS is a valid
   asm clobber operand.  Some HW registers cannot be
   saved/restored, hence they should not be clobbered by
   asm statements.  */

static bool
asm_clobber_reg_is_valid (int regno, int nregs, const char *regname)
{
  bool is_valid = true;
  HARD_REG_SET regset;

  CLEAR_HARD_REG_SET (regset);

  add_range_to_hard_reg_set (&regset, regno, nregs);

  /* Clobbering the PIC register is an error.  */
  if (PIC_OFFSET_TABLE_REGNUM != INVALID_REGNUM
      && overlaps_hard_reg_set_p (regset, Pmode, PIC_OFFSET_TABLE_REGNUM))
    {
      /* ??? Diagnose during gimplification?  */
      error ("PIC register clobbered by %qs in %<asm%>", regname);
      is_valid = false;
    }
  else if (!in_hard_reg_set_p
	   (accessible_reg_set, reg_raw_mode[regno], regno))
    {
      /* ??? Diagnose during gimplification?  */
      error ("the register %qs cannot be clobbered in %<asm%>"
	     " for the current target", regname);
      is_valid = false;
    }

  /* Clobbering the stack pointer register is deprecated.  GCC expects
     the value of the stack pointer after an asm statement to be the same
     as it was before, so no asm can validly clobber the stack pointer in
     the usual sense.  Adding the stack pointer to the clobber list has
     traditionally had some undocumented and somewhat obscure side-effects.  */
  if (overlaps_hard_reg_set_p (regset, Pmode, STACK_POINTER_REGNUM)
      && warning (OPT_Wdeprecated, "listing the stack pointer register"
		  " %qs in a clobber list is deprecated", regname))
    inform (input_location, "the value of the stack pointer after an %<asm%>"
	    " statement must be the same as it was before the statement");

  return is_valid;
}
2877 /* Generate RTL for an asm statement with arguments.
2878 STRING is the instruction template.
2879 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
2880 Each output or input has an expression in the TREE_VALUE and
2881 a tree list in TREE_PURPOSE which in turn contains a constraint
2882 name in TREE_VALUE (or NULL_TREE) and a constraint string
2884 CLOBBERS is a list of STRING_CST nodes each naming a hard register
2885 that is clobbered by this insn.
2887 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
2888 should be the fallthru basic block of the asm goto.
2890 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
2891 Some elements of OUTPUTS may be replaced with trees representing temporary
2892 values. The caller should copy those temporary values to the originally
2895 VOL nonzero means the insn is volatile; don't optimize it. */
2898 expand_asm_stmt (gasm
*stmt
)
2900 class save_input_location
2905 explicit save_input_location(location_t where
)
2907 old
= input_location
;
2908 input_location
= where
;
2911 ~save_input_location()
2913 input_location
= old
;
2917 location_t locus
= gimple_location (stmt
);
2919 if (gimple_asm_input_p (stmt
))
2921 const char *s
= gimple_asm_string (stmt
);
2922 tree string
= build_string (strlen (s
), s
);
2923 expand_asm_loc (string
, gimple_asm_volatile_p (stmt
), locus
);
  /* There are some legacy diagnostics in here, and this also avoids a
     sixth parameter to targetm.md_asm_adjust.  */
2929 save_input_location
s_i_l(locus
);
2931 unsigned noutputs
= gimple_asm_noutputs (stmt
);
2932 unsigned ninputs
= gimple_asm_ninputs (stmt
);
2933 unsigned nlabels
= gimple_asm_nlabels (stmt
);
2936 /* ??? Diagnose during gimplification? */
2937 if (ninputs
+ noutputs
+ nlabels
> MAX_RECOG_OPERANDS
)
2939 error ("more than %d operands in %<asm%>", MAX_RECOG_OPERANDS
);
2943 auto_vec
<tree
, MAX_RECOG_OPERANDS
> output_tvec
;
2944 auto_vec
<tree
, MAX_RECOG_OPERANDS
> input_tvec
;
2945 auto_vec
<const char *, MAX_RECOG_OPERANDS
> constraints
;
2947 /* Copy the gimple vectors into new vectors that we can manipulate. */
2949 output_tvec
.safe_grow (noutputs
);
2950 input_tvec
.safe_grow (ninputs
);
2951 constraints
.safe_grow (noutputs
+ ninputs
);
2953 for (i
= 0; i
< noutputs
; ++i
)
2955 tree t
= gimple_asm_output_op (stmt
, i
);
2956 output_tvec
[i
] = TREE_VALUE (t
);
2957 constraints
[i
] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t
)));
2959 for (i
= 0; i
< ninputs
; i
++)
2961 tree t
= gimple_asm_input_op (stmt
, i
);
2962 input_tvec
[i
] = TREE_VALUE (t
);
2963 constraints
[i
+ noutputs
]
2964 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t
)));
2967 /* ??? Diagnose during gimplification? */
2968 if (! check_operand_nalternatives (constraints
))
2971 /* Count the number of meaningful clobbered registers, ignoring what
2972 we would ignore later. */
2973 auto_vec
<rtx
> clobber_rvec
;
2974 HARD_REG_SET clobbered_regs
;
2975 CLEAR_HARD_REG_SET (clobbered_regs
);
2977 if (unsigned n
= gimple_asm_nclobbers (stmt
))
2979 clobber_rvec
.reserve (n
);
2980 for (i
= 0; i
< n
; i
++)
2982 tree t
= gimple_asm_clobber_op (stmt
, i
);
2983 const char *regname
= TREE_STRING_POINTER (TREE_VALUE (t
));
2986 j
= decode_reg_name_and_count (regname
, &nregs
);
2991 /* ??? Diagnose during gimplification? */
2992 error ("unknown register name %qs in %<asm%>", regname
);
2996 rtx x
= gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
));
2997 clobber_rvec
.safe_push (x
);
3001 /* Otherwise we should have -1 == empty string
3002 or -3 == cc, which is not a register. */
3003 gcc_assert (j
== -1 || j
== -3);
3007 for (int reg
= j
; reg
< j
+ nregs
; reg
++)
3009 if (!asm_clobber_reg_is_valid (reg
, nregs
, regname
))
3012 SET_HARD_REG_BIT (clobbered_regs
, reg
);
3013 rtx x
= gen_rtx_REG (reg_raw_mode
[reg
], reg
);
3014 clobber_rvec
.safe_push (x
);
3019 /* First pass over inputs and outputs checks validity and sets
3020 mark_addressable if needed. */
3021 /* ??? Diagnose during gimplification? */
3023 for (i
= 0; i
< noutputs
; ++i
)
3025 tree val
= output_tvec
[i
];
3026 tree type
= TREE_TYPE (val
);
3027 const char *constraint
;
3032 /* Try to parse the output constraint. If that fails, there's
3033 no point in going further. */
3034 constraint
= constraints
[i
];
3035 if (!parse_output_constraint (&constraint
, i
, ninputs
, noutputs
,
3036 &allows_mem
, &allows_reg
, &is_inout
))
3039 /* If the output is a hard register, verify it doesn't conflict with
3040 any other operand's possible hard register use. */
3042 && REG_P (DECL_RTL (val
))
3043 && HARD_REGISTER_P (DECL_RTL (val
)))
3045 unsigned j
, output_hregno
= REGNO (DECL_RTL (val
));
3046 bool early_clobber_p
= strchr (constraints
[i
], '&') != NULL
;
3047 unsigned long match
;
3049 /* Verify the other outputs do not use the same hard register. */
3050 for (j
= i
+ 1; j
< noutputs
; ++j
)
3051 if (DECL_P (output_tvec
[j
])
3052 && REG_P (DECL_RTL (output_tvec
[j
]))
3053 && HARD_REGISTER_P (DECL_RTL (output_tvec
[j
]))
3054 && output_hregno
== REGNO (DECL_RTL (output_tvec
[j
])))
3055 error ("invalid hard register usage between output operands");
3057 /* Verify matching constraint operands use the same hard register
3058 and that the non-matching constraint operands do not use the same
3059 hard register if the output is an early clobber operand. */
3060 for (j
= 0; j
< ninputs
; ++j
)
3061 if (DECL_P (input_tvec
[j
])
3062 && REG_P (DECL_RTL (input_tvec
[j
]))
3063 && HARD_REGISTER_P (DECL_RTL (input_tvec
[j
])))
3065 unsigned input_hregno
= REGNO (DECL_RTL (input_tvec
[j
]));
3066 switch (*constraints
[j
+ noutputs
])
3068 case '0': case '1': case '2': case '3': case '4':
3069 case '5': case '6': case '7': case '8': case '9':
3070 match
= strtoul (constraints
[j
+ noutputs
], NULL
, 10);
3077 && output_hregno
!= input_hregno
)
3078 error ("invalid hard register usage between output operand "
3079 "and matching constraint operand");
3080 else if (early_clobber_p
3082 && output_hregno
== input_hregno
)
3083 error ("invalid hard register usage between earlyclobber "
3084 "operand and input operand");
3092 && REG_P (DECL_RTL (val
))
3093 && GET_MODE (DECL_RTL (val
)) != TYPE_MODE (type
))))
3094 mark_addressable (val
);
3097 for (i
= 0; i
< ninputs
; ++i
)
3099 bool allows_reg
, allows_mem
;
3100 const char *constraint
;
3102 constraint
= constraints
[i
+ noutputs
];
3103 if (! parse_input_constraint (&constraint
, i
, ninputs
, noutputs
, 0,
3104 constraints
.address (),
3105 &allows_mem
, &allows_reg
))
3108 if (! allows_reg
&& allows_mem
)
3109 mark_addressable (input_tvec
[i
]);
3112 /* Second pass evaluates arguments. */
3114 /* Make sure stack is consistent for asm goto. */
3116 do_pending_stack_adjust ();
3117 int old_generating_concat_p
= generating_concat_p
;
3119 /* Vector of RTX's of evaluated output operands. */
3120 auto_vec
<rtx
, MAX_RECOG_OPERANDS
> output_rvec
;
3121 auto_vec
<int, MAX_RECOG_OPERANDS
> inout_opnum
;
3122 rtx_insn
*after_rtl_seq
= NULL
, *after_rtl_end
= NULL
;
3124 output_rvec
.safe_grow (noutputs
);
3126 for (i
= 0; i
< noutputs
; ++i
)
3128 tree val
= output_tvec
[i
];
3129 tree type
= TREE_TYPE (val
);
3130 bool is_inout
, allows_reg
, allows_mem
, ok
;
3133 ok
= parse_output_constraint (&constraints
[i
], i
, ninputs
,
3134 noutputs
, &allows_mem
, &allows_reg
,
3138 /* If an output operand is not a decl or indirect ref and our constraint
3139 allows a register, make a temporary to act as an intermediate.
3140 Make the asm insn write into that, then we will copy it to
3141 the real output operand. Likewise for promoted variables. */
3143 generating_concat_p
= 0;
3145 if ((TREE_CODE (val
) == INDIRECT_REF
&& allows_mem
)
3147 && (allows_mem
|| REG_P (DECL_RTL (val
)))
3148 && ! (REG_P (DECL_RTL (val
))
3149 && GET_MODE (DECL_RTL (val
)) != TYPE_MODE (type
)))
3152 || TREE_ADDRESSABLE (type
))
3154 op
= expand_expr (val
, NULL_RTX
, VOIDmode
,
3155 !allows_reg
? EXPAND_MEMORY
: EXPAND_WRITE
);
3157 op
= validize_mem (op
);
3159 if (! allows_reg
&& !MEM_P (op
))
3160 error ("output number %d not directly addressable", i
);
3161 if ((! allows_mem
&& MEM_P (op
) && GET_MODE (op
) != BLKmode
)
3162 || GET_CODE (op
) == CONCAT
)
3165 op
= gen_reg_rtx (GET_MODE (op
));
3167 generating_concat_p
= old_generating_concat_p
;
3170 emit_move_insn (op
, old_op
);
3172 push_to_sequence2 (after_rtl_seq
, after_rtl_end
);
3173 emit_move_insn (old_op
, op
);
3174 after_rtl_seq
= get_insns ();
3175 after_rtl_end
= get_last_insn ();
3181 op
= assign_temp (type
, 0, 1);
3182 op
= validize_mem (op
);
3183 if (!MEM_P (op
) && TREE_CODE (val
) == SSA_NAME
)
3184 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val
), op
);
3186 generating_concat_p
= old_generating_concat_p
;
3188 push_to_sequence2 (after_rtl_seq
, after_rtl_end
);
3189 expand_assignment (val
, make_tree (type
, op
), false);
3190 after_rtl_seq
= get_insns ();
3191 after_rtl_end
= get_last_insn ();
3194 output_rvec
[i
] = op
;
3197 inout_opnum
.safe_push (i
);
3200 auto_vec
<rtx
, MAX_RECOG_OPERANDS
> input_rvec
;
3201 auto_vec
<machine_mode
, MAX_RECOG_OPERANDS
> input_mode
;
3203 input_rvec
.safe_grow (ninputs
);
3204 input_mode
.safe_grow (ninputs
);
3206 generating_concat_p
= 0;
3208 for (i
= 0; i
< ninputs
; ++i
)
3210 tree val
= input_tvec
[i
];
3211 tree type
= TREE_TYPE (val
);
3212 bool allows_reg
, allows_mem
, ok
;
3213 const char *constraint
;
3216 constraint
= constraints
[i
+ noutputs
];
3217 ok
= parse_input_constraint (&constraint
, i
, ninputs
, noutputs
, 0,
3218 constraints
.address (),
3219 &allows_mem
, &allows_reg
);
3222 /* EXPAND_INITIALIZER will not generate code for valid initializer
3223 constants, but will still generate code for other types of operand.
3224 This is the behavior we want for constant constraints. */
3225 op
= expand_expr (val
, NULL_RTX
, VOIDmode
,
3226 allows_reg
? EXPAND_NORMAL
3227 : allows_mem
? EXPAND_MEMORY
3228 : EXPAND_INITIALIZER
);
3230 /* Never pass a CONCAT to an ASM. */
3231 if (GET_CODE (op
) == CONCAT
)
3232 op
= force_reg (GET_MODE (op
), op
);
3233 else if (MEM_P (op
))
3234 op
= validize_mem (op
);
3236 if (asm_operand_ok (op
, constraint
, NULL
) <= 0)
3238 if (allows_reg
&& TYPE_MODE (type
) != BLKmode
)
3239 op
= force_reg (TYPE_MODE (type
), op
);
3240 else if (!allows_mem
)
3241 warning (0, "%<asm%> operand %d probably does not match "
3244 else if (MEM_P (op
))
	  /* We won't recognize either volatile memory or memory
	     with a queued address as a valid memory_operand
	     at this point.  Ignore it: clearly this *is* a memory.  */
3254 input_mode
[i
] = TYPE_MODE (type
);
3257 /* For in-out operands, copy output rtx to input rtx. */
3258 unsigned ninout
= inout_opnum
.length();
3259 for (i
= 0; i
< ninout
; i
++)
3261 int j
= inout_opnum
[i
];
3262 rtx o
= output_rvec
[j
];
3264 input_rvec
.safe_push (o
);
3265 input_mode
.safe_push (GET_MODE (o
));
3268 sprintf (buffer
, "%d", j
);
3269 constraints
.safe_push (ggc_strdup (buffer
));
3273 /* Sometimes we wish to automatically clobber registers across an asm.
3274 Case in point is when the i386 backend moved from cc0 to a hard reg --
3275 maintaining source-level compatibility means automatically clobbering
3276 the flags register. */
3277 rtx_insn
*after_md_seq
= NULL
;
3278 if (targetm
.md_asm_adjust
)
3279 after_md_seq
= targetm
.md_asm_adjust (output_rvec
, input_rvec
,
3280 constraints
, clobber_rvec
,
3283 /* Do not allow the hook to change the output and input count,
3284 lest it mess up the operand numbering. */
3285 gcc_assert (output_rvec
.length() == noutputs
);
3286 gcc_assert (input_rvec
.length() == ninputs
);
3287 gcc_assert (constraints
.length() == noutputs
+ ninputs
);
3289 /* But it certainly can adjust the clobbers. */
3290 unsigned nclobbers
= clobber_rvec
.length ();
3292 /* Third pass checks for easy conflicts. */
3293 /* ??? Why are we doing this on trees instead of rtx. */
3295 bool clobber_conflict_found
= 0;
3296 for (i
= 0; i
< noutputs
; ++i
)
3297 if (tree_conflicts_with_clobbers_p (output_tvec
[i
], &clobbered_regs
))
3298 clobber_conflict_found
= 1;
3299 for (i
= 0; i
< ninputs
- ninout
; ++i
)
3300 if (tree_conflicts_with_clobbers_p (input_tvec
[i
], &clobbered_regs
))
3301 clobber_conflict_found
= 1;
3303 /* Make vectors for the expression-rtx, constraint strings,
3304 and named operands. */
3306 rtvec argvec
= rtvec_alloc (ninputs
);
3307 rtvec constraintvec
= rtvec_alloc (ninputs
);
3308 rtvec labelvec
= rtvec_alloc (nlabels
);
3310 rtx body
= gen_rtx_ASM_OPERANDS ((noutputs
== 0 ? VOIDmode
3311 : GET_MODE (output_rvec
[0])),
3312 ggc_strdup (gimple_asm_string (stmt
)),
3313 "", 0, argvec
, constraintvec
,
3315 MEM_VOLATILE_P (body
) = gimple_asm_volatile_p (stmt
);
3317 for (i
= 0; i
< ninputs
; ++i
)
3319 ASM_OPERANDS_INPUT (body
, i
) = input_rvec
[i
];
3320 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body
, i
)
3321 = gen_rtx_ASM_INPUT_loc (input_mode
[i
],
3322 constraints
[i
+ noutputs
],
3326 /* Copy labels to the vector. */
3327 rtx_code_label
*fallthru_label
= NULL
;
3330 basic_block fallthru_bb
= NULL
;
3331 edge fallthru
= find_fallthru_edge (gimple_bb (stmt
)->succs
);
3333 fallthru_bb
= fallthru
->dest
;
3335 for (i
= 0; i
< nlabels
; ++i
)
3337 tree label
= TREE_VALUE (gimple_asm_label_op (stmt
, i
));
3339 /* If asm goto has any labels in the fallthru basic block, use
3340 a label that we emit immediately after the asm goto. Expansion
3341 may insert further instructions into the same basic block after
3342 asm goto and if we don't do this, insertion of instructions on
3343 the fallthru edge might misbehave. See PR58670. */
3344 if (fallthru_bb
&& label_to_block (cfun
, label
) == fallthru_bb
)
3346 if (fallthru_label
== NULL_RTX
)
3347 fallthru_label
= gen_label_rtx ();
3351 r
= label_rtx (label
);
3352 ASM_OPERANDS_LABEL (body
, i
) = gen_rtx_LABEL_REF (Pmode
, r
);
3356 /* Now, for each output, construct an rtx
3357 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
3358 ARGVEC CONSTRAINTS OPNAMES))
3359 If there is more than one, put them inside a PARALLEL. */
3361 if (nlabels
> 0 && nclobbers
== 0)
3363 gcc_assert (noutputs
== 0);
3364 emit_jump_insn (body
);
3366 else if (noutputs
== 0 && nclobbers
== 0)
3368 /* No output operands: put in a raw ASM_OPERANDS rtx. */
3371 else if (noutputs
== 1 && nclobbers
== 0)
3373 ASM_OPERANDS_OUTPUT_CONSTRAINT (body
) = constraints
[0];
3374 emit_insn (gen_rtx_SET (output_rvec
[0], body
));
3384 body
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (num
+ nclobbers
));
3386 /* For each output operand, store a SET. */
3387 for (i
= 0; i
< noutputs
; ++i
)
3389 rtx src
, o
= output_rvec
[i
];
3392 ASM_OPERANDS_OUTPUT_CONSTRAINT (obody
) = constraints
[0];
3397 src
= gen_rtx_ASM_OPERANDS (GET_MODE (o
),
3398 ASM_OPERANDS_TEMPLATE (obody
),
3399 constraints
[i
], i
, argvec
,
3400 constraintvec
, labelvec
, locus
);
3401 MEM_VOLATILE_P (src
) = gimple_asm_volatile_p (stmt
);
3403 XVECEXP (body
, 0, i
) = gen_rtx_SET (o
, src
);
3406 /* If there are no outputs (but there are some clobbers)
3407 store the bare ASM_OPERANDS into the PARALLEL. */
3409 XVECEXP (body
, 0, i
++) = obody
;
3411 /* Store (clobber REG) for each clobbered register specified. */
3412 for (unsigned j
= 0; j
< nclobbers
; ++j
)
3414 rtx clobbered_reg
= clobber_rvec
[j
];
3416 /* Do sanity check for overlap between clobbers and respectively
3417 input and outputs that hasn't been handled. Such overlap
3418 should have been detected and reported above. */
3419 if (!clobber_conflict_found
&& REG_P (clobbered_reg
))
3421 /* We test the old body (obody) contents to avoid
3422 tripping over the under-construction body. */
3423 for (unsigned k
= 0; k
< noutputs
; ++k
)
3424 if (reg_overlap_mentioned_p (clobbered_reg
, output_rvec
[k
]))
3425 internal_error ("%<asm%> clobber conflict with "
3428 for (unsigned k
= 0; k
< ninputs
- ninout
; ++k
)
3429 if (reg_overlap_mentioned_p (clobbered_reg
, input_rvec
[k
]))
3430 internal_error ("%<asm%> clobber conflict with "
3434 XVECEXP (body
, 0, i
++) = gen_rtx_CLOBBER (VOIDmode
, clobbered_reg
);
3438 emit_jump_insn (body
);
3443 generating_concat_p
= old_generating_concat_p
;
3446 emit_label (fallthru_label
);
3449 emit_insn (after_md_seq
);
3451 emit_insn (after_rtl_seq
);
3454 crtl
->has_asm_statement
= 1;
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

static void
expand_computed_goto (tree exp)
{
  rtx x = expand_normal (exp);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);
}
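/* This is what a GNU C computed goto, e.g. "goto *ptr;", ultimately expands
   to: the destination is a run-time value rather than a LABEL_DECL, so an
   indirect jump is emitted (see the GIMPLE_GOTO handling in
   expand_gimple_stmt_1 below).  */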
/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

static void
expand_goto (tree label)
{
  if (flag_checking)
    {
      /* Check for a nonlocal goto to a containing function.  Should have
	 gotten translated to __builtin_nonlocal_goto.  */
      tree context = decl_function_context (label);
      gcc_assert (!context || context == current_function_decl);
    }

  emit_jump (jump_target_rtx (label));
}
/* Output a return with no value.  */

static void
expand_null_return_1 (void)
{
  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();
  emit_jump (return_label);
}

/* Generate RTL to return from the current function, with no value.
   (That is, we do not do anything about returning any value.)  */

void
expand_null_return (void)
{
  /* If this function was declared to return a value, but we
     didn't, clobber the return registers so that they are not
     propagated live to the rest of the function.  */
  clobber_return_register ();

  expand_null_return_1 ();
}
/* Generate RTL to return from the current function, with value VAL.  */

static void
expand_value_return (rtx val)
{
  /* Copy the value to the return location unless it's already there.  */

  tree decl = DECL_RESULT (current_function_decl);
  rtx return_reg = DECL_RTL (decl);
  if (return_reg != val)
    {
      tree funtype = TREE_TYPE (current_function_decl);
      tree type = TREE_TYPE (decl);
      int unsignedp = TYPE_UNSIGNED (type);
      machine_mode old_mode = DECL_MODE (decl);
      machine_mode mode;
      if (DECL_BY_REFERENCE (decl))
	mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 2);
      else
	mode = promote_function_mode (type, old_mode, &unsignedp, funtype, 1);

      if (mode != old_mode)
	val = convert_modes (mode, old_mode, val, unsignedp);

      if (GET_CODE (return_reg) == PARALLEL)
	emit_group_load (return_reg, val, type, int_size_in_bytes (type));
      else
	emit_move_insn (return_reg, val);
    }

  expand_null_return_1 ();
}
3544 /* Generate RTL to evaluate the expression RETVAL and return it
3545 from the current function. */
3548 expand_return (tree retval
)
3554 /* If function wants no value, give it none. */
3555 if (TREE_CODE (TREE_TYPE (TREE_TYPE (current_function_decl
))) == VOID_TYPE
)
3557 expand_normal (retval
);
3558 expand_null_return ();
3562 if (retval
== error_mark_node
)
3564 /* Treat this like a return of no value from a function that
3566 expand_null_return ();
3569 else if ((TREE_CODE (retval
) == MODIFY_EXPR
3570 || TREE_CODE (retval
) == INIT_EXPR
)
3571 && TREE_CODE (TREE_OPERAND (retval
, 0)) == RESULT_DECL
)
3572 retval_rhs
= TREE_OPERAND (retval
, 1);
3574 retval_rhs
= retval
;
3576 result_rtl
= DECL_RTL (DECL_RESULT (current_function_decl
));
3578 /* If we are returning the RESULT_DECL, then the value has already
3579 been stored into it, so we don't have to do anything special. */
3580 if (TREE_CODE (retval_rhs
) == RESULT_DECL
)
3581 expand_value_return (result_rtl
);
3583 /* If the result is an aggregate that is being returned in one (or more)
3584 registers, load the registers here. */
3586 else if (retval_rhs
!= 0
3587 && TYPE_MODE (TREE_TYPE (retval_rhs
)) == BLKmode
3588 && REG_P (result_rtl
))
3590 val
= copy_blkmode_to_reg (GET_MODE (result_rtl
), retval_rhs
);
3593 /* Use the mode of the result value on the return register. */
3594 PUT_MODE (result_rtl
, GET_MODE (val
));
3595 expand_value_return (val
);
3598 expand_null_return ();
3600 else if (retval_rhs
!= 0
3601 && !VOID_TYPE_P (TREE_TYPE (retval_rhs
))
3602 && (REG_P (result_rtl
)
3603 || (GET_CODE (result_rtl
) == PARALLEL
)))
3605 /* Compute the return value into a temporary (usually a pseudo reg). */
3607 = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl
)), 0, 1);
3608 val
= expand_expr (retval_rhs
, val
, GET_MODE (val
), EXPAND_NORMAL
);
3609 val
= force_not_mem (val
);
3610 expand_value_return (val
);
3614 /* No hard reg used; calculate value into hard return reg. */
3615 expand_expr (retval
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3616 expand_value_return (result_rtl
);
/* Expand a clobber of LHS.  If LHS is stored in a multi-part
   register, tell the rtl optimizers that its value is no longer
   needed.  */

static void
expand_clobber (tree lhs)
{
  if (DECL_P (lhs))
    {
      rtx decl_rtl = DECL_RTL_IF_SET (lhs);
      if (decl_rtl && REG_P (decl_rtl))
	{
	  machine_mode decl_mode = GET_MODE (decl_rtl);
	  if (maybe_gt (GET_MODE_SIZE (decl_mode),
			REGMODE_NATURAL_SIZE (decl_mode)))
	    emit_clobber (decl_rtl);
	}
    }
}
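/* Rationale: a value wider than REGMODE_NATURAL_SIZE lives in a group of
   registers (e.g. a register pair), and an explicit (clobber ...) is what
   lets the RTL dataflow passes see that every part of it is dead; for values
   that fit a single natural-size register the clobber is simply omitted
   here.  */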
3640 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3641 STMT that doesn't require special handling for outgoing edges. That
3642 is no tailcalls and no GIMPLE_COND. */
3645 expand_gimple_stmt_1 (gimple
*stmt
)
3649 set_curr_insn_location (gimple_location (stmt
));
3651 switch (gimple_code (stmt
))
3654 op0
= gimple_goto_dest (stmt
);
3655 if (TREE_CODE (op0
) == LABEL_DECL
)
3658 expand_computed_goto (op0
);
3661 expand_label (gimple_label_label (as_a
<glabel
*> (stmt
)));
3664 case GIMPLE_PREDICT
:
3668 gswitch
*swtch
= as_a
<gswitch
*> (stmt
);
3669 if (gimple_switch_num_labels (swtch
) == 1)
3670 expand_goto (CASE_LABEL (gimple_switch_default_label (swtch
)));
3672 expand_case (swtch
);
3676 expand_asm_stmt (as_a
<gasm
*> (stmt
));
3679 expand_call_stmt (as_a
<gcall
*> (stmt
));
3684 op0
= gimple_return_retval (as_a
<greturn
*> (stmt
));
3686 /* If a return doesn't have a location, it very likely represents
3687 multiple user returns so we cannot let it inherit the location
3688 of the last statement of the previous basic block in RTL. */
3689 if (!gimple_has_location (stmt
))
3690 set_curr_insn_location (cfun
->function_end_locus
);
3692 if (op0
&& op0
!= error_mark_node
)
3694 tree result
= DECL_RESULT (current_function_decl
);
3696 /* If we are not returning the current function's RESULT_DECL,
3697 build an assignment to it. */
3700 /* I believe that a function's RESULT_DECL is unique. */
3701 gcc_assert (TREE_CODE (op0
) != RESULT_DECL
);
3703 /* ??? We'd like to use simply expand_assignment here,
3704 but this fails if the value is of BLKmode but the return
3705 decl is a register. expand_return has special handling
3706 for this combination, which eventually should move
3707 to common code. See comments there. Until then, let's
3708 build a modify expression :-/ */
3709 op0
= build2 (MODIFY_EXPR
, TREE_TYPE (result
),
3715 expand_null_return ();
3717 expand_return (op0
);
3723 gassign
*assign_stmt
= as_a
<gassign
*> (stmt
);
3724 tree lhs
= gimple_assign_lhs (assign_stmt
);
3726 /* Tree expand used to fiddle with |= and &= of two bitfield
3727 COMPONENT_REFs here. This can't happen with gimple, the LHS
3728 of binary assigns must be a gimple reg. */
3730 if (TREE_CODE (lhs
) != SSA_NAME
3731 || get_gimple_rhs_class (gimple_expr_code (stmt
))
3732 == GIMPLE_SINGLE_RHS
)
3734 tree rhs
= gimple_assign_rhs1 (assign_stmt
);
3735 gcc_assert (get_gimple_rhs_class (gimple_expr_code (stmt
))
3736 == GIMPLE_SINGLE_RHS
);
3737 if (gimple_has_location (stmt
) && CAN_HAVE_LOCATION_P (rhs
)
3738 /* Do not put locations on possibly shared trees. */
3739 && !is_gimple_min_invariant (rhs
))
3740 SET_EXPR_LOCATION (rhs
, gimple_location (stmt
));
3741 if (TREE_CLOBBER_P (rhs
))
3742 /* This is a clobber to mark the going out of scope for
3744 expand_clobber (lhs
);
3746 expand_assignment (lhs
, rhs
,
3747 gimple_assign_nontemporal_move_p (
3753 bool nontemporal
= gimple_assign_nontemporal_move_p (assign_stmt
);
3754 struct separate_ops ops
;
3755 bool promoted
= false;
3757 target
= expand_expr (lhs
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
3758 if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
3761 ops
.code
= gimple_assign_rhs_code (assign_stmt
);
3762 ops
.type
= TREE_TYPE (lhs
);
3763 switch (get_gimple_rhs_class (ops
.code
))
3765 case GIMPLE_TERNARY_RHS
:
3766 ops
.op2
= gimple_assign_rhs3 (assign_stmt
);
3768 case GIMPLE_BINARY_RHS
:
3769 ops
.op1
= gimple_assign_rhs2 (assign_stmt
);
3771 case GIMPLE_UNARY_RHS
:
3772 ops
.op0
= gimple_assign_rhs1 (assign_stmt
);
3777 ops
.location
= gimple_location (stmt
);
3779 /* If we want to use a nontemporal store, force the value to
3780 register first. If we store into a promoted register,
3781 don't directly expand to target. */
3782 temp
= nontemporal
|| promoted
? NULL_RTX
: target
;
3783 temp
= expand_expr_real_2 (&ops
, temp
, GET_MODE (target
),
3790 int unsignedp
= SUBREG_PROMOTED_SIGN (target
);
3791 /* If TEMP is a VOIDmode constant, use convert_modes to make
3792 sure that we properly convert it. */
3793 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
3795 temp
= convert_modes (GET_MODE (target
),
3796 TYPE_MODE (ops
.type
),
3798 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
3799 GET_MODE (target
), temp
, unsignedp
);
3802 convert_move (SUBREG_REG (target
), temp
, unsignedp
);
3804 else if (nontemporal
&& emit_storent_insn (target
, temp
))
3808 temp
= force_operand (temp
, target
);
3810 emit_move_insn (target
, temp
);
3821 /* Expand one gimple statement STMT and return the last RTL instruction
3822 before any of the newly generated ones.
3824 In addition to generating the necessary RTL instructions this also
3825 sets REG_EH_REGION notes if necessary and sets the current source
3826 location for diagnostics. */
3829 expand_gimple_stmt (gimple
*stmt
)
3831 location_t saved_location
= input_location
;
3832 rtx_insn
*last
= get_last_insn ();
3837 /* We need to save and restore the current source location so that errors
3838 discovered during expansion are emitted with the right location. But
3839 it would be better if the diagnostic routines used the source location
3840 embedded in the tree nodes rather than globals. */
3841 if (gimple_has_location (stmt
))
3842 input_location
= gimple_location (stmt
);
3844 expand_gimple_stmt_1 (stmt
);
3846 /* Free any temporaries used to evaluate this statement. */
3849 input_location
= saved_location
;
3851 /* Mark all insns that may trap. */
3852 lp_nr
= lookup_stmt_eh_lp (stmt
);
3856 for (insn
= next_real_insn (last
); insn
;
3857 insn
= next_real_insn (insn
))
3859 if (! find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
)
3860 /* If we want exceptions for non-call insns, any
3861 may_trap_p instruction may throw. */
3862 && GET_CODE (PATTERN (insn
)) != CLOBBER
3863 && GET_CODE (PATTERN (insn
)) != USE
3864 && insn_could_throw_p (insn
))
3865 make_reg_eh_region_note (insn
, 0, lp_nr
);
3872 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
3873 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
3874 generated a tail call (something that might be denied by the ABI
3875 rules governing the call; see calls.c).
3877 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
3878 can still reach the rest of BB. The case here is __builtin_sqrt,
3879 where the NaN result goes through the external function (with a
3880 tailcall) and the normal result happens via a sqrt instruction. */
3883 expand_gimple_tailcall (basic_block bb
, gcall
*stmt
, bool *can_fallthru
)
3885 rtx_insn
*last2
, *last
;
3888 profile_probability probability
;
3890 last2
= last
= expand_gimple_stmt (stmt
);
3892 for (last
= NEXT_INSN (last
); last
; last
= NEXT_INSN (last
))
3893 if (CALL_P (last
) && SIBLING_CALL_P (last
))
3896 maybe_dump_rtl_for_gimple_stmt (stmt
, last2
);
3898 *can_fallthru
= true;
3902 /* ??? Wouldn't it be better to just reset any pending stack adjust?
3903 Any instructions emitted here are about to be deleted. */
3904 do_pending_stack_adjust ();
3906 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
3907 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
3908 EH or abnormal edges, we shouldn't have created a tail call in
3909 the first place. So it seems to me we should just be removing
3910 all edges here, or redirecting the existing fallthru edge to
3913 probability
= profile_probability::never ();
3915 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
3917 if (!(e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
)))
3919 if (e
->dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
3920 e
->dest
->count
-= e
->count ();
3921 probability
+= e
->probability
;
3928 /* This is somewhat ugly: the call_expr expander often emits instructions
3929 after the sibcall (to perform the function return). These confuse the
3930 find_many_sub_basic_blocks code, so we need to get rid of these. */
3931 last
= NEXT_INSN (last
);
3932 gcc_assert (BARRIER_P (last
));
3934 *can_fallthru
= false;
3935 while (NEXT_INSN (last
))
3937 /* For instance an sqrt builtin expander expands if with
3938 sibcall in the then and label for `else`. */
3939 if (LABEL_P (NEXT_INSN (last
)))
3941 *can_fallthru
= true;
3944 delete_insn (NEXT_INSN (last
));
3947 e
= make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), EDGE_ABNORMAL
3949 e
->probability
= probability
;
3951 update_bb_for_insn (bb
);
3953 if (NEXT_INSN (last
))
3955 bb
= create_basic_block (NEXT_INSN (last
), get_last_insn (), bb
);
3958 if (BARRIER_P (last
))
3959 BB_END (bb
) = PREV_INSN (last
);
3962 maybe_dump_rtl_for_gimple_stmt (stmt
, last2
);
3967 /* Return the difference between the floor and the truncated result of
3968 a signed division by OP1 with remainder MOD. */
3970 floor_sdiv_adjust (machine_mode mode
, rtx mod
, rtx op1
)
3972 /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
3973 return gen_rtx_IF_THEN_ELSE
3974 (mode
, gen_rtx_NE (BImode
, mod
, const0_rtx
),
3975 gen_rtx_IF_THEN_ELSE
3976 (mode
, gen_rtx_LT (BImode
,
3977 gen_rtx_DIV (mode
, op1
, mod
),
3979 constm1_rtx
, const0_rtx
),
3983 /* Return the difference between the ceil and the truncated result of
3984 a signed division by OP1 with remainder MOD. */
3986 ceil_sdiv_adjust (machine_mode mode
, rtx mod
, rtx op1
)
3988 /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
3989 return gen_rtx_IF_THEN_ELSE
3990 (mode
, gen_rtx_NE (BImode
, mod
, const0_rtx
),
3991 gen_rtx_IF_THEN_ELSE
3992 (mode
, gen_rtx_GT (BImode
,
3993 gen_rtx_DIV (mode
, op1
, mod
),
3995 const1_rtx
, const0_rtx
),
3999 /* Return the difference between the ceil and the truncated result of
4000 an unsigned division by OP1 with remainder MOD. */
4002 ceil_udiv_adjust (machine_mode mode
, rtx mod
, rtx op1 ATTRIBUTE_UNUSED
)
4004 /* (mod != 0 ? 1 : 0) */
4005 return gen_rtx_IF_THEN_ELSE
4006 (mode
, gen_rtx_NE (BImode
, mod
, const0_rtx
),
4007 const1_rtx
, const0_rtx
);
4010 /* Return the difference between the rounded and the truncated result
4011 of a signed division by OP1 with remainder MOD. Halfway cases are
4012 rounded away from zero, rather than to the nearest even number. */
4014 round_sdiv_adjust (machine_mode mode
, rtx mod
, rtx op1
)
4016 /* (abs (mod) >= abs (op1) - abs (mod)
4017 ? (op1 / mod > 0 ? 1 : -1)
4019 return gen_rtx_IF_THEN_ELSE
4020 (mode
, gen_rtx_GE (BImode
, gen_rtx_ABS (mode
, mod
),
4021 gen_rtx_MINUS (mode
,
4022 gen_rtx_ABS (mode
, op1
),
4023 gen_rtx_ABS (mode
, mod
))),
4024 gen_rtx_IF_THEN_ELSE
4025 (mode
, gen_rtx_GT (BImode
,
4026 gen_rtx_DIV (mode
, op1
, mod
),
4028 const1_rtx
, constm1_rtx
),
/* Return the difference between the rounded and the truncated result
   of an unsigned division by OP1 with remainder MOD.  Halfway cases
   are rounded away from zero, rather than to the nearest even
   number.  */

static rtx
round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (mod >= op1 - mod ? 1 : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_GE (BImode, mod,
		       gen_rtx_MINUS (mode, op1, mod)),
     const1_rtx, const0_rtx);
}
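/* Numeric check of the adjustment above (illustrative): for 8 / 3 the
   truncated quotient is 2 with MOD == 2; since 2 >= 3 - 2 the adjustment is
   1, giving the rounded result 3.  For 7 / 3, MOD == 1 < 2, so the truncated
   quotient 2 is already the rounded result.  */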
4046 /* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
4050 convert_debug_memory_address (scalar_int_mode mode
, rtx x
,
4053 #ifndef POINTERS_EXTEND_UNSIGNED
4054 gcc_assert (mode
== Pmode
4055 || mode
== targetm
.addr_space
.address_mode (as
));
4056 gcc_assert (GET_MODE (x
) == mode
|| GET_MODE (x
) == VOIDmode
);
4060 gcc_assert (targetm
.addr_space
.valid_pointer_mode (mode
, as
));
4062 if (GET_MODE (x
) == mode
|| GET_MODE (x
) == VOIDmode
)
4065 /* X must have some form of address mode already. */
4066 scalar_int_mode xmode
= as_a
<scalar_int_mode
> (GET_MODE (x
));
4067 if (GET_MODE_PRECISION (mode
) < GET_MODE_PRECISION (xmode
))
4068 x
= lowpart_subreg (mode
, x
, xmode
);
4069 else if (POINTERS_EXTEND_UNSIGNED
> 0)
4070 x
= gen_rtx_ZERO_EXTEND (mode
, x
);
4071 else if (!POINTERS_EXTEND_UNSIGNED
)
4072 x
= gen_rtx_SIGN_EXTEND (mode
, x
);
4075 switch (GET_CODE (x
))
4078 if ((SUBREG_PROMOTED_VAR_P (x
)
4079 || (REG_P (SUBREG_REG (x
)) && REG_POINTER (SUBREG_REG (x
)))
4080 || (GET_CODE (SUBREG_REG (x
)) == PLUS
4081 && REG_P (XEXP (SUBREG_REG (x
), 0))
4082 && REG_POINTER (XEXP (SUBREG_REG (x
), 0))
4083 && CONST_INT_P (XEXP (SUBREG_REG (x
), 1))))
4084 && GET_MODE (SUBREG_REG (x
)) == mode
)
4085 return SUBREG_REG (x
);
4088 temp
= gen_rtx_LABEL_REF (mode
, label_ref_label (x
));
4089 LABEL_REF_NONLOCAL_P (temp
) = LABEL_REF_NONLOCAL_P (x
);
4092 temp
= shallow_copy_rtx (x
);
4093 PUT_MODE (temp
, mode
);
4096 temp
= convert_debug_memory_address (mode
, XEXP (x
, 0), as
);
4098 temp
= gen_rtx_CONST (mode
, temp
);
4102 if (CONST_INT_P (XEXP (x
, 1)))
4104 temp
= convert_debug_memory_address (mode
, XEXP (x
, 0), as
);
4106 return gen_rtx_fmt_ee (GET_CODE (x
), mode
, temp
, XEXP (x
, 1));
4112 /* Don't know how to express ptr_extend as operation in debug info. */
4115 #endif /* POINTERS_EXTEND_UNSIGNED */
/* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
   by avoid_deep_ter_for_debug.  */

static hash_map<tree, tree> *deep_ter_debug_map;

/* Split too deep TER chains for debug stmts using debug temporaries.  */

static void
avoid_deep_ter_for_debug (gimple *stmt, int depth)
{
  use_operand_p use_p;
  ssa_op_iter iter;
  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
    {
      tree use = USE_FROM_PTR (use_p);
      if (TREE_CODE (use) != SSA_NAME || SSA_NAME_IS_DEFAULT_DEF (use))
        continue;
      gimple *g = get_gimple_for_ssa_name (use);
      if (g == NULL)
        continue;
      if (depth > 6 && !stmt_ends_bb_p (g))
        {
          if (deep_ter_debug_map == NULL)
            deep_ter_debug_map = new hash_map<tree, tree>;

          tree &vexpr = deep_ter_debug_map->get_or_insert (use);
          if (vexpr != NULL)
            continue;
          vexpr = make_node (DEBUG_EXPR_DECL);
          gimple *def_temp = gimple_build_debug_bind (vexpr, use, g);
          DECL_ARTIFICIAL (vexpr) = 1;
          TREE_TYPE (vexpr) = TREE_TYPE (use);
          SET_DECL_MODE (vexpr, TYPE_MODE (TREE_TYPE (use)));
          gimple_stmt_iterator gsi = gsi_for_stmt (g);
          gsi_insert_after (&gsi, def_temp, GSI_NEW_STMT);
          avoid_deep_ter_for_debug (def_temp, 0);
        }
      else
        avoid_deep_ter_for_debug (g, depth + 1);
    }
}
/* Return an RTX equivalent to the value of the parameter DECL.  */

static rtx
expand_debug_parm_decl (tree decl)
{
  rtx incoming = DECL_INCOMING_RTL (decl);

  if (incoming
      && GET_MODE (incoming) != BLKmode
      && ((REG_P (incoming) && HARD_REGISTER_P (incoming))
          || (MEM_P (incoming)
              && REG_P (XEXP (incoming, 0))
              && HARD_REGISTER_P (XEXP (incoming, 0)))))
    {
      rtx rtl = gen_rtx_ENTRY_VALUE (GET_MODE (incoming));

#ifdef HAVE_window_save
      /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
         If the target machine has an explicit window save instruction, the
         actual entry value is the corresponding OUTGOING_REGNO instead.  */
      if (REG_P (incoming)
          && OUTGOING_REGNO (REGNO (incoming)) != REGNO (incoming))
        incoming
          = gen_rtx_REG_offset (incoming, GET_MODE (incoming),
                                OUTGOING_REGNO (REGNO (incoming)), 0);
      else if (MEM_P (incoming))
        {
          rtx reg = XEXP (incoming, 0);
          if (OUTGOING_REGNO (REGNO (reg)) != REGNO (reg))
            {
              reg = gen_raw_REG (GET_MODE (reg), OUTGOING_REGNO (REGNO (reg)));
              incoming = replace_equiv_address_nv (incoming, reg);
            }
          else
            incoming = copy_rtx (incoming);
        }
#endif

      ENTRY_VALUE_EXP (rtl) = incoming;
      return rtl;
    }

  if (incoming
      && GET_MODE (incoming) != BLKmode
      && !TREE_ADDRESSABLE (decl)
      && MEM_P (incoming)
      && (XEXP (incoming, 0) == virtual_incoming_args_rtx
          || (GET_CODE (XEXP (incoming, 0)) == PLUS
              && XEXP (XEXP (incoming, 0), 0) == virtual_incoming_args_rtx
              && CONST_INT_P (XEXP (XEXP (incoming, 0), 1)))))
    return copy_rtx (incoming);

  return NULL_RTX;
}
/* Return an RTX equivalent to the value of the tree expression EXP.  */

static rtx
expand_debug_expr (tree exp)
{
  rtx op0 = NULL_RTX, op1 = NULL_RTX, op2 = NULL_RTX;
  machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
  machine_mode inner_mode = VOIDmode;
  int unsignedp = TYPE_UNSIGNED (TREE_TYPE (exp));
  addr_space_t as;
  scalar_int_mode op0_mode, op1_mode, addr_mode;

  switch (TREE_CODE_CLASS (TREE_CODE (exp)))
    {
    case tcc_expression:
      switch (TREE_CODE (exp))
        {
4237 case WIDEN_MULT_PLUS_EXPR
:
4238 case WIDEN_MULT_MINUS_EXPR
:
4241 case TRUTH_ANDIF_EXPR
:
4242 case TRUTH_ORIF_EXPR
:
4243 case TRUTH_AND_EXPR
:
4245 case TRUTH_XOR_EXPR
:
4248 case TRUTH_NOT_EXPR
:
4257 op2
= expand_debug_expr (TREE_OPERAND (exp
, 2));
4264 if (mode
== BLKmode
)
4266 op1
= expand_debug_expr (TREE_OPERAND (exp
, 1));
4269 switch (TREE_CODE (exp
))
4275 case WIDEN_LSHIFT_EXPR
:
4276 /* Ensure second operand isn't wider than the first one. */
4277 inner_mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1)));
4278 if (is_a
<scalar_int_mode
> (inner_mode
, &op1_mode
)
4279 && (GET_MODE_UNIT_PRECISION (mode
)
4280 < GET_MODE_PRECISION (op1_mode
)))
4281 op1
= lowpart_subreg (GET_MODE_INNER (mode
), op1
, op1_mode
);
4290 if (mode
== BLKmode
)
4292 inner_mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
4293 op0
= expand_debug_expr (TREE_OPERAND (exp
, 0));
4298 case tcc_comparison
:
4299 unsignedp
= TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0)));
4307 case tcc_exceptional
:
4308 case tcc_declaration
:
4314 switch (TREE_CODE (exp
))
4317 if (!lookup_constant_def (exp
))
4319 if (strlen (TREE_STRING_POINTER (exp
)) + 1
4320 != (size_t) TREE_STRING_LENGTH (exp
))
4322 op0
= gen_rtx_CONST_STRING (Pmode
, TREE_STRING_POINTER (exp
));
4323 op0
= gen_rtx_MEM (BLKmode
, op0
);
4324 set_mem_attributes (op0
, exp
, 0);
4332 op0
= expand_expr (exp
, NULL_RTX
, mode
, EXPAND_INITIALIZER
);
4336 return immed_wide_int_const (poly_int_cst_value (exp
), mode
);
4339 gcc_assert (COMPLEX_MODE_P (mode
));
4340 op0
= expand_debug_expr (TREE_REALPART (exp
));
4341 op1
= expand_debug_expr (TREE_IMAGPART (exp
));
4342 return gen_rtx_CONCAT (mode
, op0
, op1
);
4344 case DEBUG_EXPR_DECL
:
4345 op0
= DECL_RTL_IF_SET (exp
);
4350 op0
= gen_rtx_DEBUG_EXPR (mode
);
4351 DEBUG_EXPR_TREE_DECL (op0
) = exp
;
4352 SET_DECL_RTL (exp
, op0
);
4362 op0
= DECL_RTL_IF_SET (exp
);
4364 /* This decl was probably optimized away. */
4366 /* At least label RTXen are sometimes replaced by
4367 NOTE_INSN_DELETED_LABEL. Any notes here are not
4368 handled by copy_rtx. */
4372 || DECL_EXTERNAL (exp
)
4373 || !TREE_STATIC (exp
)
4375 || DECL_HARD_REGISTER (exp
)
4376 || DECL_IN_CONSTANT_POOL (exp
)
4377 || mode
== VOIDmode
)
4380 op0
= make_decl_rtl_for_debug (exp
);
4382 || GET_CODE (XEXP (op0
, 0)) != SYMBOL_REF
4383 || SYMBOL_REF_DECL (XEXP (op0
, 0)) != exp
)
4387 op0
= copy_rtx (op0
);
4389 if (GET_MODE (op0
) == BLKmode
4390 /* If op0 is not BLKmode, but mode is, adjust_mode
4391 below would ICE. While it is likely a FE bug,
4392 try to be robust here. See PR43166. */
4394 || (mode
== VOIDmode
&& GET_MODE (op0
) != VOIDmode
))
4396 gcc_assert (MEM_P (op0
));
4397 op0
= adjust_address_nv (op0
, mode
, 0);
4407 inner_mode
= GET_MODE (op0
);
4409 if (mode
== inner_mode
)
4412 if (inner_mode
== VOIDmode
)
4414 if (TREE_CODE (exp
) == SSA_NAME
)
4415 inner_mode
= TYPE_MODE (TREE_TYPE (exp
));
4417 inner_mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
4418 if (mode
== inner_mode
)
4422 if (FLOAT_MODE_P (mode
) && FLOAT_MODE_P (inner_mode
))
4424 if (GET_MODE_UNIT_BITSIZE (mode
)
4425 == GET_MODE_UNIT_BITSIZE (inner_mode
))
4426 op0
= simplify_gen_subreg (mode
, op0
, inner_mode
, 0);
4427 else if (GET_MODE_UNIT_BITSIZE (mode
)
4428 < GET_MODE_UNIT_BITSIZE (inner_mode
))
4429 op0
= simplify_gen_unary (FLOAT_TRUNCATE
, mode
, op0
, inner_mode
);
4431 op0
= simplify_gen_unary (FLOAT_EXTEND
, mode
, op0
, inner_mode
);
4433 else if (FLOAT_MODE_P (mode
))
4435 gcc_assert (TREE_CODE (exp
) != SSA_NAME
);
4436 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))))
4437 op0
= simplify_gen_unary (UNSIGNED_FLOAT
, mode
, op0
, inner_mode
);
4439 op0
= simplify_gen_unary (FLOAT
, mode
, op0
, inner_mode
);
4441 else if (FLOAT_MODE_P (inner_mode
))
4444 op0
= simplify_gen_unary (UNSIGNED_FIX
, mode
, op0
, inner_mode
);
4446 op0
= simplify_gen_unary (FIX
, mode
, op0
, inner_mode
);
4448 else if (GET_MODE_UNIT_PRECISION (mode
)
4449 == GET_MODE_UNIT_PRECISION (inner_mode
))
4450 op0
= lowpart_subreg (mode
, op0
, inner_mode
);
4451 else if (GET_MODE_UNIT_PRECISION (mode
)
4452 < GET_MODE_UNIT_PRECISION (inner_mode
))
4453 op0
= simplify_gen_unary (TRUNCATE
, mode
, op0
, inner_mode
);
4454 else if (UNARY_CLASS_P (exp
)
4455 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0)))
4457 op0
= simplify_gen_unary (ZERO_EXTEND
, mode
, op0
, inner_mode
);
4459 op0
= simplify_gen_unary (SIGN_EXTEND
, mode
, op0
, inner_mode
);
4465 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp
, 0)))
4467 tree newexp
= fold_binary (MEM_REF
, TREE_TYPE (exp
),
4468 TREE_OPERAND (exp
, 0),
4469 TREE_OPERAND (exp
, 1));
4471 return expand_debug_expr (newexp
);
4475 inner_mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
4476 op0
= expand_debug_expr (TREE_OPERAND (exp
, 0));
4480 if (TREE_CODE (exp
) == MEM_REF
)
4482 if (GET_CODE (op0
) == DEBUG_IMPLICIT_PTR
4483 || (GET_CODE (op0
) == PLUS
4484 && GET_CODE (XEXP (op0
, 0)) == DEBUG_IMPLICIT_PTR
))
4485 /* (mem (debug_implicit_ptr)) might confuse aliasing.
4486 Instead just use get_inner_reference. */
4489 op1
= expand_debug_expr (TREE_OPERAND (exp
, 1));
4491 if (!op1
|| !poly_int_rtx_p (op1
, &offset
))
4494 op0
= plus_constant (inner_mode
, op0
, offset
);
4497 as
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
4499 op0
= convert_debug_memory_address (targetm
.addr_space
.address_mode (as
),
4501 if (op0
== NULL_RTX
)
4504 op0
= gen_rtx_MEM (mode
, op0
);
4505 set_mem_attributes (op0
, exp
, 0);
4506 if (TREE_CODE (exp
) == MEM_REF
4507 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp
, 0)))
4508 set_mem_expr (op0
, NULL_TREE
);
4509 set_mem_addr_space (op0
, as
);
4513 case TARGET_MEM_REF
:
4514 if (TREE_CODE (TMR_BASE (exp
)) == ADDR_EXPR
4515 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp
), 0)))
4518 op0
= expand_debug_expr
4519 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp
)), exp
));
4523 as
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
4524 op0
= convert_debug_memory_address (targetm
.addr_space
.address_mode (as
),
4526 if (op0
== NULL_RTX
)
4529 op0
= gen_rtx_MEM (mode
, op0
);
4531 set_mem_attributes (op0
, exp
, 0);
4532 set_mem_addr_space (op0
, as
);
4538 case ARRAY_RANGE_REF
:
4543 case VIEW_CONVERT_EXPR
:
4546 poly_int64 bitsize
, bitpos
;
4548 int reversep
, volatilep
= 0;
4550 = get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
, &mode1
,
4551 &unsignedp
, &reversep
, &volatilep
);
4554 if (known_eq (bitsize
, 0))
4557 orig_op0
= op0
= expand_debug_expr (tem
);
4564 machine_mode addrmode
, offmode
;
4569 op0
= XEXP (op0
, 0);
4570 addrmode
= GET_MODE (op0
);
4571 if (addrmode
== VOIDmode
)
4574 op1
= expand_debug_expr (offset
);
4578 offmode
= GET_MODE (op1
);
4579 if (offmode
== VOIDmode
)
4580 offmode
= TYPE_MODE (TREE_TYPE (offset
));
4582 if (addrmode
!= offmode
)
4583 op1
= lowpart_subreg (addrmode
, op1
, offmode
);
4585 /* Don't use offset_address here, we don't need a
4586 recognizable address, and we don't want to generate
4588 op0
= gen_rtx_MEM (mode
, simplify_gen_binary (PLUS
, addrmode
,
4594 if (mode1
== VOIDmode
)
4596 if (maybe_gt (bitsize
, MAX_BITSIZE_MODE_ANY_INT
))
4599 mode1
= smallest_int_mode_for_size (bitsize
);
4601 poly_int64 bytepos
= bits_to_bytes_round_down (bitpos
);
4602 if (maybe_ne (bytepos
, 0))
4604 op0
= adjust_address_nv (op0
, mode1
, bytepos
);
4605 bitpos
= num_trailing_bits (bitpos
);
4607 else if (known_eq (bitpos
, 0)
4608 && known_eq (bitsize
, GET_MODE_BITSIZE (mode
)))
4609 op0
= adjust_address_nv (op0
, mode
, 0);
4610 else if (GET_MODE (op0
) != mode1
)
4611 op0
= adjust_address_nv (op0
, mode1
, 0);
4613 op0
= copy_rtx (op0
);
4614 if (op0
== orig_op0
)
4615 op0
= shallow_copy_rtx (op0
);
4616 set_mem_attributes (op0
, exp
, 0);
4619 if (known_eq (bitpos
, 0) && mode
== GET_MODE (op0
))
4622 if (maybe_lt (bitpos
, 0))
4625 if (GET_MODE (op0
) == BLKmode
|| mode
== BLKmode
)
4629 if (multiple_p (bitpos
, BITS_PER_UNIT
, &bytepos
)
4630 && known_eq (bitsize
, GET_MODE_BITSIZE (mode1
)))
4632 machine_mode opmode
= GET_MODE (op0
);
4634 if (opmode
== VOIDmode
)
4635 opmode
= TYPE_MODE (TREE_TYPE (tem
));
4637 /* This condition may hold if we're expanding the address
4638 right past the end of an array that turned out not to
4639 be addressable (i.e., the address was only computed in
4640 debug stmts). The gen_subreg below would rightfully
4641 crash, and the address doesn't really exist, so just
4643 if (known_ge (bitpos
, GET_MODE_BITSIZE (opmode
)))
4646 if (multiple_p (bitpos
, GET_MODE_BITSIZE (mode
)))
4647 return simplify_gen_subreg (mode
, op0
, opmode
, bytepos
);
4650 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0
))
4651 && TYPE_UNSIGNED (TREE_TYPE (exp
))
4653 : ZERO_EXTRACT
, mode
,
4654 GET_MODE (op0
) != VOIDmode
4656 : TYPE_MODE (TREE_TYPE (tem
)),
4657 op0
, gen_int_mode (bitsize
, word_mode
),
4658 gen_int_mode (bitpos
, word_mode
));
4663 return simplify_gen_unary (ABS
, mode
, op0
, mode
);
4666 return simplify_gen_unary (NEG
, mode
, op0
, mode
);
4669 return simplify_gen_unary (NOT
, mode
, op0
, mode
);
4672 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
,
4674 ? UNSIGNED_FLOAT
: FLOAT
, mode
, op0
,
4677 case FIX_TRUNC_EXPR
:
4678 return simplify_gen_unary (unsignedp
? UNSIGNED_FIX
: FIX
, mode
, op0
,
4681 case POINTER_PLUS_EXPR
:
4682 /* For the rare target where pointers are not the same size as
4683 size_t, we need to check for mis-matched modes and correct
4686 && is_a
<scalar_int_mode
> (GET_MODE (op0
), &op0_mode
)
4687 && is_a
<scalar_int_mode
> (GET_MODE (op1
), &op1_mode
)
4688 && op0_mode
!= op1_mode
)
4690 if (GET_MODE_BITSIZE (op0_mode
) < GET_MODE_BITSIZE (op1_mode
)
4691 /* If OP0 is a partial mode, then we must truncate, even
4692 if it has the same bitsize as OP1 as GCC's
4693 representation of partial modes is opaque. */
4694 || (GET_MODE_CLASS (op0_mode
) == MODE_PARTIAL_INT
4695 && (GET_MODE_BITSIZE (op0_mode
)
4696 == GET_MODE_BITSIZE (op1_mode
))))
4697 op1
= simplify_gen_unary (TRUNCATE
, op0_mode
, op1
, op1_mode
);
4699 /* We always sign-extend, regardless of the signedness of
4700 the operand, because the operand is always unsigned
4701 here even if the original C expression is signed. */
4702 op1
= simplify_gen_unary (SIGN_EXTEND
, op0_mode
, op1
, op1_mode
);
4706 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
4709 case POINTER_DIFF_EXPR
:
4710 return simplify_gen_binary (MINUS
, mode
, op0
, op1
);
4713 return simplify_gen_binary (MULT
, mode
, op0
, op1
);
4716 case TRUNC_DIV_EXPR
:
4717 case EXACT_DIV_EXPR
:
4719 return simplify_gen_binary (UDIV
, mode
, op0
, op1
);
4721 return simplify_gen_binary (DIV
, mode
, op0
, op1
);
4723 case TRUNC_MOD_EXPR
:
4724 return simplify_gen_binary (unsignedp
? UMOD
: MOD
, mode
, op0
, op1
);
4726 case FLOOR_DIV_EXPR
:
4728 return simplify_gen_binary (UDIV
, mode
, op0
, op1
);
4731 rtx div
= simplify_gen_binary (DIV
, mode
, op0
, op1
);
4732 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
4733 rtx adj
= floor_sdiv_adjust (mode
, mod
, op1
);
4734 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
4737 case FLOOR_MOD_EXPR
:
4739 return simplify_gen_binary (UMOD
, mode
, op0
, op1
);
4742 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
4743 rtx adj
= floor_sdiv_adjust (mode
, mod
, op1
);
4744 adj
= simplify_gen_unary (NEG
, mode
,
4745 simplify_gen_binary (MULT
, mode
, adj
, op1
),
4747 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
4753 rtx div
= simplify_gen_binary (UDIV
, mode
, op0
, op1
);
4754 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
4755 rtx adj
= ceil_udiv_adjust (mode
, mod
, op1
);
4756 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
4760 rtx div
= simplify_gen_binary (DIV
, mode
, op0
, op1
);
4761 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
4762 rtx adj
= ceil_sdiv_adjust (mode
, mod
, op1
);
4763 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
4769 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
4770 rtx adj
= ceil_udiv_adjust (mode
, mod
, op1
);
4771 adj
= simplify_gen_unary (NEG
, mode
,
4772 simplify_gen_binary (MULT
, mode
, adj
, op1
),
4774 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
4778 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
4779 rtx adj
= ceil_sdiv_adjust (mode
, mod
, op1
);
4780 adj
= simplify_gen_unary (NEG
, mode
,
4781 simplify_gen_binary (MULT
, mode
, adj
, op1
),
4783 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
4786 case ROUND_DIV_EXPR
:
4789 rtx div
= simplify_gen_binary (UDIV
, mode
, op0
, op1
);
4790 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
4791 rtx adj
= round_udiv_adjust (mode
, mod
, op1
);
4792 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
4796 rtx div
= simplify_gen_binary (DIV
, mode
, op0
, op1
);
4797 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
4798 rtx adj
= round_sdiv_adjust (mode
, mod
, op1
);
4799 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
4802 case ROUND_MOD_EXPR
:
4805 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
4806 rtx adj
= round_udiv_adjust (mode
, mod
, op1
);
4807 adj
= simplify_gen_unary (NEG
, mode
,
4808 simplify_gen_binary (MULT
, mode
, adj
, op1
),
4810 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
4814 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
4815 rtx adj
= round_sdiv_adjust (mode
, mod
, op1
);
4816 adj
= simplify_gen_unary (NEG
, mode
,
4817 simplify_gen_binary (MULT
, mode
, adj
, op1
),
4819 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
4823 return simplify_gen_binary (ASHIFT
, mode
, op0
, op1
);
4827 return simplify_gen_binary (LSHIFTRT
, mode
, op0
, op1
);
4829 return simplify_gen_binary (ASHIFTRT
, mode
, op0
, op1
);
4832 return simplify_gen_binary (ROTATE
, mode
, op0
, op1
);
4835 return simplify_gen_binary (ROTATERT
, mode
, op0
, op1
);
4838 return simplify_gen_binary (unsignedp
? UMIN
: SMIN
, mode
, op0
, op1
);
4841 return simplify_gen_binary (unsignedp
? UMAX
: SMAX
, mode
, op0
, op1
);
4844 case TRUTH_AND_EXPR
:
4845 return simplify_gen_binary (AND
, mode
, op0
, op1
);
4849 return simplify_gen_binary (IOR
, mode
, op0
, op1
);
4852 case TRUTH_XOR_EXPR
:
4853 return simplify_gen_binary (XOR
, mode
, op0
, op1
);
4855 case TRUTH_ANDIF_EXPR
:
4856 return gen_rtx_IF_THEN_ELSE (mode
, op0
, op1
, const0_rtx
);
4858 case TRUTH_ORIF_EXPR
:
4859 return gen_rtx_IF_THEN_ELSE (mode
, op0
, const_true_rtx
, op1
);
4861 case TRUTH_NOT_EXPR
:
4862 return simplify_gen_relational (EQ
, mode
, inner_mode
, op0
, const0_rtx
);
4865 return simplify_gen_relational (unsignedp
? LTU
: LT
, mode
, inner_mode
,
4869 return simplify_gen_relational (unsignedp
? LEU
: LE
, mode
, inner_mode
,
4873 return simplify_gen_relational (unsignedp
? GTU
: GT
, mode
, inner_mode
,
4877 return simplify_gen_relational (unsignedp
? GEU
: GE
, mode
, inner_mode
,
4881 return simplify_gen_relational (EQ
, mode
, inner_mode
, op0
, op1
);
4884 return simplify_gen_relational (NE
, mode
, inner_mode
, op0
, op1
);
4886 case UNORDERED_EXPR
:
4887 return simplify_gen_relational (UNORDERED
, mode
, inner_mode
, op0
, op1
);
4890 return simplify_gen_relational (ORDERED
, mode
, inner_mode
, op0
, op1
);
4893 return simplify_gen_relational (UNLT
, mode
, inner_mode
, op0
, op1
);
4896 return simplify_gen_relational (UNLE
, mode
, inner_mode
, op0
, op1
);
4899 return simplify_gen_relational (UNGT
, mode
, inner_mode
, op0
, op1
);
4902 return simplify_gen_relational (UNGE
, mode
, inner_mode
, op0
, op1
);
4905 return simplify_gen_relational (UNEQ
, mode
, inner_mode
, op0
, op1
);
4908 return simplify_gen_relational (LTGT
, mode
, inner_mode
, op0
, op1
);
4911 return gen_rtx_IF_THEN_ELSE (mode
, op0
, op1
, op2
);
4914 gcc_assert (COMPLEX_MODE_P (mode
));
4915 if (GET_MODE (op0
) == VOIDmode
)
4916 op0
= gen_rtx_CONST (GET_MODE_INNER (mode
), op0
);
4917 if (GET_MODE (op1
) == VOIDmode
)
4918 op1
= gen_rtx_CONST (GET_MODE_INNER (mode
), op1
);
4919 return gen_rtx_CONCAT (mode
, op0
, op1
);
4922 if (GET_CODE (op0
) == CONCAT
)
4923 return gen_rtx_CONCAT (mode
, XEXP (op0
, 0),
4924 simplify_gen_unary (NEG
, GET_MODE_INNER (mode
),
4926 GET_MODE_INNER (mode
)));
4929 scalar_mode imode
= GET_MODE_INNER (mode
);
4934 re
= adjust_address_nv (op0
, imode
, 0);
4935 im
= adjust_address_nv (op0
, imode
, GET_MODE_SIZE (imode
));
4939 scalar_int_mode ifmode
;
4940 scalar_int_mode ihmode
;
4942 if (!int_mode_for_mode (mode
).exists (&ifmode
)
4943 || !int_mode_for_mode (imode
).exists (&ihmode
))
4945 halfsize
= GEN_INT (GET_MODE_BITSIZE (ihmode
));
4948 re
= gen_rtx_SUBREG (ifmode
, re
, 0);
4949 re
= gen_rtx_ZERO_EXTRACT (ihmode
, re
, halfsize
, const0_rtx
);
4950 if (imode
!= ihmode
)
4951 re
= gen_rtx_SUBREG (imode
, re
, 0);
4952 im
= copy_rtx (op0
);
4954 im
= gen_rtx_SUBREG (ifmode
, im
, 0);
4955 im
= gen_rtx_ZERO_EXTRACT (ihmode
, im
, halfsize
, halfsize
);
4956 if (imode
!= ihmode
)
4957 im
= gen_rtx_SUBREG (imode
, im
, 0);
4959 im
= gen_rtx_NEG (imode
, im
);
4960 return gen_rtx_CONCAT (mode
, re
, im
);
4964 op0
= expand_debug_expr (TREE_OPERAND (exp
, 0));
4965 if (!op0
|| !MEM_P (op0
))
4967 if ((TREE_CODE (TREE_OPERAND (exp
, 0)) == VAR_DECL
4968 || TREE_CODE (TREE_OPERAND (exp
, 0)) == PARM_DECL
4969 || TREE_CODE (TREE_OPERAND (exp
, 0)) == RESULT_DECL
)
4970 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp
, 0))
4971 || target_for_debug_bind (TREE_OPERAND (exp
, 0))))
4972 return gen_rtx_DEBUG_IMPLICIT_PTR (mode
, TREE_OPERAND (exp
, 0));
4974 if (handled_component_p (TREE_OPERAND (exp
, 0)))
4976 poly_int64 bitoffset
, bitsize
, maxsize
, byteoffset
;
4979 = get_ref_base_and_extent (TREE_OPERAND (exp
, 0), &bitoffset
,
4980 &bitsize
, &maxsize
, &reverse
);
4982 || TREE_CODE (decl
) == PARM_DECL
4983 || TREE_CODE (decl
) == RESULT_DECL
)
4984 && (!TREE_ADDRESSABLE (decl
)
4985 || target_for_debug_bind (decl
))
4986 && multiple_p (bitoffset
, BITS_PER_UNIT
, &byteoffset
)
4987 && known_gt (bitsize
, 0)
4988 && known_eq (bitsize
, maxsize
))
4990 rtx base
= gen_rtx_DEBUG_IMPLICIT_PTR (mode
, decl
);
4991 return plus_constant (mode
, base
, byteoffset
);
4995 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == MEM_REF
4996 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
4999 op0
= expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0),
5002 && (GET_CODE (op0
) == DEBUG_IMPLICIT_PTR
5003 || (GET_CODE (op0
) == PLUS
5004 && GET_CODE (XEXP (op0
, 0)) == DEBUG_IMPLICIT_PTR
5005 && CONST_INT_P (XEXP (op0
, 1)))))
5007 op1
= expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0),
5010 if (!op1
|| !poly_int_rtx_p (op1
, &offset
))
5013 return plus_constant (mode
, op0
, offset
);
5020 as
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp
)));
5021 addr_mode
= SCALAR_INT_TYPE_MODE (TREE_TYPE (exp
));
5022 op0
= convert_debug_memory_address (addr_mode
, XEXP (op0
, 0), as
);
5028 unsigned HOST_WIDE_INT i
, nelts
;
5030 if (!VECTOR_CST_NELTS (exp
).is_constant (&nelts
))
5033 op0
= gen_rtx_CONCATN (mode
, rtvec_alloc (nelts
));
5035 for (i
= 0; i
< nelts
; ++i
)
5037 op1
= expand_debug_expr (VECTOR_CST_ELT (exp
, i
));
5040 XVECEXP (op0
, 0, i
) = op1
;
5047 if (TREE_CLOBBER_P (exp
))
5049 else if (TREE_CODE (TREE_TYPE (exp
)) == VECTOR_TYPE
)
5052 unsigned HOST_WIDE_INT nelts
;
5055 if (!TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp
)).is_constant (&nelts
))
5056 goto flag_unsupported
;
5058 op0
= gen_rtx_CONCATN (mode
, rtvec_alloc (nelts
));
5060 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp
), i
, val
)
5062 op1
= expand_debug_expr (val
);
5065 XVECEXP (op0
, 0, i
) = op1
;
5070 op1
= expand_debug_expr
5071 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp
))));
5076 for (; i
< nelts
; i
++)
5077 XVECEXP (op0
, 0, i
) = op1
;
5083 goto flag_unsupported
;
5086 /* ??? Maybe handle some builtins? */
5091 gimple
*g
= get_gimple_for_ssa_name (exp
);
5095 if (deep_ter_debug_map
)
5097 tree
*slot
= deep_ter_debug_map
->get (exp
);
5102 t
= gimple_assign_rhs_to_tree (g
);
5103 op0
= expand_debug_expr (t
);
5109 /* If this is a reference to an incoming value of
5110 parameter that is never used in the code or where the
5111 incoming value is never used in the code, use
5112 PARM_DECL's DECL_RTL if set. */
5113 if (SSA_NAME_IS_DEFAULT_DEF (exp
)
5114 && SSA_NAME_VAR (exp
)
5115 && TREE_CODE (SSA_NAME_VAR (exp
)) == PARM_DECL
5116 && has_zero_uses (exp
))
5118 op0
= expand_debug_parm_decl (SSA_NAME_VAR (exp
));
5121 op0
= expand_debug_expr (SSA_NAME_VAR (exp
));
5126 int part
= var_to_partition (SA
.map
, exp
);
5128 if (part
== NO_PARTITION
)
5131 gcc_assert (part
>= 0 && (unsigned)part
< SA
.map
->num_partitions
);
5133 op0
= copy_rtx (SA
.partition_to_pseudo
[part
]);
5141 /* Vector stuff. For most of the codes we don't have rtl codes. */
5142 case REALIGN_LOAD_EXPR
:
5144 case VEC_PACK_FIX_TRUNC_EXPR
:
5145 case VEC_PACK_FLOAT_EXPR
:
5146 case VEC_PACK_SAT_EXPR
:
5147 case VEC_PACK_TRUNC_EXPR
:
5148 case VEC_UNPACK_FIX_TRUNC_HI_EXPR
:
5149 case VEC_UNPACK_FIX_TRUNC_LO_EXPR
:
5150 case VEC_UNPACK_FLOAT_HI_EXPR
:
5151 case VEC_UNPACK_FLOAT_LO_EXPR
:
5152 case VEC_UNPACK_HI_EXPR
:
5153 case VEC_UNPACK_LO_EXPR
:
5154 case VEC_WIDEN_MULT_HI_EXPR
:
5155 case VEC_WIDEN_MULT_LO_EXPR
:
5156 case VEC_WIDEN_MULT_EVEN_EXPR
:
5157 case VEC_WIDEN_MULT_ODD_EXPR
:
5158 case VEC_WIDEN_LSHIFT_HI_EXPR
:
5159 case VEC_WIDEN_LSHIFT_LO_EXPR
:
5161 case VEC_DUPLICATE_EXPR
:
5162 case VEC_SERIES_EXPR
:
5167 case ADDR_SPACE_CONVERT_EXPR
:
5168 case FIXED_CONVERT_EXPR
:
5170 case WITH_SIZE_EXPR
:
5171 case BIT_INSERT_EXPR
:
5175 if (SCALAR_INT_MODE_P (GET_MODE (op0
))
5176 && SCALAR_INT_MODE_P (mode
))
5179 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
,
5181 ? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
,
5184 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
,
5186 ? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op1
,
5188 op0
= simplify_gen_binary (MULT
, mode
, op0
, op1
);
5189 return simplify_gen_binary (PLUS
, mode
, op0
, op2
);
5193 case WIDEN_MULT_EXPR
:
5194 case WIDEN_MULT_PLUS_EXPR
:
5195 case WIDEN_MULT_MINUS_EXPR
:
5196 if (SCALAR_INT_MODE_P (GET_MODE (op0
))
5197 && SCALAR_INT_MODE_P (mode
))
5199 inner_mode
= GET_MODE (op0
);
5200 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))))
5201 op0
= simplify_gen_unary (ZERO_EXTEND
, mode
, op0
, inner_mode
);
5203 op0
= simplify_gen_unary (SIGN_EXTEND
, mode
, op0
, inner_mode
);
5204 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1))))
5205 op1
= simplify_gen_unary (ZERO_EXTEND
, mode
, op1
, inner_mode
);
5207 op1
= simplify_gen_unary (SIGN_EXTEND
, mode
, op1
, inner_mode
);
5208 op0
= simplify_gen_binary (MULT
, mode
, op0
, op1
);
5209 if (TREE_CODE (exp
) == WIDEN_MULT_EXPR
)
5211 else if (TREE_CODE (exp
) == WIDEN_MULT_PLUS_EXPR
)
5212 return simplify_gen_binary (PLUS
, mode
, op0
, op2
);
5214 return simplify_gen_binary (MINUS
, mode
, op2
, op0
);
5218 case MULT_HIGHPART_EXPR
:
5219 /* ??? Similar to the above. */
5222 case WIDEN_SUM_EXPR
:
5223 case WIDEN_LSHIFT_EXPR
:
5224 if (SCALAR_INT_MODE_P (GET_MODE (op0
))
5225 && SCALAR_INT_MODE_P (mode
))
5228 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
,
5230 ? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
,
5232 return simplify_gen_binary (TREE_CODE (exp
) == WIDEN_LSHIFT_EXPR
5233 ? ASHIFT
: PLUS
, mode
, op0
, op1
);
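/* Illustrative sketch, not part of the pass: the WIDEN_MULT_* handling above
   rebuilds a widening multiply-accumulate from ordinary operations by
   extending both operands to the wider mode before multiplying.  The same
   identity in plain C, with example types chosen for illustration:  */

static inline long long
example_widen_mult_plus (int a, int b, long long acc)
{
  /* Sign-extend each 32-bit operand to 64 bits (a zero extension would be
     used for unsigned operands), multiply in the wide mode, then add the
     accumulator, mirroring the PLUS of the widened MULT above.  */
  return (long long) a * (long long) b + acc;
}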
/* Return an RTX equivalent to the source bind value of the tree expression
   EXP.  */

static rtx
expand_debug_source_expr (tree exp)
{
  rtx op0 = NULL_RTX;
  machine_mode mode = VOIDmode, inner_mode;

  switch (TREE_CODE (exp))
    {
    case VAR_DECL:
      if (DECL_ABSTRACT_ORIGIN (exp))
        return expand_debug_source_expr (DECL_ABSTRACT_ORIGIN (exp));
      break;
    case PARM_DECL:
      {
        mode = DECL_MODE (exp);
        op0 = expand_debug_parm_decl (exp);
        if (op0)
          break;
        /* See if this isn't an argument that has been completely
           optimized out.  */
        if (!DECL_RTL_SET_P (exp)
            && !DECL_INCOMING_RTL (exp)
            && DECL_ABSTRACT_ORIGIN (current_function_decl))
          {
            tree aexp = DECL_ORIGIN (exp);
            if (DECL_CONTEXT (aexp)
                == DECL_ABSTRACT_ORIGIN (current_function_decl))
              {
                vec<tree, va_gc> **debug_args;
                unsigned int ix;
                tree ddecl;
                debug_args = decl_debug_args_lookup (current_function_decl);
                if (debug_args != NULL)
                  {
                    for (ix = 0; vec_safe_iterate (*debug_args, ix, &ddecl);
                         ix += 2)
                      if (ddecl == aexp)
                        return gen_rtx_DEBUG_PARAMETER_REF (mode, aexp);
                  }
              }
          }
        break;
      }
    default:
      break;
    }

  if (op0 == NULL_RTX)
    return NULL_RTX;

  inner_mode = GET_MODE (op0);
  if (mode == inner_mode)
    return op0;

  if (FLOAT_MODE_P (mode) && FLOAT_MODE_P (inner_mode))
    {
      if (GET_MODE_UNIT_BITSIZE (mode)
          == GET_MODE_UNIT_BITSIZE (inner_mode))
        op0 = simplify_gen_subreg (mode, op0, inner_mode, 0);
      else if (GET_MODE_UNIT_BITSIZE (mode)
               < GET_MODE_UNIT_BITSIZE (inner_mode))
        op0 = simplify_gen_unary (FLOAT_TRUNCATE, mode, op0, inner_mode);
      else
        op0 = simplify_gen_unary (FLOAT_EXTEND, mode, op0, inner_mode);
    }
  else if (FLOAT_MODE_P (mode))
    gcc_unreachable ();
  else if (FLOAT_MODE_P (inner_mode))
    {
      if (TYPE_UNSIGNED (TREE_TYPE (exp)))
        op0 = simplify_gen_unary (UNSIGNED_FIX, mode, op0, inner_mode);
      else
        op0 = simplify_gen_unary (FIX, mode, op0, inner_mode);
    }
  else if (GET_MODE_UNIT_PRECISION (mode)
           == GET_MODE_UNIT_PRECISION (inner_mode))
    op0 = lowpart_subreg (mode, op0, inner_mode);
  else if (GET_MODE_UNIT_PRECISION (mode)
           < GET_MODE_UNIT_PRECISION (inner_mode))
    op0 = simplify_gen_unary (TRUNCATE, mode, op0, inner_mode);
  else if (TYPE_UNSIGNED (TREE_TYPE (exp)))
    op0 = simplify_gen_unary (ZERO_EXTEND, mode, op0, inner_mode);
  else
    op0 = simplify_gen_unary (SIGN_EXTEND, mode, op0, inner_mode);

  return op0;
}
/* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbound complexity.
   Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
   deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN.  */

static void
avoid_complex_debug_insns (rtx_insn *insn, rtx *exp_p, int depth)
{
  rtx exp = *exp_p;

  if (exp == NULL_RTX)
    return;

  if ((OBJECT_P (exp) && !MEM_P (exp)) || GET_CODE (exp) == CLOBBER)
    return;

  if (depth == 4)
    {
      /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL).  */
      rtx dval = make_debug_expr_from_rtl (exp);

      /* Emit a debug bind insn before INSN.  */
      rtx bind = gen_rtx_VAR_LOCATION (GET_MODE (exp),
                                       DEBUG_EXPR_TREE_DECL (dval), exp,
                                       VAR_INIT_STATUS_INITIALIZED);

      emit_debug_insn_before (bind, insn);

      *exp_p = dval;
      return;
    }

  const char *format_ptr = GET_RTX_FORMAT (GET_CODE (exp));
  int i, j;
  for (i = 0; i < GET_RTX_LENGTH (GET_CODE (exp)); i++)
    switch (*format_ptr++)
      {
      case 'e':
        avoid_complex_debug_insns (insn, &XEXP (exp, i), depth + 1);
        break;

      case 'E':
      case 'V':
        for (j = 0; j < XVECLEN (exp, i); j++)
          avoid_complex_debug_insns (insn, &XVECEXP (exp, i, j), depth + 1);
        break;

      default:
        break;
      }
}
5389 /* Expand the _LOCs in debug insns. We run this after expanding all
5390 regular insns, so that any variables referenced in the function
5391 will have their DECL_RTLs set. */
5394 expand_debug_locations (void)
5397 rtx_insn
*last
= get_last_insn ();
5398 int save_strict_alias
= flag_strict_aliasing
;
5400 /* New alias sets while setting up memory attributes cause
5401 -fcompare-debug failures, even though it doesn't bring about any
5403 flag_strict_aliasing
= 0;
5405 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
5406 if (DEBUG_BIND_INSN_P (insn
))
5408 tree value
= (tree
)INSN_VAR_LOCATION_LOC (insn
);
5410 rtx_insn
*prev_insn
, *insn2
;
5413 if (value
== NULL_TREE
)
5417 if (INSN_VAR_LOCATION_STATUS (insn
)
5418 == VAR_INIT_STATUS_UNINITIALIZED
)
5419 val
= expand_debug_source_expr (value
);
5420 /* The avoid_deep_ter_for_debug function inserts
5421 debug bind stmts after SSA_NAME definition, with the
5422 SSA_NAME as the whole bind location. Disable temporarily
5423 expansion of that SSA_NAME into the DEBUG_EXPR_DECL
5424 being defined in this DEBUG_INSN. */
5425 else if (deep_ter_debug_map
&& TREE_CODE (value
) == SSA_NAME
)
5427 tree
*slot
= deep_ter_debug_map
->get (value
);
5430 if (*slot
== INSN_VAR_LOCATION_DECL (insn
))
5435 val
= expand_debug_expr (value
);
5437 *slot
= INSN_VAR_LOCATION_DECL (insn
);
5440 val
= expand_debug_expr (value
);
5441 gcc_assert (last
== get_last_insn ());
5445 val
= gen_rtx_UNKNOWN_VAR_LOC ();
5448 mode
= GET_MODE (INSN_VAR_LOCATION (insn
));
5450 gcc_assert (mode
== GET_MODE (val
)
5451 || (GET_MODE (val
) == VOIDmode
5452 && (CONST_SCALAR_INT_P (val
)
5453 || GET_CODE (val
) == CONST_FIXED
5454 || GET_CODE (val
) == LABEL_REF
)));
5457 INSN_VAR_LOCATION_LOC (insn
) = val
;
5458 prev_insn
= PREV_INSN (insn
);
5459 for (insn2
= insn
; insn2
!= prev_insn
; insn2
= PREV_INSN (insn2
))
5460 avoid_complex_debug_insns (insn2
, &INSN_VAR_LOCATION_LOC (insn2
), 0);
5463 flag_strict_aliasing
= save_strict_alias
;
/* Swap the operands of commutative operations so that the more expensive
   one is expanded first.  */

static void
reorder_operands (basic_block bb)
{
  unsigned int *lattice;  /* Hold cost of each statement.  */
  unsigned int i = 0, n = 0;
  gimple_stmt_iterator gsi;
  gimple_seq stmts;
  gimple *stmt;
  bool swap;
  tree op0, op1;
  ssa_op_iter iter;
  use_operand_p use_p;
  gimple *def0, *def1;

  /* Compute cost of each statement using estimate_num_insns.  */
  stmts = bb_seq (bb);
  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      stmt = gsi_stmt (gsi);
      if (!is_gimple_debug (stmt))
        gimple_set_uid (stmt, n++);
    }
  lattice = XNEWVEC (unsigned int, n);
  for (gsi = gsi_start (stmts); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      unsigned cost;
      stmt = gsi_stmt (gsi);
      if (is_gimple_debug (stmt))
        continue;
      cost = estimate_num_insns (stmt, &eni_size_weights);
      lattice[i] = cost;
      FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
        {
          tree use = USE_FROM_PTR (use_p);
          gimple *def_stmt;
          if (TREE_CODE (use) != SSA_NAME)
            continue;
          def_stmt = get_gimple_for_ssa_name (use);
          if (!def_stmt)
            continue;
          lattice[i] += lattice[gimple_uid (def_stmt)];
        }
      i++;
      if (!is_gimple_assign (stmt)
          || !commutative_tree_code (gimple_assign_rhs_code (stmt)))
        continue;
      op0 = gimple_op (stmt, 1);
      op1 = gimple_op (stmt, 2);
      if (TREE_CODE (op0) != SSA_NAME
          || TREE_CODE (op1) != SSA_NAME)
        continue;
      /* Swap operands if the second one is more expensive.  */
      def0 = get_gimple_for_ssa_name (op0);
      def1 = get_gimple_for_ssa_name (op1);
      if (!def1)
        continue;
      swap = false;
      if (!def0 || lattice[gimple_uid (def1)] > lattice[gimple_uid (def0)])
        swap = true;
      if (swap)
        {
          if (dump_file && (dump_flags & TDF_DETAILS))
            {
              fprintf (dump_file, "Swap operands in stmt:\n");
              print_gimple_stmt (dump_file, stmt, 0, TDF_SLIM);
              fprintf (dump_file, "Cost left opnd=%d, right opnd=%d\n",
                       def0 ? lattice[gimple_uid (def0)] : 0,
                       lattice[gimple_uid (def1)]);
            }
          swap_ssa_operands (stmt, gimple_assign_rhs1_ptr (stmt),
                             gimple_assign_rhs2_ptr (stmt));
        }
    }
  XDELETE (lattice);
}
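/* Illustrative sketch, not part of the pass: reorder_operands keeps the
   costlier operand of a commutative operation first, so the expensive
   subexpression is expanded ahead of the cheap one.  A stand-alone analogue
   over a toy cost measure, added purely for illustration:  */

static inline void
example_swap_by_cost (int *op0, int *op1, int cost0, int cost1)
{
  /* Mirror the lattice comparison above: if the second operand is the more
     expensive one, swap; the operation is commutative, so the value of the
     expression is unchanged.  */
  if (cost1 > cost0)
    {
      int tmp = *op0;
      *op0 = *op1;
      *op1 = tmp;
    }
}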
5545 /* Expand basic block BB from GIMPLE trees to RTL. */
5548 expand_gimple_basic_block (basic_block bb
, bool disable_tail_calls
)
5550 gimple_stmt_iterator gsi
;
5552 gimple
*stmt
= NULL
;
5553 rtx_note
*note
= NULL
;
5559 fprintf (dump_file
, "\n;; Generating RTL for gimple basic block %d\n",
5562 /* Note that since we are now transitioning from GIMPLE to RTL, we
5563 cannot use the gsi_*_bb() routines because they expect the basic
5564 block to be in GIMPLE, instead of RTL. Therefore, we need to
5565 access the BB sequence directly. */
5567 reorder_operands (bb
);
5568 stmts
= bb_seq (bb
);
5569 bb
->il
.gimple
.seq
= NULL
;
5570 bb
->il
.gimple
.phi_nodes
= NULL
;
5571 rtl_profile_for_bb (bb
);
5572 init_rtl_bb_info (bb
);
5573 bb
->flags
|= BB_RTL
;
5575 /* Remove the RETURN_EXPR if we may fall though to the exit
5577 gsi
= gsi_last (stmts
);
5578 if (!gsi_end_p (gsi
)
5579 && gimple_code (gsi_stmt (gsi
)) == GIMPLE_RETURN
)
5581 greturn
*ret_stmt
= as_a
<greturn
*> (gsi_stmt (gsi
));
5583 gcc_assert (single_succ_p (bb
));
5584 gcc_assert (single_succ (bb
) == EXIT_BLOCK_PTR_FOR_FN (cfun
));
5586 if (bb
->next_bb
== EXIT_BLOCK_PTR_FOR_FN (cfun
)
5587 && !gimple_return_retval (ret_stmt
))
5589 gsi_remove (&gsi
, false);
5590 single_succ_edge (bb
)->flags
|= EDGE_FALLTHRU
;
5594 gsi
= gsi_start (stmts
);
5595 if (!gsi_end_p (gsi
))
5597 stmt
= gsi_stmt (gsi
);
5598 if (gimple_code (stmt
) != GIMPLE_LABEL
)
5602 rtx_code_label
**elt
= lab_rtx_for_bb
->get (bb
);
5606 gcc_checking_assert (!note
);
5607 last
= get_last_insn ();
5611 expand_gimple_stmt (stmt
);
5618 BB_HEAD (bb
) = NEXT_INSN (last
);
5619 if (NOTE_P (BB_HEAD (bb
)))
5620 BB_HEAD (bb
) = NEXT_INSN (BB_HEAD (bb
));
5621 gcc_assert (LABEL_P (BB_HEAD (bb
)));
5622 note
= emit_note_after (NOTE_INSN_BASIC_BLOCK
, BB_HEAD (bb
));
5624 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
5627 BB_HEAD (bb
) = note
= emit_note (NOTE_INSN_BASIC_BLOCK
);
5630 NOTE_BASIC_BLOCK (note
) = bb
;
5632 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
5636 stmt
= gsi_stmt (gsi
);
5638 /* If this statement is a non-debug one, and we generate debug
5639 insns, then this one might be the last real use of a TERed
5640 SSA_NAME, but where there are still some debug uses further
5641 down. Expanding the current SSA name in such further debug
5642 uses by their RHS might lead to wrong debug info, as coalescing
5643 might make the operands of such RHS be placed into the same
5644 pseudo as something else. Like so:
5645 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
5649 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
5650 If we now would expand a_1 by its RHS (a_0 + 1) in the debug use,
5651 the write to a_2 would actually have clobbered the place which
5654 So, instead of that, we recognize the situation, and generate
5655 debug temporaries at the last real use of TERed SSA names:
5662 if (MAY_HAVE_DEBUG_BIND_INSNS
5664 && !is_gimple_debug (stmt
))
5670 location_t sloc
= curr_insn_location ();
5672 /* Look for SSA names that have their last use here (TERed
5673 names always have only one real use). */
5674 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_USE
)
5675 if ((def
= get_gimple_for_ssa_name (op
)))
5677 imm_use_iterator imm_iter
;
5678 use_operand_p use_p
;
5679 bool have_debug_uses
= false;
5681 FOR_EACH_IMM_USE_FAST (use_p
, imm_iter
, op
)
5683 if (gimple_debug_bind_p (USE_STMT (use_p
)))
5685 have_debug_uses
= true;
5690 if (have_debug_uses
)
5692 /* OP is a TERed SSA name, with DEF its defining
5693 statement, and where OP is used in further debug
5694 instructions. Generate a debug temporary, and
5695 replace all uses of OP in debug insns with that
5698 tree value
= gimple_assign_rhs_to_tree (def
);
5699 tree vexpr
= make_node (DEBUG_EXPR_DECL
);
5703 set_curr_insn_location (gimple_location (def
));
5705 DECL_ARTIFICIAL (vexpr
) = 1;
5706 TREE_TYPE (vexpr
) = TREE_TYPE (value
);
5708 mode
= DECL_MODE (value
);
5710 mode
= TYPE_MODE (TREE_TYPE (value
));
5711 SET_DECL_MODE (vexpr
, mode
);
5713 val
= gen_rtx_VAR_LOCATION
5714 (mode
, vexpr
, (rtx
)value
, VAR_INIT_STATUS_INITIALIZED
);
5716 emit_debug_insn (val
);
5718 FOR_EACH_IMM_USE_STMT (debugstmt
, imm_iter
, op
)
5720 if (!gimple_debug_bind_p (debugstmt
))
5723 FOR_EACH_IMM_USE_ON_STMT (use_p
, imm_iter
)
5724 SET_USE (use_p
, vexpr
);
5726 update_stmt (debugstmt
);
5730 set_curr_insn_location (sloc
);
5733 currently_expanding_gimple_stmt
= stmt
;
5735 /* Expand this statement, then evaluate the resulting RTL and
5736 fixup the CFG accordingly. */
5737 if (gimple_code (stmt
) == GIMPLE_COND
)
5739 new_bb
= expand_gimple_cond (bb
, as_a
<gcond
*> (stmt
));
5743 else if (is_gimple_debug (stmt
))
5745 location_t sloc
= curr_insn_location ();
5746 gimple_stmt_iterator nsi
= gsi
;
5751 tree value
= NULL_TREE
;
5755 if (!gimple_debug_nonbind_marker_p (stmt
))
5757 if (gimple_debug_bind_p (stmt
))
5759 var
= gimple_debug_bind_get_var (stmt
);
5761 if (TREE_CODE (var
) != DEBUG_EXPR_DECL
5762 && TREE_CODE (var
) != LABEL_DECL
5763 && !target_for_debug_bind (var
))
5764 goto delink_debug_stmt
;
5767 mode
= DECL_MODE (var
);
5769 mode
= TYPE_MODE (TREE_TYPE (var
));
5771 if (gimple_debug_bind_has_value_p (stmt
))
5772 value
= gimple_debug_bind_get_value (stmt
);
5774 val
= gen_rtx_VAR_LOCATION
5775 (mode
, var
, (rtx
)value
, VAR_INIT_STATUS_INITIALIZED
);
5777 else if (gimple_debug_source_bind_p (stmt
))
5779 var
= gimple_debug_source_bind_get_var (stmt
);
5781 value
= gimple_debug_source_bind_get_value (stmt
);
5783 mode
= DECL_MODE (var
);
5785 val
= gen_rtx_VAR_LOCATION (mode
, var
, (rtx
)value
,
5786 VAR_INIT_STATUS_UNINITIALIZED
);
5791 /* If this function was first compiled with markers
5792 enabled, but they're now disabled (e.g. LTO), drop
5793 them on the floor. */
5794 else if (gimple_debug_nonbind_marker_p (stmt
)
5795 && !MAY_HAVE_DEBUG_MARKER_INSNS
)
5796 goto delink_debug_stmt
;
5797 else if (gimple_debug_begin_stmt_p (stmt
))
5798 val
= GEN_RTX_DEBUG_MARKER_BEGIN_STMT_PAT ();
5799 else if (gimple_debug_inline_entry_p (stmt
))
5801 tree block
= gimple_block (stmt
);
5804 val
= GEN_RTX_DEBUG_MARKER_INLINE_ENTRY_PAT ();
5806 goto delink_debug_stmt
;
5811 last
= get_last_insn ();
5813 set_curr_insn_location (gimple_location (stmt
));
5815 emit_debug_insn (val
);
5817 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5819 /* We can't dump the insn with a TREE where an RTX
5821 if (GET_CODE (val
) == VAR_LOCATION
)
5823 gcc_checking_assert (PAT_VAR_LOCATION_LOC (val
) == (rtx
)value
);
5824 PAT_VAR_LOCATION_LOC (val
) = const0_rtx
;
5826 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
5827 if (GET_CODE (val
) == VAR_LOCATION
)
5828 PAT_VAR_LOCATION_LOC (val
) = (rtx
)value
;
5832 /* In order not to generate too many debug temporaries,
5833 we delink all uses of debug statements we already expanded.
5834 Therefore debug statements between definition and real
5835 use of TERed SSA names will continue to use the SSA name,
5836 and not be replaced with debug temps. */
5837 delink_stmt_imm_use (stmt
);
5841 if (gsi_end_p (nsi
))
5843 stmt
= gsi_stmt (nsi
);
5844 if (!is_gimple_debug (stmt
))
5848 set_curr_insn_location (sloc
);
5852 gcall
*call_stmt
= dyn_cast
<gcall
*> (stmt
);
5854 && gimple_call_tail_p (call_stmt
)
5855 && disable_tail_calls
)
5856 gimple_call_set_tail (call_stmt
, false);
5858 if (call_stmt
&& gimple_call_tail_p (call_stmt
))
5861 new_bb
= expand_gimple_tailcall (bb
, call_stmt
, &can_fallthru
);
5872 def_operand_p def_p
;
5873 def_p
= SINGLE_SSA_DEF_OPERAND (stmt
, SSA_OP_DEF
);
5877 /* Ignore this stmt if it is in the list of
5878 replaceable expressions. */
5880 && bitmap_bit_p (SA
.values
,
5881 SSA_NAME_VERSION (DEF_FROM_PTR (def_p
))))
5884 last
= expand_gimple_stmt (stmt
);
5885 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
5890 currently_expanding_gimple_stmt
= NULL
;
5892 /* Expand implicit goto and convert goto_locus. */
5893 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
5895 if (e
->goto_locus
!= UNKNOWN_LOCATION
)
5896 set_curr_insn_location (e
->goto_locus
);
5897 if ((e
->flags
& EDGE_FALLTHRU
) && e
->dest
!= bb
->next_bb
)
5899 emit_jump (label_rtx_for_bb (e
->dest
));
5900 e
->flags
&= ~EDGE_FALLTHRU
;
5904 /* Expanded RTL can create a jump in the last instruction of block.
5905 This later might be assumed to be a jump to successor and break edge insertion.
5906 We need to insert dummy move to prevent this. PR41440. */
5907 if (single_succ_p (bb
)
5908 && (single_succ_edge (bb
)->flags
& EDGE_FALLTHRU
)
5909 && (last
= get_last_insn ())
5911 || (DEBUG_INSN_P (last
)
5912 && JUMP_P (prev_nondebug_insn (last
)))))
5914 rtx dummy
= gen_reg_rtx (SImode
);
5915 emit_insn_after_noloc (gen_move_insn (dummy
, dummy
), last
, NULL
);
5918 do_pending_stack_adjust ();
5920 /* Find the block tail. The last insn in the block is the insn
5921 before a barrier and/or table jump insn. */
5922 last
= get_last_insn ();
5923 if (BARRIER_P (last
))
5924 last
= PREV_INSN (last
);
5925 if (JUMP_TABLE_DATA_P (last
))
5926 last
= PREV_INSN (PREV_INSN (last
));
5927 if (BARRIER_P (last
))
5928 last
= PREV_INSN (last
);
5931 update_bb_for_insn (bb
);
5937 /* Create a basic block for initialization code. */
5940 construct_init_block (void)
5942 basic_block init_block
, first_block
;
5946 /* Multiple entry points not supported yet. */
5947 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun
)->succs
) == 1);
5948 init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
5949 init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun
));
5950 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->flags
|= BB_RTL
;
5951 EXIT_BLOCK_PTR_FOR_FN (cfun
)->flags
|= BB_RTL
;
5953 e
= EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun
), 0);
5955 /* When entry edge points to first basic block, we don't need jump,
5956 otherwise we have to jump into proper target. */
5957 if (e
&& e
->dest
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->next_bb
)
5959 tree label
= gimple_block_label (e
->dest
);
5961 emit_jump (jump_target_rtx (label
));
5965 flags
= EDGE_FALLTHRU
;
5967 init_block
= create_basic_block (NEXT_INSN (get_insns ()),
5969 ENTRY_BLOCK_PTR_FOR_FN (cfun
));
5970 init_block
->count
= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
;
5971 add_bb_to_loop (init_block
, ENTRY_BLOCK_PTR_FOR_FN (cfun
)->loop_father
);
5974 first_block
= e
->dest
;
5975 redirect_edge_succ (e
, init_block
);
5976 make_single_succ_edge (init_block
, first_block
, flags
);
5979 make_single_succ_edge (init_block
, EXIT_BLOCK_PTR_FOR_FN (cfun
),
5982 update_bb_for_insn (init_block
);
/* For each lexical block, set BLOCK_NUMBER to the depth at which it is
   found in the block tree.  */

static void
set_block_levels (tree block, int level)
{
  while (block)
    {
      BLOCK_NUMBER (block) = level;
      set_block_levels (BLOCK_SUBBLOCKS (block), level + 1);
      block = BLOCK_CHAIN (block);
    }
}
6000 /* Create a block containing landing pads and similar stuff. */
6003 construct_exit_block (void)
6005 rtx_insn
*head
= get_last_insn ();
6007 basic_block exit_block
;
6011 basic_block prev_bb
= EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
;
6012 rtx_insn
*orig_end
= BB_END (prev_bb
);
6014 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun
));
6016 /* Make sure the locus is set to the end of the function, so that
6017 epilogue line numbers and warnings are set properly. */
6018 if (LOCATION_LOCUS (cfun
->function_end_locus
) != UNKNOWN_LOCATION
)
6019 input_location
= cfun
->function_end_locus
;
6021 /* Generate rtl for function exit. */
6022 expand_function_end ();
6024 end
= get_last_insn ();
6027 /* While emitting the function end we could move end of the last basic
6029 BB_END (prev_bb
) = orig_end
;
6030 while (NEXT_INSN (head
) && NOTE_P (NEXT_INSN (head
)))
6031 head
= NEXT_INSN (head
);
6032 /* But make sure exit_block starts with RETURN_LABEL, otherwise the
6033 bb count counting will be confused. Any instructions before that
6034 label are emitted for the case where PREV_BB falls through into the
6035 exit block, so append those instructions to prev_bb in that case. */
6036 if (NEXT_INSN (head
) != return_label
)
6038 while (NEXT_INSN (head
) != return_label
)
6040 if (!NOTE_P (NEXT_INSN (head
)))
6041 BB_END (prev_bb
) = NEXT_INSN (head
);
6042 head
= NEXT_INSN (head
);
6045 exit_block
= create_basic_block (NEXT_INSN (head
), end
, prev_bb
);
6046 exit_block
->count
= EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
;
6047 add_bb_to_loop (exit_block
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->loop_father
);
6050 while (ix
< EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
))
6052 e
= EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun
), ix
);
6053 if (!(e
->flags
& EDGE_ABNORMAL
))
6054 redirect_edge_succ (e
, exit_block
);
6059 e
= make_single_succ_edge (exit_block
, EXIT_BLOCK_PTR_FOR_FN (cfun
),
6061 FOR_EACH_EDGE (e2
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
6064 exit_block
->count
-= e2
->count ();
6066 update_bb_for_insn (exit_block
);
6069 /* Helper function for discover_nonconstant_array_refs.
6070 Look for ARRAY_REF nodes with non-constant indexes and mark them
6074 discover_nonconstant_array_refs_r (tree
* tp
, int *walk_subtrees
,
6075 void *data ATTRIBUTE_UNUSED
)
6079 if (IS_TYPE_OR_DECL_P (t
))
6081 else if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
6083 while (((TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
6084 && is_gimple_min_invariant (TREE_OPERAND (t
, 1))
6085 && (!TREE_OPERAND (t
, 2)
6086 || is_gimple_min_invariant (TREE_OPERAND (t
, 2))))
6087 || (TREE_CODE (t
) == COMPONENT_REF
6088 && (!TREE_OPERAND (t
,2)
6089 || is_gimple_min_invariant (TREE_OPERAND (t
, 2))))
6090 || TREE_CODE (t
) == BIT_FIELD_REF
6091 || TREE_CODE (t
) == REALPART_EXPR
6092 || TREE_CODE (t
) == IMAGPART_EXPR
6093 || TREE_CODE (t
) == VIEW_CONVERT_EXPR
6094 || CONVERT_EXPR_P (t
))
6095 t
= TREE_OPERAND (t
, 0);
6097 if (TREE_CODE (t
) == ARRAY_REF
|| TREE_CODE (t
) == ARRAY_RANGE_REF
)
6099 t
= get_base_address (t
);
6101 && DECL_MODE (t
) != BLKmode
)
6102 TREE_ADDRESSABLE (t
) = 1;
6107 /* References of size POLY_INT_CST to a fixed-size object must go
6108 through memory. It's more efficient to force that here than
6109 to create temporary slots on the fly. */
6110 else if ((TREE_CODE (t
) == MEM_REF
|| TREE_CODE (t
) == TARGET_MEM_REF
)
6111 && TYPE_SIZE (TREE_TYPE (t
))
6112 && POLY_INT_CST_P (TYPE_SIZE (TREE_TYPE (t
))))
6114 tree base
= get_base_address (t
);
6117 && DECL_MODE (base
) != BLKmode
6118 && GET_MODE_SIZE (DECL_MODE (base
)).is_constant ())
6119 TREE_ADDRESSABLE (base
) = 1;
/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in a single register.  Discover such
   expressions and mark variables as addressable to avoid this
   scenario.  */

static void
discover_nonconstant_array_refs (void)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  FOR_EACH_BB_FN (bb, cfun)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple *stmt = gsi_stmt (gsi);
        if (!is_gimple_debug (stmt))
          {
            walk_gimple_op (stmt, discover_nonconstant_array_refs_r, NULL);
            gcall *call = dyn_cast <gcall *> (stmt);
            if (call && gimple_call_internal_p (call))
              switch (gimple_call_internal_fn (call))
                {
                case IFN_LOAD_LANES:
                  /* The source must be a MEM.  */
                  mark_addressable (gimple_call_arg (call, 0));
                  break;
                case IFN_STORE_LANES:
                  /* The destination must be a MEM.  */
                  mark_addressable (gimple_call_lhs (call));
                  break;
                default:
                  break;
                }
          }
      }
}
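/* Illustrative sketch, not part of the pass, assuming GCC's vector
   extensions: a vector indexed by a run-time subscript cannot stay in a
   single register, which is why the walk above marks such bases
   TREE_ADDRESSABLE and forces them into memory.  */

typedef int example_v4si __attribute__ ((vector_size (16)));

static inline int
example_pick_lane (example_v4si v, int i)
{
  /* With a constant I this can be a register extract; with a variable I
     the vector has to live in memory so the element can be addressed.  */
  return v[i];
}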
6163 /* This function sets crtl->args.internal_arg_pointer to a virtual
6164 register if DRAP is needed. Local register allocator will replace
6165 virtual_incoming_args_rtx with the virtual register. */
6168 expand_stack_alignment (void)
6171 unsigned int preferred_stack_boundary
;
6173 if (! SUPPORTS_STACK_ALIGNMENT
)
6176 if (cfun
->calls_alloca
6177 || cfun
->has_nonlocal_label
6178 || crtl
->has_nonlocal_goto
)
6179 crtl
->need_drap
= true;
6181 /* Call update_stack_boundary here again to update incoming stack
6182 boundary. It may set incoming stack alignment to a different
6183 value after RTL expansion. TARGET_FUNCTION_OK_FOR_SIBCALL may
6184 use the minimum incoming stack alignment to check if it is OK
6185 to perform sibcall optimization since sibcall optimization will
6186 only align the outgoing stack to incoming stack boundary. */
6187 if (targetm
.calls
.update_stack_boundary
)
6188 targetm
.calls
.update_stack_boundary ();
6190 /* The incoming stack frame has to be aligned at least at
6191 parm_stack_boundary. */
6192 gcc_assert (crtl
->parm_stack_boundary
<= INCOMING_STACK_BOUNDARY
);
6194 /* Update crtl->stack_alignment_estimated and use it later to align
6195 stack. We check PREFERRED_STACK_BOUNDARY if there may be non-call
6196 exceptions since callgraph doesn't collect incoming stack alignment
6198 if (cfun
->can_throw_non_call_exceptions
6199 && PREFERRED_STACK_BOUNDARY
> crtl
->preferred_stack_boundary
)
6200 preferred_stack_boundary
= PREFERRED_STACK_BOUNDARY
;
6202 preferred_stack_boundary
= crtl
->preferred_stack_boundary
;
6203 if (preferred_stack_boundary
> crtl
->stack_alignment_estimated
)
6204 crtl
->stack_alignment_estimated
= preferred_stack_boundary
;
6205 if (preferred_stack_boundary
> crtl
->stack_alignment_needed
)
6206 crtl
->stack_alignment_needed
= preferred_stack_boundary
;
6208 gcc_assert (crtl
->stack_alignment_needed
6209 <= crtl
->stack_alignment_estimated
);
6211 crtl
->stack_realign_needed
6212 = INCOMING_STACK_BOUNDARY
< crtl
->stack_alignment_estimated
;
6213 crtl
->stack_realign_tried
= crtl
->stack_realign_needed
;
6215 crtl
->stack_realign_processed
= true;
6217 /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
6219 gcc_assert (targetm
.calls
.get_drap_rtx
!= NULL
);
6220 drap_rtx
= targetm
.calls
.get_drap_rtx ();
6222 /* stack_realign_drap and drap_rtx must match. */
6223 gcc_assert ((stack_realign_drap
!= 0) == (drap_rtx
!= NULL
));
6225 /* Do nothing if NULL is returned, which means DRAP is not needed. */
6226 if (drap_rtx
!= NULL
)
6228 crtl
->args
.internal_arg_pointer
= drap_rtx
;
6230 /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
6232 fixup_tail_calls ();
static void
expand_main_function (void)
{
#if (defined(INVOKE__main) \
     || (!defined(HAS_INIT_SECTION) \
         && !defined(INIT_SECTION_ASM_OP) \
         && !defined(INIT_ARRAY_SECTION_ASM_OP)))
  emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode);
#endif
}
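/* Illustrative sketch, not part of the pass: the library call emitted above
   prepends to main() the moral equivalent of the helper below, where the
   constructor-runner name is hypothetical (the real entry point is __main
   from libgcc on targets without init sections).  */

extern void example_run_global_ctors (void);  /* hypothetical */

static inline void
example_main_prologue (void)
{
  /* Run file-scope constructors/initializers before main's body executes.  */
  example_run_global_ctors ();
}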
/* Expand code to initialize the stack_protect_guard.  This is invoked at
   the beginning of a function to be protected.  */

static void
stack_protect_prologue (void)
{
  tree guard_decl = targetm.stack_protect_guard ();
  rtx x, y;

  crtl->stack_protect_guard_decl = guard_decl;
  x = expand_normal (crtl->stack_protect_guard);

  if (targetm.have_stack_protect_combined_set () && guard_decl)
    {
      gcc_assert (DECL_P (guard_decl));
      y = DECL_RTL (guard_decl);

      /* Allow the target to compute address of Y and copy it to X without
         leaking Y into a register.  This combined address + copy pattern
         allows the target to prevent spilling of any intermediate results by
         splitting it after register allocator.  */
      if (rtx_insn *insn = targetm.gen_stack_protect_combined_set (x, y))
        {
          emit_insn (insn);
          return;
        }
    }

  if (guard_decl)
    y = expand_normal (guard_decl);
  else
    y = const0_rtx;

  /* Allow the target to copy from Y to X without leaking Y into a
     register.  */
  if (targetm.have_stack_protect_set ())
    if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y))
      {
        emit_insn (insn);
        return;
      }

  /* Otherwise do a straight move.  */
  emit_move_insn (x, y);
}
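/* Illustrative sketch, not part of the pass: at the source level, the
   prologue emitted above amounts to copying the guard value into a local
   canary slot; the matching epilogue check (emitted elsewhere) compares the
   slot against the guard and calls a failure handler on mismatch.  The names
   below are hypothetical stand-ins for the target's guard symbol and
   handler.  */

extern unsigned long example_stack_guard;            /* hypothetical guard */
extern void example_stack_smash_handler (void);      /* hypothetical handler */

static inline void
example_protected_frame (void)
{
  volatile unsigned long canary = example_stack_guard;  /* prologue copy */
  /* ... function body that might overflow a local buffer ... */
  if (canary != example_stack_guard)                     /* epilogue check */
    example_stack_smash_handler ();
}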
/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do conversion per basic block and preserve/update the tree CFG.
   This implies we have to do some magic as the CFG can simultaneously
   consist of basic blocks containing RTL and GIMPLE trees.  This can
   confuse the CFG hooks, so be careful to not manipulate CFG during
   the expansion.  */

const pass_data pass_data_expand =
{
  RTL_PASS, /* type */
  "expand", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_EXPAND, /* tv_id */
  ( PROP_ssa | PROP_gimple_leh | PROP_cfg
    | PROP_gimple_lcx
    | PROP_gimple_lvec
    | PROP_gimple_lva), /* properties_required */
  PROP_rtl, /* properties_provided */
  ( PROP_ssa | PROP_trees ), /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_expand : public rtl_opt_pass
{
public:
  pass_expand (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_expand, ctxt)
  {}

  /* opt_pass methods: */
  virtual unsigned int execute (function *);

}; // class pass_expand
6335 pass_expand::execute (function
*fun
)
6337 basic_block bb
, init_block
;
6340 rtx_insn
*var_seq
, *var_ret_seq
;
6343 timevar_push (TV_OUT_OF_SSA
);
6344 rewrite_out_of_ssa (&SA
);
6345 timevar_pop (TV_OUT_OF_SSA
);
6346 SA
.partition_to_pseudo
= XCNEWVEC (rtx
, SA
.map
->num_partitions
);
6348 if (MAY_HAVE_DEBUG_BIND_STMTS
&& flag_tree_ter
)
6350 gimple_stmt_iterator gsi
;
6351 FOR_EACH_BB_FN (bb
, cfun
)
6352 for (gsi
= gsi_start_bb (bb
); !gsi_end_p (gsi
); gsi_next (&gsi
))
6353 if (gimple_debug_bind_p (gsi_stmt (gsi
)))
6354 avoid_deep_ter_for_debug (gsi_stmt (gsi
), 0);
6357 /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE. */
6358 discover_nonconstant_array_refs ();
6360 /* Make sure all values used by the optimization passes have sane
6364 /* Some backends want to know that we are expanding to RTL. */
6365 currently_expanding_to_rtl
= 1;
6366 /* Dominators are not kept up-to-date as we may create new basic-blocks. */
6367 free_dominance_info (CDI_DOMINATORS
);
6369 rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun
));
6371 insn_locations_init ();
6372 if (!DECL_IS_BUILTIN (current_function_decl
))
6374 /* Eventually, all FEs should explicitly set function_start_locus. */
6375 if (LOCATION_LOCUS (fun
->function_start_locus
) == UNKNOWN_LOCATION
)
6376 set_curr_insn_location
6377 (DECL_SOURCE_LOCATION (current_function_decl
));
6379 set_curr_insn_location (fun
->function_start_locus
);
6382 set_curr_insn_location (UNKNOWN_LOCATION
);
6383 prologue_location
= curr_insn_location ();
6385 #ifdef INSN_SCHEDULING
6386 init_sched_attrs ();
6389 /* Make sure first insn is a note even if we don't want linenums.
6390 This makes sure the first insn will never be deleted.
6391 Also, final expects a note to appear there. */
6392 emit_note (NOTE_INSN_DELETED
);
6394 targetm
.expand_to_rtl_hook ();
6395 crtl
->init_stack_alignment ();
6396 fun
->cfg
->max_jumptable_ents
= 0;
6398 /* Resolve the function section.  Some targets, like ARM EABI, rely on knowledge
6399 of the function section at expansion time to predict distance of calls. */
6400 resolve_unique_section (current_function_decl, 0, flag_function_sections);
6402 /* Expand the variables recorded during gimple lowering. */
6403 timevar_push (TV_VAR_EXPAND
);
6406 var_ret_seq
= expand_used_vars ();
6408 var_seq
= get_insns ();
6410 timevar_pop (TV_VAR_EXPAND
);

  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
    {
      if (fun->calls_alloca)
	warning (OPT_Wstack_protector,
		 "stack protector not protecting local variables: "
		 "variable length buffer");
      if (has_short_buffer && !crtl->stack_protect_guard)
	warning (OPT_Wstack_protector,
		 "stack protector not protecting function: "
		 "all local arrays are less than %d bytes long",
		 (int) param_ssp_buffer_size);
    }

  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* If we emitted any instructions for setting up the variables,
     emit them before the FUNCTION_START note.  */
  if (var_seq)
    {
      emit_insn_before (var_seq, parm_birth_insn);

      /* In expand_function_end we'll insert the alloca save/restore
	 before parm_birth_insn.  We've just inserted an alloca call.
	 Adjust the pointer to match.  */
      parm_birth_insn = var_seq;
    }

  /* Now propagate the RTL assignment of each partition to the
     underlying var of each SSA_NAME.  */
  tree name;

  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      /* We might have generated new SSA names in
	 update_alias_info_with_stack_vars.  They will have a NULL
	 defining statement, and won't be part of the partitioning,
	 so ignore those.  */
      if (!SSA_NAME_DEF_STMT (name))
	continue;

      adjust_one_expanded_partition_var (name);
    }

  /* Clean up RTL of variables that straddle across multiple
     partitions, and check that the rtl of any PARM_DECLs that are not
     cleaned up is that of their default defs.  */
  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      int part;

      /* We might have generated new SSA names in
	 update_alias_info_with_stack_vars.  They will have a NULL
	 defining statement, and won't be part of the partitioning,
	 so ignore those.  */
      if (!SSA_NAME_DEF_STMT (name))
	continue;
      part = var_to_partition (SA.map, name);
      if (part == NO_PARTITION)
	continue;

      /* If this decl was marked as living in multiple places, reset
	 this now to NULL.  */
      tree var = SSA_NAME_VAR (name);
      if (var && DECL_RTL_IF_SET (var) == pc_rtx)
	SET_DECL_RTL (var, NULL);
      /* Check that the pseudos chosen by assign_parms are those of
	 the corresponding default defs.  */
      else if (SSA_NAME_IS_DEFAULT_DEF (name)
	       && (TREE_CODE (var) == PARM_DECL
		   || TREE_CODE (var) == RESULT_DECL))
	{
	  rtx in = DECL_RTL_IF_SET (var);
	  rtx out = SA.partition_to_pseudo[part];
	  gcc_assert (in == out);

	  /* Now reset VAR's RTL to IN, so that the _EXPR attrs match
	     those expected by debug backends for each parm and for
	     the result.  This is particularly important for stabs,
	     whose register elimination from parm's DECL_RTL may cause
	     -fcompare-debug differences as SET_DECL_RTL changes reg's
	     attrs.  So, make sure the RTL already has the parm as the
	     EXPR, so that it won't change.  */
	  SET_DECL_RTL (var, NULL_RTX);
	  if (MEM_P (in))
	    set_mem_attributes (in, var, true);
	  SET_DECL_RTL (var, in);
	}
    }

  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();

  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (crtl->stack_protect_guard
      && targetm.stack_protect_runtime_enabled_p ())
    stack_protect_prologue ();

  expand_phi_nodes (&SA);
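
  /* Conceptually, a GIMPLE PHI such as
	 x_3 = PHI <x_1 (bb2), x_2 (bb3)>
     is resolved here: the equivalent copies are queued on the incoming
     edges (to be committed as real insns later), and the PHI node itself
     can then be dropped.  */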

  /* Release any stale SSA redirection data.  */
  redirect_edge_var_map_empty ();

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
     remaining edges later.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
    e->flags &= ~EDGE_EXECUTABLE;

  /* If the function has too many markers, drop them while expanding.  */
  if (cfun->debug_marker_count >= param_max_debug_marker_count)
    cfun->debug_nonbind_markers = false;

  lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
		  next_bb)
    bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);
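
  /* For illustration only: within expand_gimple_basic_block every GIMPLE
     statement becomes one or more insns, so an assignment like
	 a_5 = b_2 + c_3;
     is emitted roughly as
	 (insn ... (set (reg:SI 90 [ a ])
		        (plus:SI (reg:SI 88 [ b ]) (reg:SI 89 [ c ]))))
     with the pseudos taken from the SSA partition mapping set up above.  */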

  if (MAY_HAVE_DEBUG_BIND_INSNS)
    expand_debug_locations ();

  if (deep_ter_debug_map)
    {
      delete deep_ter_debug_map;
      deep_ter_debug_map = NULL;
    }

  /* Free stuff we no longer need after GIMPLE optimizations.  */
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);
  delete_tree_cfg_annotations (fun);

  timevar_push (TV_OUT_OF_SSA);
  finish_out_of_ssa (&SA);
  timevar_pop (TV_OUT_OF_SSA);

  timevar_push (TV_POST_EXPAND);
  /* We are no longer in SSA form.  */
  fun->gimple_df->in_ssa_p = false;
  loops_state_clear (LOOP_CLOSED_SSA);

  /* Expansion is used by optimization passes too, set maybe_hot_insn_p
     conservatively to true until they are all profile aware.  */
  delete lab_rtx_for_bb;
  free_histograms (fun);

  construct_exit_block ();
  insn_locations_finalize ();

  if (var_ret_seq)
    {
      rtx_insn *after = return_label;
      rtx_insn *next = NEXT_INSN (after);
      if (next && NOTE_INSN_BASIC_BLOCK_P (next))
	after = next;
      emit_insn_after (var_ret_seq, after);
    }

  /* Zap the tree EH table.  */
  set_eh_throw_stmt_table (fun, NULL);

  /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
     split edges which edge insertions might do.  */
  rebuild_jump_labels (get_insns ());

  /* If we have a single successor to the entry block, put the pending insns
     after parm birth, but before NOTE_INSN_FUNCTION_BEG.  */
  if (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
    {
      edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fun));
      if (e->insns.r)
	{
	  rtx_insn *insns = e->insns.r;
	  e->insns.r = NULL;
	  rebuild_jump_labels_chain (insns);
	  if (NOTE_P (parm_birth_insn)
	      && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
	    emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
	  else
	    emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
	}
    }

  /* Otherwise, as well as for other edges, take the usual way.  */
  commit_edge_insertions ();

  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  flush_mark_addressable_queue ();

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
		  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
    {
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  /* Clear EDGE_EXECUTABLE.  This flag is never used in the backend.  */
	  e->flags &= ~EDGE_EXECUTABLE;

	  /* At the moment not all abnormal edges match the RTL
	     representation.  It is safe to remove them here as
	     find_many_sub_basic_blocks will rediscover them.
	     In the future we should get this fixed properly.  */
	  if ((e->flags & EDGE_ABNORMAL)
	      && !(e->flags & EDGE_SIBCALL))
	    remove_edge (e);
	  else
	    ei_next (&ei);
	}
    }

  auto_sbitmap blocks (last_basic_block_for_fn (fun));
  bitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  purge_all_dead_edges ();

  /* After initial rtl generation, call back to finish generating
     exception support code.  We need to do this before cleaning up
     the CFG as the code does not expect dead landing pads.  */
  if (fun->eh->region_tree != NULL)
    finish_eh_generation ();

  /* Call expand_stack_alignment after finishing all
     updates to crtl->preferred_stack_boundary.  */
  expand_stack_alignment ();

  /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
     function.  */
  if (crtl->tail_call_emit)
    fixup_tail_calls ();

  /* BB subdivision may have created basic blocks that are only reachable
     from unlikely bbs but not marked as such in the profile.  */
  if (optimize)
    propagate_unlikely_bbs_forward ();

  /* Remove unreachable blocks, otherwise we cannot compute dominators
     which are needed for loop state verification.  As a side-effect
     this also compacts blocks.
     ???  We cannot remove trivially dead insns here as for example
     the DRAP reg on i?86 is not magically live at this point.
     gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise.  */
  cleanup_cfg (CLEANUP_NO_INSN_DEL);

  checking_verify_flow_info ();

  /* Initialize pseudos allocated for hard registers.  */
  emit_initial_value_sets ();

  /* And finally unshare all RTL.  */
  unshare_all_rtl ();

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;
6688 "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
6689 /* And the pass manager will dump RTL for us. */

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
	 parent != NULL_TREE;
	 parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
	TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;

  /* After expanding, the tm_restart map is no longer needed.  */
  if (fun->gimple_df->tm_restart)
    fun->gimple_df->tm_restart = NULL;

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (fun->decl), 0);
  default_rtl_profile ();

  /* For -dx discard loops now, otherwise IL verify in clean_state will
     ICE.  */
  if (rtl_dump_and_exit)
    {
      cfun->curr_properties &= ~PROP_loops;
      loop_optimizer_finalize ();
    }

  timevar_pop (TV_POST_EXPAND);

  return 0;
}

rtl_opt_pass *
make_pass_expand (gcc::context *ctxt)
{
  return new pass_expand (ctxt);
}
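
/* make_pass_expand is the factory the pass manager calls to instantiate the
   pass; pass_expand itself is listed in passes.def, which is what places
   expansion at the GIMPLE-to-RTL boundary of the pipeline.  */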