/* A pass for lowering trees to RTL.
   Copyright (C) 2004-2024 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 3, or (at your option)
any later version.

GCC is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
GNU General Public License for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "tree-pass.h"
#include "regs.h" /* For reg_renumber.  */
#include "diagnostic.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "print-tree.h"
#include "cfgcleanup.h"
#include "internal-fn.h"
#include "gimple-iterator.h"
#include "gimple-expr.h"
#include "gimple-walk.h"
#include "gimple-pretty-print.h"
#include "tree-inline.h"
#include "value-prof.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "insn-attr.h" /* For INSN_SCHEDULING.  */
#include "stringpool.h"
#include "tree-ssa-address.h"
/* Some systems use __main in a way incompatible with its use in gcc, in these
   cases use the macros NAME__MAIN to give a quoted symbol and SYMBOL__MAIN to
   give the same symbol without quotes for an alternative entry point.  You
   must define both, or neither.  */
#define NAME__MAIN "__main"
/* This variable holds information helping the rewriting of SSA trees
   into RTL.  */
struct ssaexpand SA;

/* This variable holds the currently expanded gimple statement for purposes
   of communicating the profile info to the builtin expanders.  */
gimple *currently_expanding_gimple_stmt;
static rtx expand_debug_expr (tree);
static bool defer_stack_allocation (tree, bool);
static void record_alignment_for_reg_var (unsigned int);
/* Return an expression tree corresponding to the RHS of GIMPLE
   statement STMT.  */

tree
gimple_assign_rhs_to_tree (gimple *stmt)
{
  tree t;
  switch (gimple_assign_rhs_class (stmt))
    {
    case GIMPLE_TERNARY_RHS:
      t = build3 (gimple_assign_rhs_code (stmt),
		  TREE_TYPE (gimple_assign_lhs (stmt)),
		  gimple_assign_rhs1 (stmt), gimple_assign_rhs2 (stmt),
		  gimple_assign_rhs3 (stmt));
      break;
    case GIMPLE_BINARY_RHS:
      t = build2 (gimple_assign_rhs_code (stmt),
		  TREE_TYPE (gimple_assign_lhs (stmt)),
		  gimple_assign_rhs1 (stmt), gimple_assign_rhs2 (stmt));
      break;
    case GIMPLE_UNARY_RHS:
      t = build1 (gimple_assign_rhs_code (stmt),
		  TREE_TYPE (gimple_assign_lhs (stmt)),
		  gimple_assign_rhs1 (stmt));
      break;
    case GIMPLE_SINGLE_RHS:
      {
	t = gimple_assign_rhs1 (stmt);
	/* Avoid modifying this tree in place below.  */
	if ((gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t)
	     && gimple_location (stmt) != EXPR_LOCATION (t))
	    || (gimple_block (stmt) && currently_expanding_to_rtl
		&& EXPR_P (t)))
	  t = copy_node (t);
      }
      break;
    default:
      gcc_unreachable ();
    }

  if (gimple_has_location (stmt) && CAN_HAVE_LOCATION_P (t))
    SET_EXPR_LOCATION (t, gimple_location (stmt));

  return t;
}
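/* For instance (illustrative), for a GIMPLE_BINARY_RHS assignment such as
   a_1 = b_2 + c_3 the routine above builds a PLUS_EXPR with the type of the
   LHS and the two RHS operands; for a GIMPLE_SINGLE_RHS the RHS tree is
   returned directly, copied first if its location or block would otherwise
   be clobbered.  */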
#ifndef STACK_ALIGNMENT_NEEDED
#define STACK_ALIGNMENT_NEEDED 1
#endif

#define SSAVAR(x) (TREE_CODE (x) == SSA_NAME ? SSA_NAME_VAR (x) : x)
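/* For example, SSAVAR applied to an SSA_NAME such as x_3 yields its
   underlying VAR_DECL "x" (or NULL_TREE for an anonymous SSA name), while
   applied to a plain VAR_DECL it yields the decl itself.  */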
/* Choose either CUR or NEXT as the leader DECL for a partition.
   Prefer ignored decls, to simplify debug dumps and reduce ambiguity
   out of the same user variable being in multiple partitions (this is
   less likely for compiler-introduced temps).  */

static tree
leader_merge (tree cur, tree next)
{
  if (cur == NULL || cur == next)
    return next;

  if (DECL_P (cur) && DECL_IGNORED_P (cur))
    return cur;

  if (DECL_P (next) && DECL_IGNORED_P (next))
    return next;

  return cur;
}
/* Associate declaration T with storage space X.  If T is not an
   SSA name this is exactly SET_DECL_RTL, otherwise make the
   partition of T associated with X.  */

static void
set_rtl (tree t, rtx x)
{
  gcc_checking_assert (!x
		       || !(TREE_CODE (t) == SSA_NAME || is_gimple_reg (t))
		       || (use_register_for_decl (t)
			   ? (REG_P (x)
			      || (GET_CODE (x) == CONCAT
				  && (REG_P (XEXP (x, 0))
				      || SUBREG_P (XEXP (x, 0)))
				  && (REG_P (XEXP (x, 1))
				      || SUBREG_P (XEXP (x, 1))))
			      /* We need to accept PARALLELs for RESULT_DECLs
				 because of vector types with BLKmode returned
				 in multiple registers, but they are supposed
				 to be uncoalesced.  */
			      || (GET_CODE (x) == PARALLEL
				  && SSAVAR (t)
				  && TREE_CODE (SSAVAR (t)) == RESULT_DECL
				  && (GET_MODE (x) == BLKmode
				      || !flag_tree_coalesce_vars)))
			   : (MEM_P (x) || x == pc_rtx
			      || (GET_CODE (x) == CONCAT
				  && MEM_P (XEXP (x, 0))
				  && MEM_P (XEXP (x, 1))))));
  /* Check that the RTL for SSA_NAMEs and gimple-reg PARM_DECLs and
     RESULT_DECLs has the expected mode.  For memory, we accept
     unpromoted modes, since that's what we're likely to get.  For
     PARM_DECLs and RESULT_DECLs, we'll have been called by
     set_parm_rtl, which will give us the default def, so we don't
     have to compute it ourselves.  For RESULT_DECLs, we accept mode
     mismatches too, as long as we have BLKmode or are not coalescing
     across variables, so that we don't reject BLKmode PARALLELs or
     unpromoted REGs.  */
  gcc_checking_assert (!x || x == pc_rtx || TREE_CODE (t) != SSA_NAME
		       || (SSAVAR (t)
			   && TREE_CODE (SSAVAR (t)) == RESULT_DECL
			   && (promote_ssa_mode (t, NULL) == BLKmode
			       || !flag_tree_coalesce_vars))
		       || !use_register_for_decl (t)
		       || GET_MODE (x) == promote_ssa_mode (t, NULL));

  if (x)
    {
      bool skip = false;
      tree cur = NULL_TREE;
      rtx xm = x;
    retry:
      if (MEM_P (xm))
	cur = MEM_EXPR (xm);
      else if (REG_P (xm))
	cur = REG_EXPR (xm);
      else if (SUBREG_P (xm))
	{
	  gcc_assert (subreg_lowpart_p (xm));
	  xm = SUBREG_REG (xm);
	  goto retry;
	}
      else if (GET_CODE (xm) == CONCAT)
	{
	  xm = XEXP (xm, 0);
	  goto retry;
	}
      else if (GET_CODE (xm) == PARALLEL)
	{
	  xm = XVECEXP (xm, 0, 0);
	  gcc_assert (GET_CODE (xm) == EXPR_LIST);
	  xm = XEXP (xm, 0);
	  goto retry;
	}
      else if (xm == pc_rtx)
	skip = true;
      else
	gcc_unreachable ();

      tree next = skip ? cur : leader_merge (cur, SSAVAR (t) ? SSAVAR (t) : t);

      if (cur != next)
	{
	  if (MEM_P (x))
	    set_mem_attributes (x,
				next && TREE_CODE (next) == SSA_NAME
				? TREE_TYPE (next)
				: next, true);
	  else
	    set_reg_attrs_for_decl_rtl (next, x);
	}
    }

  if (TREE_CODE (t) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, t);
      if (part != NO_PARTITION)
	{
	  if (SA.partition_to_pseudo[part])
	    gcc_assert (SA.partition_to_pseudo[part] == x);
	  else if (x != pc_rtx)
	    SA.partition_to_pseudo[part] = x;
	}
      /* For the benefit of debug information at -O0 (where
	 vartracking doesn't run) record the place also in the base
	 DECL.  For PARMs and RESULTs, do so only when setting the
	 default def.  */
      if (x && x != pc_rtx && SSA_NAME_VAR (t)
	  && (VAR_P (SSA_NAME_VAR (t))
	      || SSA_NAME_IS_DEFAULT_DEF (t)))
	{
	  tree var = SSA_NAME_VAR (t);
	  /* If we don't yet have something recorded, just record it now.  */
	  if (!DECL_RTL_SET_P (var))
	    SET_DECL_RTL (var, x);
	  /* If we have it set already to "multiple places" don't
	     change this.  */
	  else if (DECL_RTL (var) == pc_rtx)
	    ;
	  /* If we have something recorded and it's not the same place
	     as we want to record now, we have multiple partitions for the
	     same base variable, with different places.  We can't just
	     randomly choose one, hence we have to say that we don't know.
	     This only happens with optimization, and there var-tracking
	     will figure out the right thing.  */
	  else if (DECL_RTL (var) != x)
	    SET_DECL_RTL (var, pc_rtx);
	}
    }
  else
    SET_DECL_RTL (t, x);
}
/* This structure holds data relevant to one variable that will be
   placed in a stack slot.  */
class stack_var
{
public:
  /* The Variable.  */
  tree decl;

  /* Initially, the size of the variable.  Later, the size of the partition,
     if this variable becomes its partition's representative.  */
  poly_uint64 size;

  /* The *byte* alignment required for this variable.  Or as, with the
     size, the alignment for this partition.  */
  unsigned int alignb;

  /* The partition representative.  */
  unsigned representative;

  /* The next stack variable in the partition, or EOC.  */
  unsigned next;

  /* The numbers of conflicting stack variables.  */
  bitmap conflicts;
};

#define EOC  ((unsigned)-1)

/* We have an array of such objects while deciding allocation.  */
static class stack_var *stack_vars;
static unsigned stack_vars_alloc;
static unsigned stack_vars_num;
static hash_map<tree, unsigned> *decl_to_stack_part;

/* Conflict bitmaps go on this obstack.  This allows us to destroy
   all of them in one big sweep.  */
static bitmap_obstack stack_var_bitmap_obstack;

/* An array of indices such that stack_vars[stack_vars_sorted[i]].size
   is non-decreasing.  */
static unsigned *stack_vars_sorted;

/* The phase of the stack frame.  This is the known misalignment of
   virtual_stack_vars_rtx from PREFERRED_STACK_BOUNDARY.  That is,
   (frame_offset+frame_phase) % PREFERRED_STACK_BOUNDARY == 0.  */
static int frame_phase;

/* Used during expand_used_vars to remember if we saw any decls for
   which we'd like to enable stack smashing protection.  */
static bool has_protected_decls;

/* Used during expand_used_vars.  Remember if we saw a character buffer
   smaller than our cutoff threshold.  Used for -Wstack-protector.  */
static bool has_short_buffer;
/* Compute the byte alignment to use for DECL.  Ignore alignment
   we can't do with expected alignment of the stack boundary.  */

static unsigned int
align_local_variable (tree decl, bool really_expand)
{
  unsigned int align;

  if (TREE_CODE (decl) == SSA_NAME)
    {
      tree type = TREE_TYPE (decl);
      machine_mode mode = TYPE_MODE (type);

      align = TYPE_ALIGN (type);
      if (mode != BLKmode
	  && align < GET_MODE_ALIGNMENT (mode))
	align = GET_MODE_ALIGNMENT (mode);
    }
  else
    align = LOCAL_DECL_ALIGNMENT (decl);

  if (hwasan_sanitize_stack_p ())
    align = MAX (align, (unsigned) HWASAN_TAG_GRANULE_SIZE * BITS_PER_UNIT);

  if (TREE_CODE (decl) != SSA_NAME && really_expand)
    /* Don't change DECL_ALIGN when called from estimated_stack_frame_size.
       That is done before IPA and could bump alignment based on host
       backend even for offloaded code which wants different
       LOCAL_DECL_ALIGNMENT.  */
    SET_DECL_ALIGN (decl, align);

  return align / BITS_PER_UNIT;
}
/* Align given offset BASE with ALIGN.  Truncate up if ALIGN_UP is true,
   down otherwise.  Return truncated BASE value.  */

static inline unsigned HOST_WIDE_INT
align_base (HOST_WIDE_INT base, unsigned HOST_WIDE_INT align, bool align_up)
{
  return align_up ? (base + align - 1) & -align : base & -align;
}
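/* For example, with ALIGN == 16: align_base (37, 16, true) gives
   (37 + 15) & -16 == 48, while align_base (37, 16, false) gives
   37 & -16 == 32.  -ALIGN is ~15 in two's complement, so the AND simply
   clears the low four bits.  */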
/* Allocate SIZE bytes at byte alignment ALIGN from the stack frame.
   Return the frame offset.  */

static poly_int64
alloc_stack_frame_space (poly_int64 size, unsigned HOST_WIDE_INT align)
{
  poly_int64 offset, new_frame_offset;

  if (FRAME_GROWS_DOWNWARD)
    {
      new_frame_offset
	= aligned_lower_bound (frame_offset - frame_phase - size,
			       align) + frame_phase;
      offset = new_frame_offset;
    }
  else
    {
      new_frame_offset
	= aligned_upper_bound (frame_offset - frame_phase,
			       align) + frame_phase;
      offset = new_frame_offset;
      new_frame_offset += size;
    }
  frame_offset = new_frame_offset;

  if (frame_offset_overflow (frame_offset, cfun->decl))
    frame_offset = offset = 0;

  return offset;
}

/* Ensure that the stack is aligned to ALIGN bytes.
   Return the new frame offset.  */

static poly_int64
align_frame_offset (unsigned HOST_WIDE_INT align)
{
  return alloc_stack_frame_space (0, align);
}
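/* Illustrative: on a FRAME_GROWS_DOWNWARD target with frame_phase == 0 and
   frame_offset == -8, alloc_stack_frame_space (12, 8) moves frame_offset to
   aligned_lower_bound (-8 - 12, 8) == -24 and returns -24, so the new object
   occupies bytes [-24, -12) relative to virtual_stack_vars_rtx.  */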
/* Accumulate DECL into STACK_VARS.  */

static void
add_stack_var (tree decl, bool really_expand)
{
  class stack_var *v;

  if (stack_vars_num >= stack_vars_alloc)
    {
      if (stack_vars_alloc)
	stack_vars_alloc = stack_vars_alloc * 3 / 2;
      else
	stack_vars_alloc = 32;
      stack_vars
	= XRESIZEVEC (class stack_var, stack_vars, stack_vars_alloc);
    }
  if (!decl_to_stack_part)
    decl_to_stack_part = new hash_map<tree, unsigned>;

  v = &stack_vars[stack_vars_num];
  decl_to_stack_part->put (decl, stack_vars_num);

  v->decl = decl;
  tree size = TREE_CODE (decl) == SSA_NAME
    ? TYPE_SIZE_UNIT (TREE_TYPE (decl))
    : DECL_SIZE_UNIT (decl);
  v->size = tree_to_poly_uint64 (size);
  /* Ensure that all variables have size, so that &a != &b for any two
     variables that are simultaneously live.  */
  if (known_eq (v->size, 0U))
    v->size = 1;
  v->alignb = align_local_variable (decl, really_expand);
  /* An alignment of zero can mightily confuse us later.  */
  gcc_assert (v->alignb != 0);

  /* All variables are initially in their own partition.  */
  v->representative = stack_vars_num;
  v->next = EOC;

  /* All variables initially conflict with no other.  */
  v->conflicts = NULL;

  /* Ensure that this decl doesn't get put onto the list twice.  */
  set_rtl (decl, pc_rtx);

  stack_vars_num++;
}
/* Make the decls associated with luid's X and Y conflict.  */

static void
add_stack_var_conflict (unsigned x, unsigned y)
{
  class stack_var *a = &stack_vars[x];
  class stack_var *b = &stack_vars[y];
  if (x == y)
    return;
  if (!a->conflicts)
    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  if (!b->conflicts)
    b->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
  bitmap_set_bit (a->conflicts, y);
  bitmap_set_bit (b->conflicts, x);
}
/* Check whether the decls associated with luid's X and Y conflict.  */

static bool
stack_var_conflict_p (unsigned x, unsigned y)
{
  class stack_var *a = &stack_vars[x];
  class stack_var *b = &stack_vars[y];
  if (x == y)
    return false;
  /* Partitions containing an SSA name result from gimple registers
     with things like unsupported modes.  They are top-level and
     hence conflict with everything else.  */
  if (TREE_CODE (a->decl) == SSA_NAME || TREE_CODE (b->decl) == SSA_NAME)
    return true;

  if (!a->conflicts || !b->conflicts)
    return false;
  return bitmap_bit_p (a->conflicts, y);
}
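/* Note that add_stack_var_conflict records each conflict symmetrically in
   both variables' bitmaps, so testing A's bitmap for Y above is
   sufficient.  */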
/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   enter its partition number into bitmap DATA.  */

static bool
visit_op (gimple *, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      unsigned *v = decl_to_stack_part->get (op);
      if (v)
	bitmap_set_bit (active, *v);
    }
  return false;
}
/* Callback for walk_stmt_ops.  If OP is a decl touched by add_stack_var
   record conflicts between it and all currently active other partitions
   from bitmap DATA.  */

static bool
visit_conflict (gimple *, tree op, tree, void *data)
{
  bitmap active = (bitmap)data;
  op = get_base_address (op);
  if (op
      && DECL_P (op)
      && DECL_RTL_IF_SET (op) == pc_rtx)
    {
      unsigned *v = decl_to_stack_part->get (op);
      if (v && bitmap_set_bit (active, *v))
	{
	  unsigned num = *v;
	  bitmap_iterator bi;
	  unsigned i;
	  gcc_assert (num < stack_vars_num);
	  EXECUTE_IF_SET_IN_BITMAP (active, 0, i, bi)
	    add_stack_var_conflict (num, i);
	}
    }
  return false;
}
/* Helper function for add_scope_conflicts_1.  For USE on
   a stmt, if it is a SSA_NAME and in its SSA_NAME_DEF_STMT is known to be
   based on some ADDR_EXPR, invoke VISIT on that ADDR_EXPR.  */

static inline void
add_scope_conflicts_2 (tree use, bitmap work,
		       walk_stmt_load_store_addr_fn visit)
{
  if (TREE_CODE (use) == SSA_NAME
      && (POINTER_TYPE_P (TREE_TYPE (use))
	  || INTEGRAL_TYPE_P (TREE_TYPE (use))))
    {
      gimple *g = SSA_NAME_DEF_STMT (use);
      if (is_gimple_assign (g))
	if (tree op = gimple_assign_rhs1 (g))
	  if (TREE_CODE (op) == ADDR_EXPR)
	    visit (g, TREE_OPERAND (op, 0), op, work);
    }
}
/* Helper routine for add_scope_conflicts, calculating the active partitions
   at the end of BB, leaving the result in WORK.  We're called to generate
   conflicts when FOR_CONFLICT is true, otherwise we're just tracking
   liveness.  */

static void
add_scope_conflicts_1 (basic_block bb, bitmap work, bool for_conflict)
{
  edge e;
  edge_iterator ei;
  gimple_stmt_iterator gsi;
  walk_stmt_load_store_addr_fn visit;
  use_operand_p use_p;
  ssa_op_iter iter;

  bitmap_clear (work);
  FOR_EACH_EDGE (e, ei, bb->preds)
    bitmap_ior_into (work, (bitmap)e->src->aux);

  visit = visit_op;

  for (gsi = gsi_start_phis (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);
      gphi *phi = as_a <gphi *> (stmt);
      walk_stmt_load_store_addr_ops (stmt, work, NULL, NULL, visit);
      FOR_EACH_PHI_ARG (use_p, phi, iter, SSA_OP_USE)
	add_scope_conflicts_2 (USE_FROM_PTR (use_p), work, visit);
    }
  for (gsi = gsi_after_labels (bb); !gsi_end_p (gsi); gsi_next (&gsi))
    {
      gimple *stmt = gsi_stmt (gsi);

      if (gimple_clobber_p (stmt))
	{
	  tree lhs = gimple_assign_lhs (stmt);
	  unsigned *v;
	  /* Nested function lowering might introduce LHSs
	     that are COMPONENT_REFs.  */
	  if (!VAR_P (lhs))
	    continue;
	  if (DECL_RTL_IF_SET (lhs) == pc_rtx
	      && (v = decl_to_stack_part->get (lhs)))
	    bitmap_clear_bit (work, *v);
	}
      else if (!is_gimple_debug (stmt))
	{
	  if (for_conflict && visit == visit_op)
	    {
	      /* When we are inheriting live variables from our predecessors
		 through a CFG merge we might not see an actual mention of
		 the variables to record the appropriate conflict as defs/uses
		 might be through indirect stores/loads.  For this reason
		 we have to make sure each live variable conflicts with
		 each other.  When there's just a single predecessor the
		 set of conflicts is already up-to-date.
		 We perform this delayed at the first real instruction to
		 allow clobbers starting this block to remove variables from
		 the set of live variables.  */
	      bitmap_iterator bi;
	      unsigned i;
	      if (EDGE_COUNT (bb->preds) > 1)
		EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
		  {
		    class stack_var *a = &stack_vars[i];
		    if (!a->conflicts)
		      a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
		    bitmap_ior_into (a->conflicts, work);
		  }
	      visit = visit_conflict;
	    }
	  walk_stmt_load_store_addr_ops (stmt, work, visit, visit, visit);
	  FOR_EACH_SSA_USE_OPERAND (use_p, stmt, iter, SSA_OP_USE)
	    add_scope_conflicts_2 (USE_FROM_PTR (use_p), work, visit);
	}
    }

  /* When there was no real instruction but there's a CFG merge we need
     to add the conflicts now.  */
  if (for_conflict && visit == visit_op && EDGE_COUNT (bb->preds) > 1)
    {
      bitmap_iterator bi;
      unsigned i;
      EXECUTE_IF_SET_IN_BITMAP (work, 0, i, bi)
	{
	  class stack_var *a = &stack_vars[i];
	  if (!a->conflicts)
	    a->conflicts = BITMAP_ALLOC (&stack_var_bitmap_obstack);
	  bitmap_ior_into (a->conflicts, work);
	}
    }
}
/* Generate stack partition conflicts between all partitions that are
   simultaneously live.  */

static void
add_scope_conflicts (void)
{
  basic_block bb;
  bool changed;
  bitmap work = BITMAP_ALLOC (NULL);
  int *rpo;
  int n_bbs;

  /* We approximate the live range of a stack variable by taking the first
     mention of its name as starting point(s), and by the end-of-scope
     death clobber added by gimplify as ending point(s) of the range.
     This overapproximates in the case we for instance moved an address-taken
     operation upward, without also moving a dereference to it upwards.
     But it's conservatively correct as a variable never can hold values
     before its name is mentioned at least once.

     We then do a mostly classical bitmap liveness algorithm.  */

  FOR_ALL_BB_FN (bb, cfun)
    bb->aux = BITMAP_ALLOC (&stack_var_bitmap_obstack);

  rpo = XNEWVEC (int, last_basic_block_for_fn (cfun));
  n_bbs = pre_and_rev_post_order_compute (NULL, rpo, false);

  changed = true;
  while (changed)
    {
      int i;
      changed = false;
      for (i = 0; i < n_bbs; i++)
	{
	  bitmap active;
	  bb = BASIC_BLOCK_FOR_FN (cfun, rpo[i]);
	  active = (bitmap)bb->aux;
	  add_scope_conflicts_1 (bb, work, false);
	  if (bitmap_ior_into (active, work))
	    changed = true;
	}
    }

  free (rpo);

  FOR_EACH_BB_FN (bb, cfun)
    add_scope_conflicts_1 (bb, work, true);

  BITMAP_FREE (work);
  FOR_ALL_BB_FN (bb, cfun)
    BITMAP_FREE (bb->aux);
}
/* A subroutine of partition_stack_vars.  A comparison function for qsort,
   sorting an array of indices by the properties of the object.  */

static int
stack_var_cmp (const void *a, const void *b)
{
  unsigned ia = *(const unsigned *)a;
  unsigned ib = *(const unsigned *)b;
  unsigned int aligna = stack_vars[ia].alignb;
  unsigned int alignb = stack_vars[ib].alignb;
  poly_int64 sizea = stack_vars[ia].size;
  poly_int64 sizeb = stack_vars[ib].size;
  tree decla = stack_vars[ia].decl;
  tree declb = stack_vars[ib].decl;
  bool largea, largeb;
  unsigned int uida, uidb;

  /* Primary compare on "large" alignment.  Large comes first.  */
  largea = (aligna * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  largeb = (alignb * BITS_PER_UNIT > MAX_SUPPORTED_STACK_ALIGNMENT);
  if (largea != largeb)
    return (int)largeb - (int)largea;

  /* Secondary compare on size, decreasing.  */
  int diff = compare_sizes_for_sort (sizeb, sizea);
  if (diff != 0)
    return diff;
  /* Tertiary compare on true alignment, decreasing.  */
  if (aligna < alignb)
    return -1;
  if (aligna > alignb)
    return 1;

  /* Final compare on ID for sort stability, increasing.
     Two SSA names are compared by their version, SSA names come before
     non-SSA names, and two normal decls are compared by their DECL_UID.  */
  if (TREE_CODE (decla) == SSA_NAME)
    {
      if (TREE_CODE (declb) == SSA_NAME)
	uida = SSA_NAME_VERSION (decla), uidb = SSA_NAME_VERSION (declb);
      else
	return -1;
    }
  else if (TREE_CODE (declb) == SSA_NAME)
    return 1;
  else
    uida = DECL_UID (decla), uidb = DECL_UID (declb);
  if (uida < uidb)
    return 1;
  if (uida > uidb)
    return -1;
  return 0;
}
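/* Illustrative ordering under stack_var_cmp: an object whose alignment
   exceeds MAX_SUPPORTED_STACK_ALIGNMENT sorts before every
   "small"-alignment object regardless of size; among objects on the same
   side of that split, larger sizes come first.  */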
struct part_traits : unbounded_int_hashmap_traits <unsigned , bitmap> {};

typedef hash_map<unsigned, bitmap, part_traits> part_hashmap;
/* If the points-to solution *PT points to variables that are in a partition
   together with other variables add all partition members to the pointed-to
   variables bitmap.  */

static void
add_partitioned_vars_to_ptset (struct pt_solution *pt,
			       part_hashmap *decls_to_partitions,
			       hash_set<bitmap> *visited, bitmap temp)
{
  bitmap_iterator bi;
  unsigned i;
  bitmap *part;

  if (pt->anything
      || pt->vars == NULL
      /* The pointed-to vars bitmap is shared, it is enough to
	 visit it once.  */
      || visited->add (pt->vars))
    return;

  bitmap_clear (temp);

  /* By using a temporary bitmap to store all members of the partitions
     we have to add we make sure to visit each of the partitions only
     once.  */
  EXECUTE_IF_SET_IN_BITMAP (pt->vars, 0, i, bi)
    if ((!temp
	 || !bitmap_bit_p (temp, i))
	&& (part = decls_to_partitions->get (i)))
      bitmap_ior_into (temp, *part);
  if (!bitmap_empty_p (temp))
    bitmap_ior_into (pt->vars, temp);
}
/* Update points-to sets based on partition info, so we can use them on RTL.
   The bitmaps representing stack partitions will be saved until expand,
   where partitioned decls used as bases in memory expressions will be
   rewritten.

   It is not necessary to update TBAA info on accesses to the coalesced
   storage since our memory model doesn't allow TBAA to be used for
   WAW or WAR dependences.  For RAW when the write is to an old object
   the new object would not have been initialized at the point of the
   read, invoking undefined behavior.  */

static void
update_alias_info_with_stack_vars (void)
{
  part_hashmap *decls_to_partitions = NULL;
  unsigned i, j;
  tree var = NULL_TREE;

  for (i = 0; i < stack_vars_num; i++)
    {
      bitmap part = NULL;
      tree name;
      struct ptr_info_def *pi;

      /* Not interested in partitions with single variable.  */
      if (stack_vars[i].representative != i
	  || stack_vars[i].next == EOC)
	continue;

      if (!decls_to_partitions)
	{
	  decls_to_partitions = new part_hashmap;
	  cfun->gimple_df->decls_to_pointers = new hash_map<tree, tree>;
	}

      /* Create an SSA_NAME that points to the partition for use
	 as base during alias-oracle queries on RTL for bases that
	 have been partitioned.  */
      if (var == NULL_TREE)
	var = create_tmp_var (ptr_type_node);
      name = make_ssa_name (var);

      /* Create bitmaps representing partitions.  They will be used for
	 points-to sets later, so use GGC alloc.  */
      part = BITMAP_GGC_ALLOC ();
      for (j = i; j != EOC; j = stack_vars[j].next)
	{
	  tree decl = stack_vars[j].decl;
	  unsigned int uid = DECL_PT_UID (decl);
	  bitmap_set_bit (part, uid);
	  decls_to_partitions->put (uid, part);
	  cfun->gimple_df->decls_to_pointers->put (decl, name);
	  if (TREE_ADDRESSABLE (decl))
	    TREE_ADDRESSABLE (name) = 1;
	}

      /* Make the SSA name point to all partition members.  */
      pi = get_ptr_info (name);
      pt_solution_set (&pi->pt, part, false);
    }

  /* Make all points-to sets that contain one member of a partition
     contain all members of the partition.  */
  if (decls_to_partitions)
    {
      unsigned i;
      tree name;
      hash_set<bitmap> visited;
      bitmap temp = BITMAP_ALLOC (&stack_var_bitmap_obstack);

      FOR_EACH_SSA_NAME (i, name, cfun)
	{
	  struct ptr_info_def *pi;

	  if (POINTER_TYPE_P (TREE_TYPE (name))
	      && ((pi = SSA_NAME_PTR_INFO (name)) != NULL))
	    add_partitioned_vars_to_ptset (&pi->pt, decls_to_partitions,
					   &visited, temp);
	}

      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped,
				     decls_to_partitions, &visited, temp);
      add_partitioned_vars_to_ptset (&cfun->gimple_df->escaped_return,
				     decls_to_partitions, &visited, temp);
      delete decls_to_partitions;
      BITMAP_FREE (temp);
    }
}
/* A subroutine of partition_stack_vars.  The UNION portion of a UNION/FIND
   partitioning algorithm.  Partitions A and B are known to be non-conflicting.
   Merge them into a single partition A.  */

static void
union_stack_vars (unsigned a, unsigned b)
{
  class stack_var *vb = &stack_vars[b];
  bitmap_iterator bi;
  unsigned u;

  gcc_assert (stack_vars[b].next == EOC);
  /* Add B to A's partition.  */
  stack_vars[b].next = stack_vars[a].next;
  stack_vars[b].representative = a;
  stack_vars[a].next = b;

  /* Make sure A is big enough to hold B.  */
  stack_vars[a].size = upper_bound (stack_vars[a].size, stack_vars[b].size);

  /* Update the required alignment of partition A to account for B.  */
  if (stack_vars[a].alignb < stack_vars[b].alignb)
    stack_vars[a].alignb = stack_vars[b].alignb;

  /* Update the interference graph and merge the conflicts.  */
  if (vb->conflicts)
    {
      EXECUTE_IF_SET_IN_BITMAP (vb->conflicts, 0, u, bi)
	add_stack_var_conflict (a, stack_vars[u].representative);
      BITMAP_FREE (vb->conflicts);
    }
}
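/* Illustrative: starting from singleton partitions, union_stack_vars (a, b)
   followed by union_stack_vars (a, c) leaves A's member list as
   a -> c -> b -> EOC, with stack_vars[b].representative and
   stack_vars[c].representative both equal to a, and stack_vars[a].size equal
   to the largest member size.  */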
/* A subroutine of expand_used_vars.  Binpack the variables into
   partitions constrained by the interference graph.  The overall
   algorithm used is as follows:

	Sort the objects by size in descending order.
	For each object A {
	  S = size(A)
	  loop {
	    Look for the largest non-conflicting object B with size <= S.
	    UNION (A, B)
	    S -= size(B)
	  }
	}
*/

static void
partition_stack_vars (void)
{
  unsigned si, sj, n = stack_vars_num;

  stack_vars_sorted = XNEWVEC (unsigned, stack_vars_num);
  for (si = 0; si < n; ++si)
    stack_vars_sorted[si] = si;

  if (n == 1)
    return;

  qsort (stack_vars_sorted, n, sizeof (unsigned), stack_var_cmp);

  for (si = 0; si < n; ++si)
    {
      unsigned i = stack_vars_sorted[si];
      unsigned int ialign = stack_vars[i].alignb;
      poly_int64 isize = stack_vars[i].size;

      /* Ignore objects that aren't partition representatives.  If we
	 see a var that is not a partition representative, it must
	 have been merged earlier.  */
      if (stack_vars[i].representative != i)
	continue;

      for (sj = si + 1; sj < n; ++sj)
	{
	  unsigned j = stack_vars_sorted[sj];
	  unsigned int jalign = stack_vars[j].alignb;
	  poly_int64 jsize = stack_vars[j].size;

	  /* Ignore objects that aren't partition representatives.  */
	  if (stack_vars[j].representative != j)
	    continue;

	  /* Do not mix objects of "small" (supported) alignment
	     and "large" (unsupported) alignment.  */
	  if ((ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
	      != (jalign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT))
	    continue;

	  /* For Address Sanitizer do not mix objects with different
	     sizes, as the shorter vars wouldn't be adequately protected.
	     Don't do that for "large" (unsupported) alignment objects,
	     those aren't protected anyway.  */
	  if (asan_sanitize_stack_p ()
	      && maybe_ne (isize, jsize)
	      && ialign * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
	    continue;

	  /* Ignore conflicting objects.  */
	  if (stack_var_conflict_p (i, j))
	    continue;

	  /* UNION the objects, placing J at OFFSET.  */
	  union_stack_vars (i, j);
	}
    }

  update_alias_info_with_stack_vars ();
}
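/* Illustrative: four mutually non-conflicting locals of 32, 16, 8 and 4
   bytes end up in a single partition represented by the 32-byte object, so
   they all share one 32-byte stack slot; a conflicting (simultaneously
   live) object stays in its own partition.  */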
/* A debugging aid for expand_used_vars.  Dump the generated partitions.  */

static void
dump_stack_var_partition (void)
{
  unsigned si, i, j, n = stack_vars_num;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      fprintf (dump_file, "Partition %u: size ", i);
      print_dec (stack_vars[i].size, dump_file);
      fprintf (dump_file, " align %u\n", stack_vars[i].alignb);

      for (j = i; j != EOC; j = stack_vars[j].next)
	{
	  fputc ('\t', dump_file);
	  print_generic_expr (dump_file, stack_vars[j].decl, dump_flags);
	}
      fputc ('\n', dump_file);
    }
}
/* Assign rtl to DECL at BASE + OFFSET.  */

static void
expand_one_stack_var_at (tree decl, rtx base, unsigned base_align,
			 poly_int64 offset)
{
  unsigned align;
  rtx x;

  /* If this fails, we've overflowed the stack frame.  Error nicely?  */
  gcc_assert (known_eq (offset, trunc_int_for_mode (offset, Pmode)));

  if (hwasan_sanitize_stack_p ())
    x = targetm.memtag.add_tag (base, offset,
				hwasan_current_frame_tag ());
  else
    x = plus_constant (Pmode, base, offset);

  x = gen_rtx_MEM (TREE_CODE (decl) == SSA_NAME
		   ? TYPE_MODE (TREE_TYPE (decl))
		   : DECL_MODE (decl), x);

  /* Set alignment we actually gave this decl if it isn't an SSA name.
     If it is we generate stack slots only accidentally so it isn't as
     important, we'll simply set the alignment directly on the MEM.  */

  if (stack_vars_base_reg_p (base))
    offset -= frame_phase;
  align = known_alignment (offset);
  align *= BITS_PER_UNIT;
  if (align == 0 || align > base_align)
    align = base_align;

  if (TREE_CODE (decl) != SSA_NAME)
    {
      /* One would think that we could assert that we're not decreasing
	 alignment here, but (at least) the i386 port does exactly this
	 via the MINIMUM_ALIGNMENT hook.  */

      SET_DECL_ALIGN (decl, align);
      DECL_USER_ALIGN (decl) = 0;
    }

  set_mem_align (x, align);
  set_rtl (decl, x);
}
class stack_vars_data
{
public:
  /* Vector of offset pairs, always end of some padding followed
     by start of the padding that needs Address Sanitizer protection.
     The vector is reversed: highest-offset pairs come first.  */
  auto_vec<HOST_WIDE_INT> asan_vec;

  /* Vector of partition representative decls in between the paddings.  */
  auto_vec<tree> asan_decl_vec;

  /* Base pseudo register for Address Sanitizer protected automatic vars.  */
  rtx asan_base;

  /* Alignment needed for the Address Sanitizer protected automatic vars.  */
  unsigned int asan_alignb;
};
/* A subroutine of expand_used_vars.  Give each partition representative
   a unique location within the stack frame.  Update each partition member
   with that location.  */

static void
expand_stack_vars (bool (*pred) (unsigned), class stack_vars_data *data)
{
  unsigned si, i, j, n = stack_vars_num;
  poly_uint64 large_size = 0, large_alloc = 0;
  rtx large_base = NULL;
  rtx large_untagged_base = NULL;
  unsigned large_align = 0;
  bool large_allocation_done = false;
  tree decl;

  /* Determine if there are any variables requiring "large" alignment.
     Since these are dynamically allocated, we only process these if
     no predicate involved.  */
  large_align = stack_vars[stack_vars_sorted[0]].alignb * BITS_PER_UNIT;
  if (pred == NULL && large_align > MAX_SUPPORTED_STACK_ALIGNMENT)
    {
      /* Find the total size of these variables.  */
      for (si = 0; si < n; ++si)
	{
	  unsigned alignb;

	  i = stack_vars_sorted[si];
	  alignb = stack_vars[i].alignb;

	  /* All "large" alignment decls come before all "small" alignment
	     decls, but "large" alignment decls are not sorted based on
	     their alignment.  Increase large_align to track the largest
	     required alignment.  */
	  if ((alignb * BITS_PER_UNIT) > large_align)
	    large_align = alignb * BITS_PER_UNIT;

	  /* Stop when we get to the first decl with "small" alignment.  */
	  if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
	    break;

	  /* Skip variables that aren't partition representatives.  */
	  if (stack_vars[i].representative != i)
	    continue;

	  /* Skip variables that have already had rtl assigned.  See also
	     add_stack_var where we perpetrate this pc_rtx hack.  */
	  decl = stack_vars[i].decl;
	  if (TREE_CODE (decl) == SSA_NAME
	      ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
	      : DECL_RTL (decl) != pc_rtx)
	    continue;

	  large_size = aligned_upper_bound (large_size, alignb);
	  large_size += stack_vars[i].size;
	}
    }

  for (si = 0; si < n; ++si)
    {
      rtx base;
      unsigned base_align, alignb;
      poly_int64 offset = 0;

      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      /* Skip variables that have already had rtl assigned.  See also
	 add_stack_var where we perpetrate this pc_rtx hack.  */
      decl = stack_vars[i].decl;
      if (TREE_CODE (decl) == SSA_NAME
	  ? SA.partition_to_pseudo[var_to_partition (SA.map, decl)] != NULL_RTX
	  : DECL_RTL (decl) != pc_rtx)
	continue;

      /* Check the predicate to see whether this variable should be
	 allocated in this pass.  */
      if (pred && !pred (i))
	continue;

      base = (hwasan_sanitize_stack_p ()
	      ? hwasan_frame_base ()
	      : virtual_stack_vars_rtx);
      alignb = stack_vars[i].alignb;
      if (alignb * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT)
	{
	  poly_int64 hwasan_orig_offset;
	  if (hwasan_sanitize_stack_p ())
	    {
	      /* There must be no tag granule "shared" between different
		 objects.  This means that no HWASAN_TAG_GRANULE_SIZE byte
		 chunk can have more than one object in it.

		 We ensure this by forcing the end of the last bit of data to
		 be aligned to HWASAN_TAG_GRANULE_SIZE bytes here, and setting
		 the start of each variable to be aligned to
		 HWASAN_TAG_GRANULE_SIZE bytes in `align_local_variable`.

		 We can't align just one of the start or end, since there are
		 untagged things stored on the stack which we do not align to
		 HWASAN_TAG_GRANULE_SIZE bytes.  If we only aligned the start
		 or the end of tagged objects then untagged objects could end
		 up sharing the first granule of a tagged object or sharing the
		 last granule of a tagged object respectively.  */
	      hwasan_orig_offset = align_frame_offset (HWASAN_TAG_GRANULE_SIZE);
	      gcc_assert (stack_vars[i].alignb >= HWASAN_TAG_GRANULE_SIZE);
	    }
	  /* ASAN description strings don't yet have a syntax for expressing
	     polynomial offsets.  */
	  HOST_WIDE_INT prev_offset;
	  if (asan_sanitize_stack_p ()
	      && pred
	      && frame_offset.is_constant (&prev_offset)
	      && stack_vars[i].size.is_constant ())
	    {
	      if (data->asan_vec.is_empty ())
		{
		  align_frame_offset (ASAN_RED_ZONE_SIZE);
		  prev_offset = frame_offset.to_constant ();
		}
	      prev_offset = align_base (prev_offset,
					ASAN_MIN_RED_ZONE_SIZE,
					!FRAME_GROWS_DOWNWARD);
	      tree repr_decl = NULL_TREE;
	      unsigned HOST_WIDE_INT size
		= asan_var_and_redzone_size (stack_vars[i].size.to_constant ());
	      if (data->asan_vec.is_empty ())
		size = MAX (size, ASAN_RED_ZONE_SIZE);

	      unsigned HOST_WIDE_INT alignment = MAX (alignb,
						      ASAN_MIN_RED_ZONE_SIZE);
	      offset = alloc_stack_frame_space (size, alignment);

	      data->asan_vec.safe_push (prev_offset);
	      /* Allocating a constant amount of space from a constant
		 starting offset must give a constant result.  */
	      data->asan_vec.safe_push ((offset + stack_vars[i].size)
					.to_constant ());
	      /* Find best representative of the partition.
		 Prefer those with DECL_NAME, even better
		 satisfying asan_protect_stack_decl predicate.  */
	      for (j = i; j != EOC; j = stack_vars[j].next)
		if (asan_protect_stack_decl (stack_vars[j].decl)
		    && DECL_NAME (stack_vars[j].decl))
		  {
		    repr_decl = stack_vars[j].decl;
		    break;
		  }
		else if (repr_decl == NULL_TREE
			 && DECL_P (stack_vars[j].decl)
			 && DECL_NAME (stack_vars[j].decl))
		  repr_decl = stack_vars[j].decl;
	      if (repr_decl == NULL_TREE)
		repr_decl = stack_vars[i].decl;
	      data->asan_decl_vec.safe_push (repr_decl);

	      /* Make sure a representative is unpoisoned if another
		 variable in the partition is handled by
		 use-after-scope sanitization.  */
	      if (asan_handled_variables != NULL
		  && !asan_handled_variables->contains (repr_decl))
		{
		  for (j = i; j != EOC; j = stack_vars[j].next)
		    if (asan_handled_variables->contains (stack_vars[j].decl))
		      break;
		  if (j != EOC)
		    asan_handled_variables->add (repr_decl);
		}

	      data->asan_alignb = MAX (data->asan_alignb, alignb);
	      if (data->asan_base == NULL)
		data->asan_base = gen_reg_rtx (Pmode);
	      base = data->asan_base;

	      if (!STRICT_ALIGNMENT)
		base_align = crtl->max_used_stack_slot_alignment;
	      else
		base_align = MAX (crtl->max_used_stack_slot_alignment,
				  GET_MODE_ALIGNMENT (SImode)
				  << ASAN_SHADOW_SHIFT);
	    }
	  else
	    {
	      offset = alloc_stack_frame_space (stack_vars[i].size, alignb);
	      base_align = crtl->max_used_stack_slot_alignment;

	      if (hwasan_sanitize_stack_p ())
		{
		  /* Align again since the point of this alignment is to handle
		     the "end" of the object (i.e. smallest address after the
		     stack object).  For FRAME_GROWS_DOWNWARD that requires
		     aligning the stack before allocating, but for a frame that
		     grows upwards that requires aligning the stack after
		     allocation.

		     Use `frame_offset` to record the offset value rather than
		     `offset` since the `frame_offset` describes the extent
		     allocated for this particular variable while `offset`
		     describes the address that this variable starts at.  */
		  align_frame_offset (HWASAN_TAG_GRANULE_SIZE);
		  hwasan_record_stack_var (virtual_stack_vars_rtx, base,
					   hwasan_orig_offset, frame_offset);
		}
	    }
	}
      else
	{
	  /* Large alignment is only processed in the last pass.  */
	  if (pred)
	    continue;

	  /* If there were any variables requiring "large" alignment, allocate
	     space.  */
	  if (maybe_ne (large_size, 0U) && ! large_allocation_done)
	    {
	      poly_int64 loffset;
	      rtx large_allocsize;

	      large_allocsize = gen_int_mode (large_size, Pmode);
	      get_dynamic_stack_size (&large_allocsize, 0, large_align, NULL);
	      loffset = alloc_stack_frame_space
		(rtx_to_poly_int64 (large_allocsize),
		 PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT);
	      large_base = get_dynamic_stack_base (loffset, large_align, base);
	      large_allocation_done = true;
	    }

	  gcc_assert (large_base != NULL);
	  large_alloc = aligned_upper_bound (large_alloc, alignb);
	  offset = large_alloc;
	  large_alloc += stack_vars[i].size;
	  if (hwasan_sanitize_stack_p ())
	    {
	      /* An object with a large alignment requirement means that the
		 alignment requirement is greater than the required alignment
		 for tags.  */
	      if (!large_untagged_base)
		large_untagged_base
		  = targetm.memtag.untagged_pointer (large_base, NULL_RTX);
	      /* Ensure the end of the variable is also aligned correctly.  */
	      poly_int64 align_again
		= aligned_upper_bound (large_alloc, HWASAN_TAG_GRANULE_SIZE);
	      /* For large allocations we always allocate a chunk of space
		 (which is addressed by large_untagged_base/large_base) and
		 then use positive offsets from that.  Hence the farthest
		 offset is `align_again` and the nearest offset from the base
		 is `offset`.  */
	      hwasan_record_stack_var (large_untagged_base, large_base,
				       offset, align_again);
	    }

	  base = large_base;
	  base_align = large_align;
	}

      /* Create rtl for each variable based on their location within the
	 partition.  */
      for (j = i; j != EOC; j = stack_vars[j].next)
	expand_one_stack_var_at (stack_vars[j].decl,
				 base, base_align, offset);
      if (hwasan_sanitize_stack_p ())
	hwasan_increment_frame_tag ();
    }

  gcc_assert (known_eq (large_alloc, large_size));
}
/* Take into account all sizes of partitions and reset DECL_RTLs.  */
static poly_uint64
account_stack_vars (void)
{
  unsigned si, j, i, n = stack_vars_num;
  poly_uint64 size = 0;

  for (si = 0; si < n; ++si)
    {
      i = stack_vars_sorted[si];

      /* Skip variables that aren't partition representatives, for now.  */
      if (stack_vars[i].representative != i)
	continue;

      size += stack_vars[i].size;
      for (j = i; j != EOC; j = stack_vars[j].next)
	set_rtl (stack_vars[j].decl, NULL);
    }
  return size;
}
/* Record the RTL assignment X for the default def of PARM.  */

void
set_parm_rtl (tree parm, rtx x)
{
  gcc_assert (TREE_CODE (parm) == PARM_DECL
	      || TREE_CODE (parm) == RESULT_DECL);

  if (x && !MEM_P (x))
    {
      unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (parm),
					      TYPE_MODE (TREE_TYPE (parm)),
					      TYPE_ALIGN (TREE_TYPE (parm)));

      /* If the variable alignment is very large we'll dynamically
	 allocate it, which means that in-frame portion is just a
	 pointer.  ??? We've got a pseudo for sure here, do we
	 actually dynamically allocate its spilling area if needed?
	 ??? Isn't it a problem when Pmode alignment also exceeds
	 MAX_SUPPORTED_STACK_ALIGNMENT, as can happen on cris and lm32?  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
	align = GET_MODE_ALIGNMENT (Pmode);

      record_alignment_for_reg_var (align);
    }

  tree ssa = ssa_default_def (cfun, parm);
  if (!ssa)
    return set_rtl (parm, x);

  int part = var_to_partition (SA.map, ssa);
  gcc_assert (part != NO_PARTITION);

  bool changed = bitmap_bit_p (SA.partitions_for_parm_default_defs, part);
  gcc_assert (changed);

  set_rtl (ssa, x);
  gcc_assert (DECL_RTL (parm) == x);
}
/* A subroutine of expand_one_var.  Called to immediately assign rtl
   to a variable to be allocated in the stack frame.  */

static void
expand_one_stack_var_1 (tree var)
{
  poly_uint64 size;
  poly_int64 offset;
  unsigned byte_align;

  if (TREE_CODE (var) == SSA_NAME)
    {
      tree type = TREE_TYPE (var);
      size = tree_to_poly_uint64 (TYPE_SIZE_UNIT (type));
    }
  else
    size = tree_to_poly_uint64 (DECL_SIZE_UNIT (var));

  byte_align = align_local_variable (var, true);

  /* We handle highly aligned variables in expand_stack_vars.  */
  gcc_assert (byte_align * BITS_PER_UNIT <= MAX_SUPPORTED_STACK_ALIGNMENT);

  rtx base;
  if (hwasan_sanitize_stack_p ())
    {
      /* Allocate zero bytes to align the stack.  */
      poly_int64 hwasan_orig_offset
	= align_frame_offset (HWASAN_TAG_GRANULE_SIZE);
      offset = alloc_stack_frame_space (size, byte_align);
      align_frame_offset (HWASAN_TAG_GRANULE_SIZE);
      base = hwasan_frame_base ();
      /* Use `frame_offset` to automatically account for machines where the
	 frame grows upwards.

	 `offset` will always point to the "start" of the stack object, which
	 will be the smallest address, for ! FRAME_GROWS_DOWNWARD this is *not*
	 the "furthest" offset from the base delimiting the current stack
	 object.  `frame_offset` will always delimit the extent of the frame
	 allocated so far.  */
      hwasan_record_stack_var (virtual_stack_vars_rtx, base,
			       hwasan_orig_offset, frame_offset);
    }
  else
    {
      offset = alloc_stack_frame_space (size, byte_align);
      base = virtual_stack_vars_rtx;
    }

  expand_one_stack_var_at (var, base,
			   crtl->max_used_stack_slot_alignment, offset);

  if (hwasan_sanitize_stack_p ())
    hwasan_increment_frame_tag ();
}
/* Wrapper for expand_one_stack_var_1 that checks SSA_NAMEs are
   already assigned some MEM.  */

static void
expand_one_stack_var (tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, var);
      if (part != NO_PARTITION)
	{
	  rtx x = SA.partition_to_pseudo[part];
	  gcc_assert (MEM_P (x));
	  return;
	}
    }

  return expand_one_stack_var_1 (var);
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a hard register.  */

static void
expand_one_hard_reg_var (tree var)
{
  rest_of_decl_compilation (var, 0, 0);
}
/* Record the alignment requirements of some variable assigned to a
   pseudo register.  */

static void
record_alignment_for_reg_var (unsigned int align)
{
  if (SUPPORTS_STACK_ALIGNMENT
      && crtl->stack_alignment_estimated < align)
    {
      /* stack_alignment_estimated shouldn't change after stack
	 realign decision made.  */
      gcc_assert (!crtl->stack_realign_processed);
      crtl->stack_alignment_estimated = align;
    }

  /* stack_alignment_needed > PREFERRED_STACK_BOUNDARY is permitted.
     So here we only make sure stack_alignment_needed >= align.  */
  if (crtl->stack_alignment_needed < align)
    crtl->stack_alignment_needed = align;
  if (crtl->max_used_stack_slot_alignment < align)
    crtl->max_used_stack_slot_alignment = align;
}
/* Create RTL for an SSA partition.  */

static void
expand_one_ssa_partition (tree var)
{
  int part = var_to_partition (SA.map, var);
  gcc_assert (part != NO_PARTITION);

  if (SA.partition_to_pseudo[part])
    return;

  unsigned int align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
					  TYPE_MODE (TREE_TYPE (var)),
					  TYPE_ALIGN (TREE_TYPE (var)));

  /* If the variable alignment is very large we'll dynamically allocate
     it, which means that in-frame portion is just a pointer.  */
  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    align = GET_MODE_ALIGNMENT (Pmode);

  record_alignment_for_reg_var (align);

  if (!use_register_for_decl (var))
    {
      if (defer_stack_allocation (var, true))
	add_stack_var (var, true);
      else
	expand_one_stack_var_1 (var);
      return;
    }

  machine_mode reg_mode = promote_ssa_mode (var, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* For a promoted variable, X will not be used directly but wrapped in a
     SUBREG with SUBREG_PROMOTED_VAR_P set, which means that the RTL land
     will assume that its upper bits can be inferred from its lower bits.
     Therefore, if X isn't initialized on every path from the entry, then
     we must do it manually in order to fulfill the above assumption.  */
  if (reg_mode != TYPE_MODE (TREE_TYPE (var))
      && bitmap_bit_p (SA.partitions_for_undefined_values, part))
    emit_move_insn (x, CONST0_RTX (reg_mode));
}
/* Record the association between the RTL generated for partition PART
   and the underlying variable of the SSA_NAME VAR.  */

static void
adjust_one_expanded_partition_var (tree var)
{
  if (!var)
    return;

  tree decl = SSA_NAME_VAR (var);

  int part = var_to_partition (SA.map, var);
  if (part == NO_PARTITION)
    return;

  rtx x = SA.partition_to_pseudo[part];

  gcc_assert (x);

  set_rtl (var, x);

  if (!REG_P (x))
    return;

  /* Note if the object is a user variable.  */
  if (decl && !DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (decl ? TREE_TYPE (decl) : TREE_TYPE (var)))
    mark_reg_pointer (x, get_pointer_alignment (var));
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL
   that will reside in a pseudo register.  */

static void
expand_one_register_var (tree var)
{
  if (TREE_CODE (var) == SSA_NAME)
    {
      int part = var_to_partition (SA.map, var);
      if (part != NO_PARTITION)
	{
	  rtx x = SA.partition_to_pseudo[part];
	  gcc_assert (x);
	  gcc_assert (REG_P (x));
	  return;
	}
      gcc_unreachable ();
    }

  tree decl = SSAVAR (var);
  tree type = TREE_TYPE (decl);
  machine_mode reg_mode = promote_decl_mode (decl, NULL);
  rtx x = gen_reg_rtx (reg_mode);

  set_rtl (var, x);

  /* Note if the object is a user variable.  */
  if (!DECL_ARTIFICIAL (decl))
    mark_user_reg (x);

  if (POINTER_TYPE_P (type))
    mark_reg_pointer (x, get_pointer_alignment (var));
}
/* A subroutine of expand_one_var.  Called to assign rtl to a VAR_DECL that
   has some associated error, e.g. its type is error-mark.  We just need
   to pick something that won't crash the rest of the compiler.  */

static void
expand_one_error_var (tree var)
{
  machine_mode mode = DECL_MODE (var);
  rtx x;

  if (mode == BLKmode)
    x = gen_rtx_MEM (BLKmode, const0_rtx);
  else if (mode == VOIDmode)
    x = const0_rtx;
  else
    x = gen_reg_rtx (mode);

  SET_DECL_RTL (var, x);
}
/* A subroutine of expand_one_var.  VAR is a variable that will be
   allocated to the local stack frame.  Return true if we wish to
   add VAR to STACK_VARS so that it will be coalesced with other
   variables.  Return false to allocate VAR immediately.

   This function is used to reduce the number of variables considered
   for coalescing, which reduces the size of the quadratic problem.  */

static bool
defer_stack_allocation (tree var, bool toplevel)
{
  tree size_unit = TREE_CODE (var) == SSA_NAME
    ? TYPE_SIZE_UNIT (TREE_TYPE (var))
    : DECL_SIZE_UNIT (var);
  poly_uint64 size;

  /* Whether the variable is small enough for immediate allocation not to be
     a problem with regard to the frame size.  */
  bool smallish
    = (poly_int_tree_p (size_unit, &size)
       && (estimated_poly_value (size)
	   < param_min_size_for_stack_sharing));

  /* If stack protection is enabled, *all* stack variables must be deferred,
     so that we can re-order the strings to the top of the frame.
     Similarly for Address Sanitizer.  */
  if (flag_stack_protect || asan_sanitize_stack_p ())
    return true;

  unsigned int align = TREE_CODE (var) == SSA_NAME
    ? TYPE_ALIGN (TREE_TYPE (var))
    : DECL_ALIGN (var);

  /* We handle "large" alignment via dynamic allocation.  We want to handle
     this extra complication in only one place, so defer them.  */
  if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
    return true;

  bool ignored = TREE_CODE (var) == SSA_NAME
    ? !SSAVAR (var) || DECL_IGNORED_P (SSA_NAME_VAR (var))
    : DECL_IGNORED_P (var);

  /* When optimization is enabled, DECL_IGNORED_P variables originally scoped
     might be detached from their block and appear at toplevel when we reach
     here.  We want to coalesce them with variables from other blocks when
     the immediate contribution to the frame size would be noticeable.  */
  if (toplevel && optimize > 0 && ignored && !smallish)
    return true;

  /* Variables declared in the outermost scope automatically conflict
     with every other variable.  The only reason to want to defer them
     at all is that, after sorting, we can more efficiently pack
     small variables in the stack frame.  Continue to defer at -O2.  */
  if (toplevel && optimize < 2)
    return false;

  /* Without optimization, *most* variables are allocated from the
     stack, which makes the quadratic problem large exactly when we
     want compilation to proceed as quickly as possible.  On the
     other hand, we don't want the function's stack frame size to
     get completely out of hand.  So we avoid adding scalars and
     "small" aggregates to the list at all.  */
  if (optimize == 0 && smallish)
    return false;

  return true;
}
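/* For example, with -fstack-protector or when the address sanitizer
   instruments the stack, every variable is deferred; variables whose
   alignment exceeds MAX_SUPPORTED_STACK_ALIGNMENT are likewise always
   deferred, while at -O0 a variable smaller than
   --param min-size-for-stack-sharing is allocated immediately.  */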
/* A subroutine of expand_used_vars.  Expand one variable according to
   its flavor.  Variables to be placed on the stack are not actually
   expanded yet, merely recorded.
   When REALLY_EXPAND is false, only add stack values to be allocated.
   Return the stack usage this variable is supposed to take.  */

static poly_uint64
expand_one_var (tree var, bool toplevel, bool really_expand,
		bitmap forced_stack_var = NULL)
{
  unsigned int align = BITS_PER_UNIT;
  tree origvar = var;

  var = SSAVAR (var);

  if (TREE_TYPE (var) != error_mark_node && VAR_P (var))
    {
      if (is_global_var (var))
	return 0;

      /* Because we don't know if VAR will be in register or on stack,
	 we conservatively assume it will be on stack even if VAR is
	 eventually put into register after RA pass.  For non-automatic
	 variables, which won't be on stack, we collect alignment of
	 type and ignore user specified alignment.  Similarly for
	 SSA_NAMEs for which use_register_for_decl returns true.  */
      if (TREE_STATIC (var)
	  || DECL_EXTERNAL (var)
	  || (TREE_CODE (origvar) == SSA_NAME && use_register_for_decl (var)))
	align = MINIMUM_ALIGNMENT (TREE_TYPE (var),
				   TYPE_MODE (TREE_TYPE (var)),
				   TYPE_ALIGN (TREE_TYPE (var)));
      else if (DECL_HAS_VALUE_EXPR_P (var)
	       || (DECL_RTL_SET_P (var) && MEM_P (DECL_RTL (var))))
	/* Don't consider debug only variables with DECL_HAS_VALUE_EXPR_P set
	   or variables which were assigned a stack slot already by
	   expand_one_stack_var_at - in the latter case DECL_ALIGN has been
	   changed from the offset chosen to it.  */
	align = crtl->stack_alignment_estimated;
      else
	align = MINIMUM_ALIGNMENT (var, DECL_MODE (var), DECL_ALIGN (var));

      /* If the variable alignment is very large we'll dynamically allocate
	 it, which means that in-frame portion is just a pointer.  */
      if (align > MAX_SUPPORTED_STACK_ALIGNMENT)
	align = GET_MODE_ALIGNMENT (Pmode);
    }

  record_alignment_for_reg_var (align);

  poly_uint64 size;
  if (TREE_CODE (origvar) == SSA_NAME)
    {
      gcc_assert (!VAR_P (var)
		  || (!DECL_EXTERNAL (var)
		      && !DECL_HAS_VALUE_EXPR_P (var)
		      && !TREE_STATIC (var)
		      && TREE_TYPE (var) != error_mark_node
		      && !DECL_HARD_REGISTER (var)
		      && really_expand));
    }
  if (!VAR_P (var) && TREE_CODE (origvar) != SSA_NAME)
    ;
  else if (DECL_EXTERNAL (var))
    ;
  else if (DECL_HAS_VALUE_EXPR_P (var))
    ;
  else if (TREE_STATIC (var))
    ;
  else if (TREE_CODE (origvar) != SSA_NAME && DECL_RTL_SET_P (var))
    ;
  else if (TREE_TYPE (var) == error_mark_node)
    {
      if (really_expand)
	expand_one_error_var (var);
    }
  else if (VAR_P (var) && DECL_HARD_REGISTER (var))
    {
      if (really_expand)
	{
	  expand_one_hard_reg_var (var);
	  if (!DECL_HARD_REGISTER (var))
	    /* Invalid register specification.  */
	    expand_one_error_var (var);
	}
    }
  else if (use_register_for_decl (var)
	   && (!forced_stack_var
	       || !bitmap_bit_p (forced_stack_var, DECL_UID (var))))
    {
      if (really_expand)
	expand_one_register_var (origvar);
    }
  else if (!poly_int_tree_p (DECL_SIZE_UNIT (var), &size)
	   || !valid_constant_size_p (DECL_SIZE_UNIT (var)))
    {
      /* Reject variables which cover more than half of the address-space.  */
      if (really_expand)
	{
	  if (DECL_NONLOCAL_FRAME (var))
	    error_at (DECL_SOURCE_LOCATION (current_function_decl),
		      "total size of local objects is too large");
	  else
	    error_at (DECL_SOURCE_LOCATION (var),
		      "size of variable %q+D is too large", var);
	  expand_one_error_var (var);
	}
    }
  else if (defer_stack_allocation (var, toplevel))
    add_stack_var (origvar, really_expand);
  else
    {
      if (really_expand)
	{
	  if (lookup_attribute ("naked",
				DECL_ATTRIBUTES (current_function_decl)))
	    error ("cannot allocate stack for variable %q+D, naked function",
		   var);

	  expand_one_stack_var (origvar);
	}
      return size;
    }
  return 0;
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   expanding variables.  Those variables that can be put into registers
   are allocated pseudos; those that can't are put on the stack.

   TOPLEVEL is true if this is the outermost BLOCK.  */

static void
expand_used_vars_for_block (tree block, bool toplevel, bitmap forced_stack_vars)
{
  tree t;

  /* Expand all variables at this level.  */
  for (t = BLOCK_VARS (block); t; t = DECL_CHAIN (t))
    if (TREE_USED (t)
	&& ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
	    || !DECL_NONSHAREABLE (t)))
      expand_one_var (t, toplevel, true, forced_stack_vars);

  /* Expand all variables at containing levels.  */
  for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
    expand_used_vars_for_block (t, false, forced_stack_vars);
}
/* A subroutine of expand_used_vars.  Walk down through the BLOCK tree
   and clear TREE_USED on all local variables.  */

static void
clear_tree_used (tree block)
{
  tree t;

  for (t = BLOCK_VARS (block); t; t = DECL_CHAIN (t))
    /* if (!TREE_STATIC (t) && !DECL_EXTERNAL (t)) */
    if ((!VAR_P (t) && TREE_CODE (t) != RESULT_DECL)
	|| !DECL_NONSHAREABLE (t))
      TREE_USED (t) = 0;

  for (t = BLOCK_SUBBLOCKS (block); t; t = BLOCK_CHAIN (t))
    clear_tree_used (t);
}
/* Examine TYPE and determine a bit mask of the following features.  */

#define SPCT_HAS_LARGE_CHAR_ARRAY	1
#define SPCT_HAS_SMALL_CHAR_ARRAY	2
#define SPCT_HAS_ARRAY			4
#define SPCT_HAS_AGGREGATE		8

static unsigned int
stack_protect_classify_type (tree type)
{
  unsigned int ret = 0;
  tree t;

  switch (TREE_CODE (type))
    {
    case ARRAY_TYPE:
      t = TYPE_MAIN_VARIANT (TREE_TYPE (type));
      if (t == char_type_node
	  || t == signed_char_type_node
	  || t == unsigned_char_type_node)
	{
	  unsigned HOST_WIDE_INT max = param_ssp_buffer_size;
	  unsigned HOST_WIDE_INT len;

	  if (!TYPE_SIZE_UNIT (type)
	      || !tree_fits_uhwi_p (TYPE_SIZE_UNIT (type)))
	    len = max;
	  else
	    len = tree_to_uhwi (TYPE_SIZE_UNIT (type));

	  if (len < max)
	    ret = SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_ARRAY;
	  else
	    ret = SPCT_HAS_LARGE_CHAR_ARRAY | SPCT_HAS_ARRAY;
	}
      else
	ret = SPCT_HAS_ARRAY;
      break;

    case UNION_TYPE:
    case QUAL_UNION_TYPE:
    case RECORD_TYPE:
      ret = SPCT_HAS_AGGREGATE;
      for (t = TYPE_FIELDS (type); t; t = TREE_CHAIN (t))
	if (TREE_CODE (t) == FIELD_DECL)
	  ret |= stack_protect_classify_type (TREE_TYPE (t));
      break;

    default:
      break;
    }

  return ret;
}
/* Return nonzero if DECL should be segregated into the "vulnerable" upper
   part of the local stack frame.  Remember if we ever return nonzero for
   any variable in this function.  The return value is the phase number in
   which the variable should be allocated.  */

static int
stack_protect_decl_phase (tree decl)
{
  unsigned int bits = stack_protect_classify_type (TREE_TYPE (decl));
  int ret = 0;

  if (bits & SPCT_HAS_SMALL_CHAR_ARRAY)
    has_short_buffer = true;

  tree attribs = DECL_ATTRIBUTES (current_function_decl);
  if (!lookup_attribute ("no_stack_protector", attribs)
      && (flag_stack_protect == SPCT_FLAG_ALL
	  || flag_stack_protect == SPCT_FLAG_STRONG
	  || (flag_stack_protect == SPCT_FLAG_EXPLICIT
	      && lookup_attribute ("stack_protect", attribs))))
    {
      if ((bits & (SPCT_HAS_SMALL_CHAR_ARRAY | SPCT_HAS_LARGE_CHAR_ARRAY))
	  && !(bits & SPCT_HAS_AGGREGATE))
	ret = 1;
      else if (bits & SPCT_HAS_ARRAY)
	ret = 2;
    }
  else
    ret = (bits & SPCT_HAS_LARGE_CHAR_ARRAY) != 0;

  if (ret)
    has_protected_decls = true;

  return ret;
}
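/* Illustrative examples of the classification above (assuming the default
   --param ssp-buffer-size=8): under -fstack-protector-strong a local
   "char buf[64]" is phase 1 (char array, no enclosing aggregate), a local
   "int a[10]" is phase 2 (array, but not a char array), and a plain scalar
   is phase 0; under plain -fstack-protector only the large char array
   returns nonzero.  */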
/* Two helper routines that check for phase 1 and phase 2.  These are used
   as callbacks for expand_stack_vars.  */

static bool
stack_protect_decl_phase_1 (unsigned i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 1;
}

static bool
stack_protect_decl_phase_2 (unsigned i)
{
  return stack_protect_decl_phase (stack_vars[i].decl) == 2;
}
2041 /* And helper function that checks for asan phase (with stack protector
2042 it is phase 3). This is used as callback for expand_stack_vars.
2043 Returns true if any of the vars in the partition need to be protected. */
2046 asan_decl_phase_3 (unsigned i
)
2050 if (asan_protect_stack_decl (stack_vars
[i
].decl
))
2052 i
= stack_vars
[i
].next
;
/* Ensure that variables in different stack protection phases conflict
   so that they are not merged and share the same stack slot.
   Return true if there are any address taken variables.  */

static bool
add_stack_protection_conflicts (void)
{
  unsigned i, j, n = stack_vars_num;
  unsigned char *phase;
  bool ret = false;

  phase = XNEWVEC (unsigned char, n);
  for (i = 0; i < n; ++i)
    {
      phase[i] = stack_protect_decl_phase (stack_vars[i].decl);
      if (TREE_ADDRESSABLE (stack_vars[i].decl))
        ret = true;
    }

  for (i = 0; i < n; ++i)
    {
      unsigned char ph_i = phase[i];
      for (j = i + 1; j < n; ++j)
        if (ph_i != phase[j])
          add_stack_var_conflict (i, j);
    }

  XDELETEVEC (phase);
  return ret;
}
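
/* Illustrative sketch (not part of GCC): the conflict-marking idea above
   restated over plain vectors.  add_phase_conflicts is a hypothetical name
   and the conflicts vector stands in for add_stack_var_conflict.

     #include <utility>
     #include <vector>

     // Record every pair of slots whose protection phases differ as
     // conflicting, so a later packing step can never merge them.
     static void
     add_phase_conflicts (const std::vector<int> &phase,
                          std::vector<std::pair<size_t, size_t>> &conflicts)
     {
       for (size_t i = 0; i < phase.size (); ++i)
         for (size_t j = i + 1; j < phase.size (); ++j)
           if (phase[i] != phase[j])
             conflicts.emplace_back (i, j);   // like add_stack_var_conflict (i, j)
     }
*/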
/* Create a decl for the guard at the top of the stack frame.  */

static void
create_stack_guard (void)
{
  tree guard = build_decl (DECL_SOURCE_LOCATION (current_function_decl),
                           VAR_DECL, NULL, ptr_type_node);
  TREE_THIS_VOLATILE (guard) = 1;
  TREE_USED (guard) = 1;
  expand_one_stack_var (guard);
  crtl->stack_protect_guard = guard;
}
/* Prepare for expanding variables.  */

static void
init_vars_expansion (void)
{
  /* Conflict bitmaps, and a few related temporary bitmaps, go here.  */
  bitmap_obstack_initialize (&stack_var_bitmap_obstack);

  /* A map from decl to stack partition.  */
  decl_to_stack_part = new hash_map<tree, unsigned>;

  /* Initialize local stack smashing state.  */
  has_protected_decls = false;
  has_short_buffer = false;
  if (hwasan_sanitize_stack_p ())
    hwasan_record_frame_init ();
}

/* Free up stack variable graph data.  */

static void
fini_vars_expansion (void)
{
  bitmap_obstack_release (&stack_var_bitmap_obstack);
  if (stack_vars)
    XDELETEVEC (stack_vars);
  if (stack_vars_sorted)
    XDELETEVEC (stack_vars_sorted);
  stack_vars = NULL;
  stack_vars_sorted = NULL;
  stack_vars_alloc = stack_vars_num = 0;
  delete decl_to_stack_part;
  decl_to_stack_part = NULL;
}
/* Make a fair guess for the size of the stack frame of the function
   in NODE.  This doesn't have to be exact, the result is only used in
   the inline heuristics.  So we don't want to run the full stack var
   packing algorithm (which is quadratic in the number of stack vars).
   Instead, we calculate the total size of all stack vars.  This turns
   out to be a pretty fair estimate -- packing of stack vars doesn't
   happen very often.  */

HOST_WIDE_INT
estimated_stack_frame_size (struct cgraph_node *node)
{
  poly_int64 size = 0;
  size_t i;
  tree var;
  struct function *fn = DECL_STRUCT_FUNCTION (node->decl);

  push_cfun (fn);

  init_vars_expansion ();

  FOR_EACH_LOCAL_DECL (fn, i, var)
    if (auto_var_in_fn_p (var, fn->decl))
      size += expand_one_var (var, true, false);

  if (stack_vars_num > 0)
    {
      /* Fake sorting the stack vars for account_stack_vars ().  */
      stack_vars_sorted = XNEWVEC (unsigned, stack_vars_num);
      for (i = 0; i < stack_vars_num; ++i)
        stack_vars_sorted[i] = i;
      size += account_stack_vars ();
    }

  fini_vars_expansion ();
  pop_cfun ();
  return estimated_poly_value (size);
}
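
/* Illustrative sketch (not part of GCC): the estimate above is just the
   linear-time sum of the individual slot sizes, with no packing attempted.
   Up to alignment padding, a packed frame can only be smaller, so the sum
   is a safe and usually tight figure for the inline heuristics.
   estimate_frame_size is a hypothetical name.

     #include <cstdint>
     #include <vector>

     static int64_t
     estimate_frame_size (const std::vector<int64_t> &slot_sizes)
     {
       int64_t total = 0;
       for (int64_t s : slot_sizes)
         total += s;                 // no interference graph, no qsort
       return total;
     }
*/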
/* Check if the current function has calls that use a return slot.  */

static bool
stack_protect_return_slot_p ()
{
  basic_block bb;

  FOR_ALL_BB_FN (bb, cfun)
    for (gimple_stmt_iterator gsi = gsi_start_bb (bb);
         !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple *stmt = gsi_stmt (gsi);
        /* This assumes that calls to internal-only functions never
           use a return slot.  */
        if (is_gimple_call (stmt)
            && !gimple_call_internal_p (stmt)
            && aggregate_value_p (TREE_TYPE (gimple_call_fntype (stmt)),
                                  gimple_call_fndecl (stmt)))
          return true;
      }
  return false;
}
/* Expand all variables used in the function.  */

static rtx_insn *
expand_used_vars (bitmap forced_stack_vars)
{
  tree var, outer_block = DECL_INITIAL (current_function_decl);
  auto_vec<tree> maybe_local_decls;
  rtx_insn *var_end_seq = NULL;
  unsigned i;
  unsigned len;
  bool gen_stack_protect_signal = false;

  /* Compute the phase of the stack frame for this function.  */
  {
    int align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
    int off = targetm.starting_frame_offset () % align;
    frame_phase = off ? align - off : 0;
  }
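
  /* Worked example (illustration only): with a preferred stack boundary of
     128 bits, ALIGN is 16 bytes.  If the target's starting frame offset is,
     say, 8, then OFF = 8 % 16 = 8 and FRAME_PHASE = 16 - 8 = 8; later
     slot-alignment computations offset their rounding by this phase, so a
     slot whose frame offset is congruent to 8 modulo 16 lands on a true
     16-byte boundary.  If the starting offset is already a multiple of 16,
     OFF is 0 and FRAME_PHASE stays 0.  */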
  /* Set TREE_USED on all variables in the local_decls.  */
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    TREE_USED (var) = 1;
  /* Clear TREE_USED on all variables associated with a block scope.  */
  clear_tree_used (DECL_INITIAL (current_function_decl));

  init_vars_expansion ();

  if (targetm.use_pseudo_pic_reg ())
    pic_offset_table_rtx = gen_reg_rtx (Pmode);

  for (i = 0; i < SA.map->num_partitions; i++)
    {
      if (bitmap_bit_p (SA.partitions_for_parm_default_defs, i))
        continue;

      tree var = partition_to_var (SA.map, i);

      gcc_assert (!virtual_operand_p (var));

      expand_one_ssa_partition (var);
    }

  if (flag_stack_protect == SPCT_FLAG_STRONG)
    gen_stack_protect_signal = stack_protect_return_slot_p ();
  /* At this point all variables on the local_decls with TREE_USED
     set are not associated with any block scope.  Lay them out.  */

  len = vec_safe_length (cfun->local_decls);
  FOR_EACH_LOCAL_DECL (cfun, i, var)
    {
      bool expand_now = false;

      /* Expanded above already.  */
      if (is_gimple_reg (var))
        {
          TREE_USED (var) = 0;
          goto next;
        }
      /* We didn't set a block for static or extern because it's hard
         to tell the difference between a global variable (re)declared
         in a local scope, and one that's really declared there to
         begin with.  And it doesn't really matter much, since we're
         not giving them stack space.  Expand them now.  */
      else if (TREE_STATIC (var) || DECL_EXTERNAL (var))
        expand_now = true;

      /* Expand variables not associated with any block now.  Those created by
         the optimizers could be live anywhere in the function.  Those that
         could possibly have been scoped originally and detached from their
         block will have their allocation deferred so we coalesce them with
         others when optimization is enabled.  */
      else if (TREE_USED (var))
        expand_now = true;

      /* Finally, mark all variables on the list as used.  We'll use
         this in a moment when we expand those associated with scopes.  */
      TREE_USED (var) = 1;

      if (expand_now)
        expand_one_var (var, true, true, forced_stack_vars);

    next:
      if (DECL_ARTIFICIAL (var) && !DECL_IGNORED_P (var))
        {
          rtx rtl = DECL_RTL_IF_SET (var);

          /* Keep artificial non-ignored vars in cfun->local_decls
             chain until instantiate_decls.  */
          if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
            add_local_decl (cfun, var);
          else if (rtl == NULL_RTX)
            /* If rtl isn't set yet, which can happen e.g. with
               -fstack-protector, retry before returning from this
               function.  */
            maybe_local_decls.safe_push (var);
        }
    }

  /* We duplicated some of the decls in CFUN->LOCAL_DECLS.

     +-----------------+-----------------+
     | ...processed... | ...duplicates...|
     +-----------------+-----------------+
                       ^
                       +-- LEN points here.

     We just want the duplicates, as those are the artificial
     non-ignored vars that we want to keep until instantiate_decls.
     Move them down and truncate the array.  */
  if (!vec_safe_is_empty (cfun->local_decls))
    cfun->local_decls->block_remove (0, len);
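
  /* Illustrative sketch (not part of GCC): the trimming above expressed with
     a plain std::vector.  block_remove (0, len) drops the first LEN elements,
     so only the entries pushed during the loop above (the "duplicates")
     survive.  keep_only_duplicates is a hypothetical name.

       #include <vector>

       static void
       keep_only_duplicates (std::vector<int> &local_decls, size_t len)
       {
         // Erase the already-processed prefix; the trailing duplicates
         // slide down to the front and the vector shrinks.
         local_decls.erase (local_decls.begin (), local_decls.begin () + len);
       }
  */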
  /* At this point, all variables within the block tree with TREE_USED
     set are actually used by the optimized function.  Lay them out.  */
  expand_used_vars_for_block (outer_block, true, forced_stack_vars);

  tree attribs = DECL_ATTRIBUTES (current_function_decl);
  if (stack_vars_num > 0)
    {
      bool has_addressable_vars = false;

      add_scope_conflicts ();

      /* If stack protection is enabled, we don't share space between
         vulnerable data and non-vulnerable data.  */
      if (flag_stack_protect != 0
          && !lookup_attribute ("no_stack_protector", attribs)
          && (flag_stack_protect != SPCT_FLAG_EXPLICIT
              || (flag_stack_protect == SPCT_FLAG_EXPLICIT
                  && lookup_attribute ("stack_protect", attribs))))
        has_addressable_vars = add_stack_protection_conflicts ();

      if (flag_stack_protect == SPCT_FLAG_STRONG && has_addressable_vars)
        gen_stack_protect_signal = true;

      /* Now that we have collected all stack variables, and have computed a
         minimal interference graph, attempt to save some stack space.  */
      partition_stack_vars ();
      if (dump_file)
        dump_stack_var_partition ();
    }

  if (!lookup_attribute ("no_stack_protector", attribs))
    switch (flag_stack_protect)
      {
      case SPCT_FLAG_ALL:
        create_stack_guard ();
        break;

      case SPCT_FLAG_STRONG:
        if (gen_stack_protect_signal
            || cfun->calls_alloca
            || has_protected_decls
            || lookup_attribute ("stack_protect", attribs))
          create_stack_guard ();
        break;

      case SPCT_FLAG_DEFAULT:
        if (cfun->calls_alloca
            || has_protected_decls
            || lookup_attribute ("stack_protect", attribs))
          create_stack_guard ();
        break;

      case SPCT_FLAG_EXPLICIT:
        if (lookup_attribute ("stack_protect", attribs))
          create_stack_guard ();
        break;

      default:
        break;
      }
  /* Assign rtl to each variable based on these partitions.  */
  if (stack_vars_num > 0)
    {
      class stack_vars_data data;

      data.asan_base = NULL_RTX;
      data.asan_alignb = 0;

      /* Reorder decls to be protected by iterating over the variables
         array multiple times, and allocating out of each phase in turn.  */
      /* ??? We could probably integrate this into the qsort we did
         earlier, such that we naturally see these variables first,
         and thus naturally allocate things in the right order.  */
      if (has_protected_decls)
        {
          /* Phase 1 contains only character arrays.  */
          expand_stack_vars (stack_protect_decl_phase_1, &data);

          /* Phase 2 contains other kinds of arrays.  */
          if (!lookup_attribute ("no_stack_protector", attribs)
              && (flag_stack_protect == SPCT_FLAG_ALL
                  || flag_stack_protect == SPCT_FLAG_STRONG
                  || (flag_stack_protect == SPCT_FLAG_EXPLICIT
                      && lookup_attribute ("stack_protect", attribs))))
            expand_stack_vars (stack_protect_decl_phase_2, &data);
        }

      if (asan_sanitize_stack_p ())
        /* Phase 3, any partitions that need asan protection
           in addition to phase 1 and 2.  */
        expand_stack_vars (asan_decl_phase_3, &data);

      /* ASAN description strings don't yet have a syntax for expressing
         polynomial offsets.  */
      HOST_WIDE_INT prev_offset;
      if (!data.asan_vec.is_empty ()
          && frame_offset.is_constant (&prev_offset))
        {
          HOST_WIDE_INT offset, sz, redzonesz;
          redzonesz = ASAN_RED_ZONE_SIZE;
          sz = data.asan_vec[0] - prev_offset;
          if (data.asan_alignb > ASAN_RED_ZONE_SIZE
              && data.asan_alignb <= 4096
              && sz + ASAN_RED_ZONE_SIZE >= (int) data.asan_alignb)
            redzonesz = ((sz + ASAN_RED_ZONE_SIZE + data.asan_alignb - 1)
                         & ~(data.asan_alignb - HOST_WIDE_INT_1)) - sz;
          /* Allocating a constant amount of space from a constant
             starting offset must give a constant result.  */
          offset = (alloc_stack_frame_space (redzonesz, ASAN_RED_ZONE_SIZE)
                    .to_constant ());
          data.asan_vec.safe_push (prev_offset);
          data.asan_vec.safe_push (offset);
          /* Leave space for alignment if STRICT_ALIGNMENT.  */
          if (STRICT_ALIGNMENT)
            alloc_stack_frame_space ((GET_MODE_ALIGNMENT (SImode)
                                      << ASAN_SHADOW_SHIFT)
                                     / BITS_PER_UNIT, 1);

          var_end_seq
            = asan_emit_stack_protection (virtual_stack_vars_rtx,
                                          data.asan_base,
                                          data.asan_alignb,
                                          data.asan_vec.address (),
                                          data.asan_decl_vec.address (),
                                          data.asan_vec.length ());
        }

      expand_stack_vars (NULL, &data);
    }

  if (hwasan_sanitize_stack_p ())
    hwasan_emit_prologue ();
  if (asan_sanitize_allocas_p () && cfun->calls_alloca)
    var_end_seq = asan_emit_allocas_unpoison (virtual_stack_dynamic_rtx,
                                              virtual_stack_vars_rtx,
                                              var_end_seq);
  else if (hwasan_sanitize_allocas_p () && cfun->calls_alloca)
    /* When using out-of-line instrumentation we only want to emit one function
       call for clearing the tags in a region of shadow stack.  When there are
       alloca calls in this frame we want to emit a call using the
       virtual_stack_dynamic_rtx, but when not we use the hwasan_frame_extent
       rtx we created in expand_stack_vars.  */
    var_end_seq = hwasan_emit_untag_frame (virtual_stack_dynamic_rtx,
                                           virtual_stack_vars_rtx);
  else if (hwasan_sanitize_stack_p ())
    /* If no variables were stored on the stack, `hwasan_get_frame_extent`
       will return NULL_RTX and hence `hwasan_emit_untag_frame` will return
       NULL (i.e. an empty sequence).  */
    var_end_seq = hwasan_emit_untag_frame (hwasan_get_frame_extent (),
                                           virtual_stack_vars_rtx);
  fini_vars_expansion ();

  /* If there were any artificial non-ignored vars without rtl
     found earlier, see if deferred stack allocation hasn't assigned
     rtl to them.  */
  FOR_EACH_VEC_ELT_REVERSE (maybe_local_decls, i, var)
    {
      rtx rtl = DECL_RTL_IF_SET (var);

      /* Keep artificial non-ignored vars in cfun->local_decls
         chain until instantiate_decls.  */
      if (rtl && (MEM_P (rtl) || GET_CODE (rtl) == CONCAT))
        add_local_decl (cfun, var);
    }

  /* If the target requires that FRAME_OFFSET be aligned, do it.  */
  if (STACK_ALIGNMENT_NEEDED)
    {
      HOST_WIDE_INT align = PREFERRED_STACK_BOUNDARY / BITS_PER_UNIT;
      if (FRAME_GROWS_DOWNWARD)
        frame_offset = aligned_lower_bound (frame_offset, align);
      else
        frame_offset = aligned_upper_bound (frame_offset, align);
    }

  return var_end_seq;
}
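
/* Illustrative sketch (not part of GCC): for a power-of-two ALIGN, the two
   roundings used at the end of expand_used_vars behave like the bit tricks
   below (shown on plain integers; the real frame_offset is a poly_int64).

     #include <cstdint>

     static int64_t
     align_down (int64_t x, int64_t align)   // ~ aligned_lower_bound
     {
       return x & -align;
     }

     static int64_t
     align_up (int64_t x, int64_t align)     // ~ aligned_upper_bound
     {
       return (x + align - 1) & -align;
     }

   For example align_down (-20, 16) == -32 (the FRAME_GROWS_DOWNWARD case)
   and align_up (20, 16) == 32.  */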
/* If we need to produce a detailed dump, print the tree representation
   for STMT to the dump file.  SINCE is the last RTX after which the RTL
   generated for STMT should have been appended.  */

static void
maybe_dump_rtl_for_gimple_stmt (gimple *stmt, rtx_insn *since)
{
  if (dump_file && (dump_flags & TDF_DETAILS))
    {
      fprintf (dump_file, "\n;; ");
      print_gimple_stmt (dump_file, stmt, 0,
                         TDF_SLIM | (dump_flags & TDF_LINENO));
      fprintf (dump_file, "\n");

      print_rtl (dump_file, since ? NEXT_INSN (since) : since);
    }
}
/* Maps the blocks that do not contain tree labels to rtx labels.  */

static hash_map<basic_block, rtx_code_label *> *lab_rtx_for_bb;

/* Returns the label_rtx expression for a label starting basic block BB.  */

static rtx_code_label *
label_rtx_for_bb (basic_block bb ATTRIBUTE_UNUSED)
{
  if (bb->flags & BB_RTL)
    return block_label (bb);

  rtx_code_label **elt = lab_rtx_for_bb->get (bb);
  if (elt)
    return *elt;

  /* Find the tree label if it is present.  */
  gimple_stmt_iterator gsi = gsi_start_bb (bb);
  glabel *lab_stmt;
  if (!gsi_end_p (gsi)
      && (lab_stmt = dyn_cast <glabel *> (gsi_stmt (gsi)))
      && !DECL_NONLOCAL (gimple_label_label (lab_stmt)))
    return jump_target_rtx (gimple_label_label (lab_stmt));

  rtx_code_label *l = gen_label_rtx ();
  lab_rtx_for_bb->put (bb, l);
  return l;
}
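
/* Illustrative sketch (not part of GCC): the lazy get-or-create pattern used
   above, restated with the standard library.  Labels are represented by
   plain integers here; the real code stores rtx_code_label pointers, and
   the names below are hypothetical.

     #include <unordered_map>

     static std::unordered_map<int, int> lab_for_bb;  // keyed by block index
     static int next_label;

     static int
     label_for (int bb_index)
     {
       auto it = lab_for_bb.find (bb_index);
       if (it != lab_for_bb.end ())
         return it->second;          // reuse the label created earlier
       int l = next_label++;         // created lazily, like gen_label_rtx
       lab_for_bb.emplace (bb_index, l);
       return l;
     }
*/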
2538 /* A subroutine of expand_gimple_cond. Given E, a fallthrough edge
2539 of a basic block where we just expanded the conditional at the end,
2540 possibly clean up the CFG and instruction sequence. LAST is the
2541 last instruction before the just emitted jump sequence. */
2544 maybe_cleanup_end_of_block (edge e
, rtx_insn
*last
)
2546 /* Special case: when jumpif decides that the condition is
2547 trivial it emits an unconditional jump (and the necessary
2548 barrier). But we still have two edges, the fallthru one is
2549 wrong. purge_dead_edges would clean this up later. Unfortunately
2550 we have to insert insns (and split edges) before
2551 find_many_sub_basic_blocks and hence before purge_dead_edges.
2552 But splitting edges might create new blocks which depend on the
2553 fact that if there are two edges there's no barrier. So the
2554 barrier would get lost and verify_flow_info would ICE. Instead
2555 of auditing all edge splitters to care for the barrier (which
2556 normally isn't there in a cleaned CFG), fix it here. */
2557 if (BARRIER_P (get_last_insn ()))
2561 /* Now, we have a single successor block, if we have insns to
2562 insert on the remaining edge we potentially will insert
2563 it at the end of this block (if the dest block isn't feasible)
2564 in order to avoid splitting the edge. This insertion will take
2565 place in front of the last jump. But we might have emitted
2566 multiple jumps (conditional and one unconditional) to the
2567 same destination. Inserting in front of the last one then
2568 is a problem. See PR 40021. We fix this by deleting all
2569 jumps except the last unconditional one. */
2570 insn
= PREV_INSN (get_last_insn ());
2571 /* Make sure we have an unconditional jump. Otherwise we're
2573 gcc_assert (JUMP_P (insn
) && !any_condjump_p (insn
));
2574 for (insn
= PREV_INSN (insn
); insn
!= last
;)
2576 insn
= PREV_INSN (insn
);
2577 if (JUMP_P (NEXT_INSN (insn
)))
2579 if (!any_condjump_p (NEXT_INSN (insn
)))
2581 gcc_assert (BARRIER_P (NEXT_INSN (NEXT_INSN (insn
))));
2582 delete_insn (NEXT_INSN (NEXT_INSN (insn
)));
2584 delete_insn (NEXT_INSN (insn
));
2590 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_COND.
2591 Returns a new basic block if we've terminated the current basic
2592 block and created a new one. */
2595 expand_gimple_cond (basic_block bb
, gcond
*stmt
)
2597 basic_block new_bb
, dest
;
2600 rtx_insn
*last2
, *last
;
2601 enum tree_code code
;
2604 code
= gimple_cond_code (stmt
);
2605 op0
= gimple_cond_lhs (stmt
);
2606 op1
= gimple_cond_rhs (stmt
);
2607 /* We're sometimes presented with such code:
2611 This would expand to two comparisons which then later might
2612 be cleaned up by combine. But some pattern matchers like if-conversion
2613 work better when there's only one compare, so make up for this
2614 here as special exception if TER would have made the same change. */
2616 && TREE_CODE (op0
) == SSA_NAME
2617 && TREE_CODE (TREE_TYPE (op0
)) == BOOLEAN_TYPE
2618 && TREE_CODE (op1
) == INTEGER_CST
2619 && ((gimple_cond_code (stmt
) == NE_EXPR
2620 && integer_zerop (op1
))
2621 || (gimple_cond_code (stmt
) == EQ_EXPR
2622 && integer_onep (op1
)))
2623 && bitmap_bit_p (SA
.values
, SSA_NAME_VERSION (op0
)))
2625 gimple
*second
= SSA_NAME_DEF_STMT (op0
);
2626 if (gimple_code (second
) == GIMPLE_ASSIGN
)
2628 enum tree_code code2
= gimple_assign_rhs_code (second
);
2629 if (TREE_CODE_CLASS (code2
) == tcc_comparison
)
2632 op0
= gimple_assign_rhs1 (second
);
2633 op1
= gimple_assign_rhs2 (second
);
2635 /* If jumps are cheap and the target does not support conditional
2636 compare, turn some more codes into jumpy sequences. */
2637 else if (BRANCH_COST (optimize_insn_for_speed_p (), false) < 4
2638 && targetm
.gen_ccmp_first
== NULL
)
2640 if ((code2
== BIT_AND_EXPR
2641 && TYPE_PRECISION (TREE_TYPE (op0
)) == 1
2642 && TREE_CODE (gimple_assign_rhs2 (second
)) != INTEGER_CST
)
2643 || code2
== TRUTH_AND_EXPR
)
2645 code
= TRUTH_ANDIF_EXPR
;
2646 op0
= gimple_assign_rhs1 (second
);
2647 op1
= gimple_assign_rhs2 (second
);
2649 else if (code2
== BIT_IOR_EXPR
|| code2
== TRUTH_OR_EXPR
)
2651 code
= TRUTH_ORIF_EXPR
;
2652 op0
= gimple_assign_rhs1 (second
);
2653 op1
= gimple_assign_rhs2 (second
);
2659 /* Optimize (x % C1) == C2 or (x % C1) != C2 if it is beneficial
2660 into (x - C2) * C3 < C4. */
2661 if ((code
== EQ_EXPR
|| code
== NE_EXPR
)
2662 && TREE_CODE (op0
) == SSA_NAME
2663 && TREE_CODE (op1
) == INTEGER_CST
)
2664 code
= maybe_optimize_mod_cmp (code
, &op0
, &op1
);
2666 /* Optimize (x - y) < 0 into x < y if x - y has undefined overflow. */
2667 if (!TYPE_UNSIGNED (TREE_TYPE (op0
))
2668 && (code
== LT_EXPR
|| code
== LE_EXPR
2669 || code
== GT_EXPR
|| code
== GE_EXPR
)
2670 && integer_zerop (op1
)
2671 && TREE_CODE (op0
) == SSA_NAME
)
2672 maybe_optimize_sub_cmp_0 (code
, &op0
, &op1
);
2674 last2
= last
= get_last_insn ();
2676 extract_true_false_edges_from_block (bb
, &true_edge
, &false_edge
);
2677 set_curr_insn_location (gimple_location (stmt
));
2679 /* These flags have no purpose in RTL land. */
2680 true_edge
->flags
&= ~EDGE_TRUE_VALUE
;
2681 false_edge
->flags
&= ~EDGE_FALSE_VALUE
;
2683 /* We can either have a pure conditional jump with one fallthru edge or
2684 two-way jump that needs to be decomposed into two basic blocks. */
2685 if (false_edge
->dest
== bb
->next_bb
)
2687 jumpif_1 (code
, op0
, op1
, label_rtx_for_bb (true_edge
->dest
),
2688 true_edge
->probability
);
2689 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
2690 if (true_edge
->goto_locus
!= UNKNOWN_LOCATION
)
2691 set_curr_insn_location (true_edge
->goto_locus
);
2692 false_edge
->flags
|= EDGE_FALLTHRU
;
2693 maybe_cleanup_end_of_block (false_edge
, last
);
2696 if (true_edge
->dest
== bb
->next_bb
)
2698 jumpifnot_1 (code
, op0
, op1
, label_rtx_for_bb (false_edge
->dest
),
2699 false_edge
->probability
);
2700 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
2701 if (false_edge
->goto_locus
!= UNKNOWN_LOCATION
)
2702 set_curr_insn_location (false_edge
->goto_locus
);
2703 true_edge
->flags
|= EDGE_FALLTHRU
;
2704 maybe_cleanup_end_of_block (true_edge
, last
);
2708 jumpif_1 (code
, op0
, op1
, label_rtx_for_bb (true_edge
->dest
),
2709 true_edge
->probability
);
2710 last
= get_last_insn ();
2711 if (false_edge
->goto_locus
!= UNKNOWN_LOCATION
)
2712 set_curr_insn_location (false_edge
->goto_locus
);
2713 emit_jump (label_rtx_for_bb (false_edge
->dest
));
2716 if (BARRIER_P (BB_END (bb
)))
2717 BB_END (bb
) = PREV_INSN (BB_END (bb
));
2718 update_bb_for_insn (bb
);
2720 new_bb
= create_basic_block (NEXT_INSN (last
), get_last_insn (), bb
);
2721 dest
= false_edge
->dest
;
2722 redirect_edge_succ (false_edge
, new_bb
);
2723 false_edge
->flags
|= EDGE_FALLTHRU
;
2724 new_bb
->count
= false_edge
->count ();
2725 loop_p loop
= find_common_loop (bb
->loop_father
, dest
->loop_father
);
2726 add_bb_to_loop (new_bb
, loop
);
2727 if (loop
->latch
== bb
2728 && loop
->header
== dest
)
2729 loop
->latch
= new_bb
;
2730 make_single_succ_edge (new_bb
, dest
, 0);
2731 if (BARRIER_P (BB_END (new_bb
)))
2732 BB_END (new_bb
) = PREV_INSN (BB_END (new_bb
));
2733 update_bb_for_insn (new_bb
);
2735 maybe_dump_rtl_for_gimple_stmt (stmt
, last2
);
2737 if (true_edge
->goto_locus
!= UNKNOWN_LOCATION
)
2739 set_curr_insn_location (true_edge
->goto_locus
);
2740 true_edge
->goto_locus
= curr_insn_location ();
2746 /* Mark all calls that can have a transaction restart. */
2749 mark_transaction_restart_calls (gimple
*stmt
)
2751 struct tm_restart_node dummy
;
2752 tm_restart_node
**slot
;
2754 if (!cfun
->gimple_df
->tm_restart
)
2758 slot
= cfun
->gimple_df
->tm_restart
->find_slot (&dummy
, NO_INSERT
);
2761 struct tm_restart_node
*n
= *slot
;
2762 tree list
= n
->label_or_list
;
2765 for (insn
= next_real_insn (get_last_insn ());
2767 insn
= next_real_insn (insn
))
2770 if (TREE_CODE (list
) == LABEL_DECL
)
2771 add_reg_note (insn
, REG_TM
, label_rtx (list
));
2773 for (; list
; list
= TREE_CHAIN (list
))
2774 add_reg_note (insn
, REG_TM
, label_rtx (TREE_VALUE (list
)));
2778 /* A subroutine of expand_gimple_stmt_1, expanding one GIMPLE_CALL
2782 expand_call_stmt (gcall
*stmt
)
2784 tree exp
, decl
, lhs
;
2788 if (gimple_call_internal_p (stmt
))
2790 expand_internal_call (stmt
);
2794 /* If this is a call to a built-in function and it has no effect other
2795 than setting the lhs, try to implement it using an internal function
2797 decl
= gimple_call_fndecl (stmt
);
2798 if (gimple_call_lhs (stmt
)
2799 && !gimple_has_side_effects (stmt
)
2800 && (optimize
|| (decl
&& called_as_built_in (decl
))))
2802 internal_fn ifn
= replacement_internal_fn (stmt
);
2803 if (ifn
!= IFN_LAST
)
2805 expand_internal_call (ifn
, stmt
);
2810 exp
= build_vl_exp (CALL_EXPR
, gimple_call_num_args (stmt
) + 3);
2812 CALL_EXPR_FN (exp
) = gimple_call_fn (stmt
);
2813 builtin_p
= decl
&& fndecl_built_in_p (decl
);
2815 /* If this is not a builtin function, the function type through which the
2816 call is made may be different from the type of the function. */
2819 = fold_convert (build_pointer_type (gimple_call_fntype (stmt
)),
2820 CALL_EXPR_FN (exp
));
2822 TREE_TYPE (exp
) = gimple_call_return_type (stmt
);
2823 CALL_EXPR_STATIC_CHAIN (exp
) = gimple_call_chain (stmt
);
2825 for (i
= 0; i
< gimple_call_num_args (stmt
); i
++)
2827 tree arg
= gimple_call_arg (stmt
, i
);
2829 /* TER addresses into arguments of builtin functions so we have a
2830 chance to infer more correct alignment information. See PR39954. */
2832 && TREE_CODE (arg
) == SSA_NAME
2833 && (def
= get_gimple_for_ssa_name (arg
))
2834 && gimple_assign_rhs_code (def
) == ADDR_EXPR
)
2835 arg
= gimple_assign_rhs1 (def
);
2836 CALL_EXPR_ARG (exp
, i
) = arg
;
2839 if (gimple_has_side_effects (stmt
)
2840 /* ??? Downstream in expand_expr_real_1 we assume that expressions
2841 w/o side-effects do not throw so work around this here. */
2842 || stmt_could_throw_p (cfun
, stmt
))
2843 TREE_SIDE_EFFECTS (exp
) = 1;
2845 if (gimple_call_nothrow_p (stmt
))
2846 TREE_NOTHROW (exp
) = 1;
2848 CALL_EXPR_TAILCALL (exp
) = gimple_call_tail_p (stmt
);
2849 CALL_EXPR_MUST_TAIL_CALL (exp
) = gimple_call_must_tail_p (stmt
);
2850 CALL_EXPR_RETURN_SLOT_OPT (exp
) = gimple_call_return_slot_opt_p (stmt
);
2852 && fndecl_built_in_p (decl
, BUILT_IN_NORMAL
)
2853 && ALLOCA_FUNCTION_CODE_P (DECL_FUNCTION_CODE (decl
)))
2854 CALL_ALLOCA_FOR_VAR_P (exp
) = gimple_call_alloca_for_var_p (stmt
);
2856 CALL_FROM_THUNK_P (exp
) = gimple_call_from_thunk_p (stmt
);
2857 CALL_EXPR_VA_ARG_PACK (exp
) = gimple_call_va_arg_pack_p (stmt
);
2858 CALL_EXPR_BY_DESCRIPTOR (exp
) = gimple_call_by_descriptor_p (stmt
);
2859 SET_EXPR_LOCATION (exp
, gimple_location (stmt
));
2861 /* Must come after copying location. */
2862 copy_warning (exp
, stmt
);
2864 /* Ensure RTL is created for debug args. */
2865 if (decl
&& DECL_HAS_DEBUG_ARGS_P (decl
))
2867 vec
<tree
, va_gc
> **debug_args
= decl_debug_args_lookup (decl
);
2872 for (ix
= 1; (*debug_args
)->iterate (ix
, &dtemp
); ix
+= 2)
2874 gcc_assert (TREE_CODE (dtemp
) == DEBUG_EXPR_DECL
);
2875 expand_debug_expr (dtemp
);
2879 rtx_insn
*before_call
= get_last_insn ();
2880 lhs
= gimple_call_lhs (stmt
);
2882 expand_assignment (lhs
, exp
, false);
2884 expand_expr (exp
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
2886 /* If the gimple call is an indirect call and has 'nocf_check'
2887 attribute find a generated CALL insn to mark it as no
2888 control-flow verification is needed. */
2889 if (gimple_call_nocf_check_p (stmt
)
2890 && !gimple_call_fndecl (stmt
))
2892 rtx_insn
*last
= get_last_insn ();
2893 while (!CALL_P (last
)
2894 && last
!= before_call
)
2895 last
= PREV_INSN (last
);
2897 if (last
!= before_call
)
2898 add_reg_note (last
, REG_CALL_NOCF_CHECK
, const0_rtx
);
2901 mark_transaction_restart_calls (stmt
);
2905 /* Generate RTL for an asm statement (explicit assembler code).
2906 STRING is a STRING_CST node containing the assembler code text,
2907 or an ADDR_EXPR containing a STRING_CST. VOL nonzero means the
2908 insn is volatile; don't optimize it. */
2911 expand_asm_loc (tree string
, int vol
, location_t locus
)
2915 body
= gen_rtx_ASM_INPUT_loc (VOIDmode
,
2916 ggc_strdup (TREE_STRING_POINTER (string
)),
2919 MEM_VOLATILE_P (body
) = vol
;
2921 /* Non-empty basic ASM implicitly clobbers memory. */
2922 if (TREE_STRING_LENGTH (string
) != 0)
2925 unsigned i
, nclobbers
;
2926 auto_vec
<rtx
> input_rvec
, output_rvec
;
2927 auto_vec
<machine_mode
> input_mode
;
2928 auto_vec
<const char *> constraints
;
2929 auto_vec
<rtx
> use_rvec
;
2930 auto_vec
<rtx
> clobber_rvec
;
2931 HARD_REG_SET clobbered_regs
;
2932 CLEAR_HARD_REG_SET (clobbered_regs
);
2934 clob
= gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
));
2935 clobber_rvec
.safe_push (clob
);
2937 if (targetm
.md_asm_adjust
)
2938 targetm
.md_asm_adjust (output_rvec
, input_rvec
, input_mode
,
2939 constraints
, use_rvec
, clobber_rvec
,
2940 clobbered_regs
, locus
);
2943 nclobbers
= clobber_rvec
.length ();
2944 auto nuses
= use_rvec
.length ();
2945 body
= gen_rtx_PARALLEL (VOIDmode
, rtvec_alloc (1 + nuses
+ nclobbers
));
2948 XVECEXP (body
, 0, i
++) = asm_op
;
2949 for (rtx use
: use_rvec
)
2950 XVECEXP (body
, 0, i
++) = gen_rtx_USE (VOIDmode
, use
);
2951 for (rtx clobber
: clobber_rvec
)
2952 XVECEXP (body
, 0, i
++) = gen_rtx_CLOBBER (VOIDmode
, clobber
);
/* Return the number of times character C occurs in string S.  */

static int
n_occurrences (int c, const char *s)
{
  int n = 0;
  while (*s)
    n += (*s++ == c);
  return n;
}

/* A subroutine of expand_asm_operands.  Check that all operands have
   the same number of alternatives.  Return true if so.  */

static bool
check_operand_nalternatives (const vec<const char *> &constraints)
{
  unsigned len = constraints.length();
  if (len > 0)
    {
      int nalternatives = n_occurrences (',', constraints[0]);

      if (nalternatives + 1 > MAX_RECOG_ALTERNATIVES)
        {
          error ("too many alternatives in %<asm%>");
          return false;
        }

      for (unsigned i = 1; i < len; ++i)
        if (n_occurrences (',', constraints[i]) != nalternatives)
          {
            error ("operand constraints for %<asm%> differ "
                   "in number of alternatives");
            return false;
          }
    }
  return true;
}
/* Check for overlap between registers marked in CLOBBERED_REGS and
   anything inappropriate in T.  Emit error and return the register
   variable definition for error, NULL_TREE for ok.  */

static tree
tree_conflicts_with_clobbers_p (tree t, HARD_REG_SET *clobbered_regs,
                                location_t loc)
{
  /* Conflicts between asm-declared register variables and the clobber
     list are not allowed.  */
  tree overlap = tree_overlaps_hard_reg_set (t, clobbered_regs);

  if (overlap)
    {
      error_at (loc, "%<asm%> specifier for variable %qE conflicts with "
                "%<asm%> clobber list", DECL_NAME (overlap));

      /* Reset registerness to stop multiple errors emitted for a single
         variable.  */
      DECL_REGISTER (overlap) = 0;
      return overlap;
    }

  return NULL_TREE;
}
3022 /* Check that the given REGNO spanning NREGS is a valid
3023 asm clobber operand. Some HW registers cannot be
3024 saved/restored, hence they should not be clobbered by
3027 asm_clobber_reg_is_valid (int regno
, int nregs
, const char *regname
)
3029 bool is_valid
= true;
3030 HARD_REG_SET regset
;
3032 CLEAR_HARD_REG_SET (regset
);
3034 add_range_to_hard_reg_set (®set
, regno
, nregs
);
3036 /* Clobbering the PIC register is an error. */
3037 if (PIC_OFFSET_TABLE_REGNUM
!= INVALID_REGNUM
3038 && overlaps_hard_reg_set_p (regset
, Pmode
, PIC_OFFSET_TABLE_REGNUM
))
3040 /* ??? Diagnose during gimplification? */
3041 error ("PIC register clobbered by %qs in %<asm%>", regname
);
3044 else if (!in_hard_reg_set_p
3045 (accessible_reg_set
, reg_raw_mode
[regno
], regno
))
3047 /* ??? Diagnose during gimplification? */
3048 error ("the register %qs cannot be clobbered in %<asm%>"
3049 " for the current target", regname
);
3053 /* Clobbering the stack pointer register is deprecated. GCC expects
3054 the value of the stack pointer after an asm statement to be the same
3055 as it was before, so no asm can validly clobber the stack pointer in
3056 the usual sense. Adding the stack pointer to the clobber list has
3057 traditionally had some undocumented and somewhat obscure side-effects. */
3058 if (overlaps_hard_reg_set_p (regset
, Pmode
, STACK_POINTER_REGNUM
))
3060 crtl
->sp_is_clobbered_by_asm
= true;
3061 if (warning (OPT_Wdeprecated
, "listing the stack pointer register"
3062 " %qs in a clobber list is deprecated", regname
))
3063 inform (input_location
, "the value of the stack pointer after"
3064 " an %<asm%> statement must be the same as it was before"
3071 /* Generate RTL for an asm statement with arguments.
3072 STRING is the instruction template.
3073 OUTPUTS is a list of output arguments (lvalues); INPUTS a list of inputs.
3074 Each output or input has an expression in the TREE_VALUE and
3075 a tree list in TREE_PURPOSE which in turn contains a constraint
3076 name in TREE_VALUE (or NULL_TREE) and a constraint string
3078 CLOBBERS is a list of STRING_CST nodes each naming a hard register
3079 that is clobbered by this insn.
3081 LABELS is a list of labels, and if LABELS is non-NULL, FALLTHRU_BB
3082 should be the fallthru basic block of the asm goto.
3084 Not all kinds of lvalue that may appear in OUTPUTS can be stored directly.
3085 Some elements of OUTPUTS may be replaced with trees representing temporary
3086 values. The caller should copy those temporary values to the originally
3089 VOL nonzero means the insn is volatile; don't optimize it. */
3092 expand_asm_stmt (gasm
*stmt
)
3094 class save_input_location
3099 explicit save_input_location(location_t where
)
3101 old
= input_location
;
3102 input_location
= where
;
3105 ~save_input_location()
3107 input_location
= old
;
3111 location_t locus
= gimple_location (stmt
);
3113 if (gimple_asm_input_p (stmt
))
3115 const char *s
= gimple_asm_string (stmt
);
3116 tree string
= build_string (strlen (s
), s
);
3117 expand_asm_loc (string
, gimple_asm_volatile_p (stmt
), locus
);
3121 /* There are some legacy diagnostics in here. */
3122 save_input_location
s_i_l(locus
);
3124 unsigned noutputs
= gimple_asm_noutputs (stmt
);
3125 unsigned ninputs
= gimple_asm_ninputs (stmt
);
3126 unsigned nlabels
= gimple_asm_nlabels (stmt
);
3128 bool error_seen
= false;
3130 /* ??? Diagnose during gimplification? */
3131 if (ninputs
+ noutputs
+ nlabels
> MAX_RECOG_OPERANDS
)
3133 error_at (locus
, "more than %d operands in %<asm%>", MAX_RECOG_OPERANDS
);
3137 auto_vec
<tree
, MAX_RECOG_OPERANDS
> output_tvec
;
3138 auto_vec
<tree
, MAX_RECOG_OPERANDS
> input_tvec
;
3139 auto_vec
<const char *, MAX_RECOG_OPERANDS
> constraints
;
3141 /* Copy the gimple vectors into new vectors that we can manipulate. */
3143 output_tvec
.safe_grow (noutputs
, true);
3144 input_tvec
.safe_grow (ninputs
, true);
3145 constraints
.safe_grow (noutputs
+ ninputs
, true);
3147 for (i
= 0; i
< noutputs
; ++i
)
3149 tree t
= gimple_asm_output_op (stmt
, i
);
3150 output_tvec
[i
] = TREE_VALUE (t
);
3151 constraints
[i
] = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t
)));
3153 for (i
= 0; i
< ninputs
; i
++)
3155 tree t
= gimple_asm_input_op (stmt
, i
);
3156 input_tvec
[i
] = TREE_VALUE (t
);
3157 constraints
[i
+ noutputs
]
3158 = TREE_STRING_POINTER (TREE_VALUE (TREE_PURPOSE (t
)));
3161 /* ??? Diagnose during gimplification? */
3162 if (! check_operand_nalternatives (constraints
))
3165 /* Count the number of meaningful clobbered registers, ignoring what
3166 we would ignore later. */
3167 auto_vec
<rtx
> clobber_rvec
;
3168 HARD_REG_SET clobbered_regs
;
3169 CLEAR_HARD_REG_SET (clobbered_regs
);
3171 if (unsigned n
= gimple_asm_nclobbers (stmt
))
3173 clobber_rvec
.reserve (n
);
3174 for (i
= 0; i
< n
; i
++)
3176 tree t
= gimple_asm_clobber_op (stmt
, i
);
3177 const char *regname
= TREE_STRING_POINTER (TREE_VALUE (t
));
3180 j
= decode_reg_name_and_count (regname
, &nregs
);
3185 /* ??? Diagnose during gimplification? */
3186 error_at (locus
, "unknown register name %qs in %<asm%>",
3192 rtx x
= gen_rtx_MEM (BLKmode
, gen_rtx_SCRATCH (VOIDmode
));
3193 clobber_rvec
.safe_push (x
);
3197 /* Otherwise we should have -1 == empty string
3198 or -3 == cc, which is not a register. */
3199 gcc_assert (j
== -1 || j
== -3);
3203 for (int reg
= j
; reg
< j
+ nregs
; reg
++)
3205 if (!asm_clobber_reg_is_valid (reg
, nregs
, regname
))
3208 SET_HARD_REG_BIT (clobbered_regs
, reg
);
3209 rtx x
= gen_rtx_REG (reg_raw_mode
[reg
], reg
);
3210 clobber_rvec
.safe_push (x
);
3215 /* First pass over inputs and outputs checks validity and sets
3216 mark_addressable if needed. */
3217 /* ??? Diagnose during gimplification? */
3219 for (i
= 0; i
< noutputs
; ++i
)
3221 tree val
= output_tvec
[i
];
3222 tree type
= TREE_TYPE (val
);
3223 const char *constraint
;
3228 /* Try to parse the output constraint. If that fails, there's
3229 no point in going further. */
3230 constraint
= constraints
[i
];
3231 if (!parse_output_constraint (&constraint
, i
, ninputs
, noutputs
,
3232 &allows_mem
, &allows_reg
, &is_inout
))
3235 /* If the output is a hard register, verify it doesn't conflict with
3236 any other operand's possible hard register use. */
3238 && REG_P (DECL_RTL (val
))
3239 && HARD_REGISTER_P (DECL_RTL (val
)))
3241 unsigned j
, output_hregno
= REGNO (DECL_RTL (val
));
3242 bool early_clobber_p
= strchr (constraints
[i
], '&') != NULL
;
3243 unsigned long match
;
3245 /* Verify the other outputs do not use the same hard register. */
3246 for (j
= i
+ 1; j
< noutputs
; ++j
)
3247 if (DECL_P (output_tvec
[j
])
3248 && REG_P (DECL_RTL (output_tvec
[j
]))
3249 && HARD_REGISTER_P (DECL_RTL (output_tvec
[j
]))
3250 && output_hregno
== REGNO (DECL_RTL (output_tvec
[j
])))
3252 error_at (locus
, "invalid hard register usage between output "
3257 /* Verify matching constraint operands use the same hard register
3258 and that the non-matching constraint operands do not use the same
3259 hard register if the output is an early clobber operand. */
3260 for (j
= 0; j
< ninputs
; ++j
)
3261 if (DECL_P (input_tvec
[j
])
3262 && REG_P (DECL_RTL (input_tvec
[j
]))
3263 && HARD_REGISTER_P (DECL_RTL (input_tvec
[j
])))
3265 unsigned input_hregno
= REGNO (DECL_RTL (input_tvec
[j
]));
3266 switch (*constraints
[j
+ noutputs
])
3268 case '0': case '1': case '2': case '3': case '4':
3269 case '5': case '6': case '7': case '8': case '9':
3270 match
= strtoul (constraints
[j
+ noutputs
], NULL
, 10);
3277 && output_hregno
!= input_hregno
)
3279 error_at (locus
, "invalid hard register usage between "
3280 "output operand and matching constraint operand");
3283 else if (early_clobber_p
3285 && output_hregno
== input_hregno
)
3287 error_at (locus
, "invalid hard register usage between "
3288 "earlyclobber operand and input operand");
3298 && REG_P (DECL_RTL (val
))
3299 && GET_MODE (DECL_RTL (val
)) != TYPE_MODE (type
))))
3300 mark_addressable (val
);
3303 for (i
= 0; i
< ninputs
; ++i
)
3305 bool allows_reg
, allows_mem
;
3306 const char *constraint
;
3308 constraint
= constraints
[i
+ noutputs
];
3309 if (! parse_input_constraint (&constraint
, i
, ninputs
, noutputs
, 0,
3310 constraints
.address (),
3311 &allows_mem
, &allows_reg
))
3314 if (! allows_reg
&& allows_mem
)
3315 mark_addressable (input_tvec
[i
]);
3318 /* Second pass evaluates arguments. */
3320 /* Make sure stack is consistent for asm goto. */
3322 do_pending_stack_adjust ();
3323 int old_generating_concat_p
= generating_concat_p
;
3325 /* Vector of RTX's of evaluated output operands. */
3326 auto_vec
<rtx
, MAX_RECOG_OPERANDS
> output_rvec
;
3327 auto_vec
<int, MAX_RECOG_OPERANDS
> inout_opnum
;
3328 rtx_insn
*after_rtl_seq
= NULL
, *after_rtl_end
= NULL
;
3330 output_rvec
.safe_grow (noutputs
, true);
3332 for (i
= 0; i
< noutputs
; ++i
)
3334 tree val
= output_tvec
[i
];
3335 tree type
= TREE_TYPE (val
);
3336 bool is_inout
, allows_reg
, allows_mem
, ok
;
3339 ok
= parse_output_constraint (&constraints
[i
], i
, ninputs
,
3340 noutputs
, &allows_mem
, &allows_reg
,
3344 /* If an output operand is not a decl or indirect ref and our constraint
3345 allows a register, make a temporary to act as an intermediate.
3346 Make the asm insn write into that, then we will copy it to
3347 the real output operand. Likewise for promoted variables. */
3349 generating_concat_p
= 0;
3351 gcc_assert (TREE_CODE (val
) != INDIRECT_REF
);
3352 if (((TREE_CODE (val
) == MEM_REF
3353 && TREE_CODE (TREE_OPERAND (val
, 0)) != ADDR_EXPR
)
3356 && (allows_mem
|| REG_P (DECL_RTL (val
)))
3357 && ! (REG_P (DECL_RTL (val
))
3358 && GET_MODE (DECL_RTL (val
)) != TYPE_MODE (type
)))
3361 || TREE_ADDRESSABLE (type
)
3362 || (!tree_fits_poly_int64_p (TYPE_SIZE (type
))
3363 && !known_size_p (max_int_size_in_bytes (type
))))
3365 op
= expand_expr (val
, NULL_RTX
, VOIDmode
,
3366 !allows_reg
? EXPAND_MEMORY
: EXPAND_WRITE
);
3368 op
= validize_mem (op
);
3370 if (! allows_reg
&& !MEM_P (op
))
3372 error_at (locus
, "output number %d not directly addressable", i
);
3375 if ((! allows_mem
&& MEM_P (op
) && GET_MODE (op
) != BLKmode
)
3376 || GET_CODE (op
) == CONCAT
)
3379 op
= gen_reg_rtx (GET_MODE (op
));
3381 generating_concat_p
= old_generating_concat_p
;
3384 emit_move_insn (op
, old_op
);
3386 push_to_sequence2 (after_rtl_seq
, after_rtl_end
);
3387 emit_move_insn (old_op
, op
);
3388 after_rtl_seq
= get_insns ();
3389 after_rtl_end
= get_last_insn ();
3395 op
= assign_temp (type
, 0, 1);
3396 op
= validize_mem (op
);
3397 if (!MEM_P (op
) && TREE_CODE (val
) == SSA_NAME
)
3398 set_reg_attrs_for_decl_rtl (SSA_NAME_VAR (val
), op
);
3400 generating_concat_p
= old_generating_concat_p
;
3402 push_to_sequence2 (after_rtl_seq
, after_rtl_end
);
3403 expand_assignment (val
, make_tree (type
, op
), false);
3404 after_rtl_seq
= get_insns ();
3405 after_rtl_end
= get_last_insn ();
3408 output_rvec
[i
] = op
;
3411 inout_opnum
.safe_push (i
);
3414 const char *str
= gimple_asm_string (stmt
);
3419 inout_opnum
.truncate (0);
3420 output_rvec
.truncate (0);
3421 clobber_rvec
.truncate (0);
3422 constraints
.truncate (0);
3423 CLEAR_HARD_REG_SET (clobbered_regs
);
3427 auto_vec
<rtx
, MAX_RECOG_OPERANDS
> input_rvec
;
3428 auto_vec
<machine_mode
, MAX_RECOG_OPERANDS
> input_mode
;
3430 input_rvec
.safe_grow (ninputs
, true);
3431 input_mode
.safe_grow (ninputs
, true);
3433 generating_concat_p
= 0;
3435 for (i
= 0; i
< ninputs
; ++i
)
3437 tree val
= input_tvec
[i
];
3438 tree type
= TREE_TYPE (val
);
3439 bool allows_reg
, allows_mem
, ok
;
3440 const char *constraint
;
3443 constraint
= constraints
[i
+ noutputs
];
3444 ok
= parse_input_constraint (&constraint
, i
, ninputs
, noutputs
, 0,
3445 constraints
.address (),
3446 &allows_mem
, &allows_reg
);
3449 /* EXPAND_INITIALIZER will not generate code for valid initializer
3450 constants, but will still generate code for other types of operand.
3451 This is the behavior we want for constant constraints. */
3452 op
= expand_expr (val
, NULL_RTX
, VOIDmode
,
3453 allows_reg
? EXPAND_NORMAL
3454 : allows_mem
? EXPAND_MEMORY
3455 : EXPAND_INITIALIZER
);
3457 /* Never pass a CONCAT to an ASM. */
3458 if (GET_CODE (op
) == CONCAT
)
3459 op
= force_reg (GET_MODE (op
), op
);
3460 else if (MEM_P (op
))
3461 op
= validize_mem (op
);
3463 if (asm_operand_ok (op
, constraint
, NULL
) <= 0)
3465 if (allows_reg
&& TYPE_MODE (type
) != BLKmode
)
3466 op
= force_reg (TYPE_MODE (type
), op
);
3467 else if (!allows_mem
)
3468 warning_at (locus
, 0, "%<asm%> operand %d probably does not match "
3469 "constraints", i
+ noutputs
);
3470 else if (MEM_P (op
))
3472 /* We won't recognize either volatile memory or memory
3473 with a queued address as available a memory_operand
3474 at this point. Ignore it: clearly this *is* a memory. */
3480 input_mode
[i
] = TYPE_MODE (type
);
3483 /* For in-out operands, copy output rtx to input rtx. */
3484 unsigned ninout
= inout_opnum
.length ();
3485 for (i
= 0; i
< ninout
; i
++)
3487 int j
= inout_opnum
[i
];
3488 rtx o
= output_rvec
[j
];
3490 input_rvec
.safe_push (o
);
3491 input_mode
.safe_push (GET_MODE (o
));
3494 sprintf (buffer
, "%d", j
);
3495 constraints
.safe_push (ggc_strdup (buffer
));
3499 /* Sometimes we wish to automatically clobber registers across an asm.
3500 Case in point is when the i386 backend moved from cc0 to a hard reg --
3501 maintaining source-level compatibility means automatically clobbering
3502 the flags register. */
3503 rtx_insn
*after_md_seq
= NULL
;
3504 auto_vec
<rtx
> use_rvec
;
3505 if (targetm
.md_asm_adjust
)
3507 = targetm
.md_asm_adjust (output_rvec
, input_rvec
, input_mode
,
3508 constraints
, use_rvec
, clobber_rvec
,
3509 clobbered_regs
, locus
);
3511 /* Do not allow the hook to change the output and input count,
3512 lest it mess up the operand numbering. */
3513 gcc_assert (output_rvec
.length() == noutputs
);
3514 gcc_assert (input_rvec
.length() == ninputs
);
3515 gcc_assert (constraints
.length() == noutputs
+ ninputs
);
3517 /* But it certainly can adjust the uses and clobbers. */
3518 unsigned nuses
= use_rvec
.length ();
3519 unsigned nclobbers
= clobber_rvec
.length ();
3521 /* Third pass checks for easy conflicts. */
3522 /* ??? Why are we doing this on trees instead of rtx. */
3524 bool clobber_conflict_found
= 0;
3525 for (i
= 0; i
< noutputs
; ++i
)
3526 if (tree_conflicts_with_clobbers_p (output_tvec
[i
], &clobbered_regs
, locus
))
3527 clobber_conflict_found
= 1;
3528 for (i
= 0; i
< ninputs
- ninout
; ++i
)
3529 if (tree_conflicts_with_clobbers_p (input_tvec
[i
], &clobbered_regs
, locus
))
3530 clobber_conflict_found
= 1;
3532 /* Make vectors for the expression-rtx, constraint strings,
3533 and named operands. */
3535 rtvec argvec
= rtvec_alloc (ninputs
);
3536 rtvec constraintvec
= rtvec_alloc (ninputs
);
3537 rtvec labelvec
= rtvec_alloc (nlabels
);
3539 rtx body
= gen_rtx_ASM_OPERANDS ((noutputs
== 0 ? VOIDmode
3540 : GET_MODE (output_rvec
[0])),
3542 "", 0, argvec
, constraintvec
,
3544 MEM_VOLATILE_P (body
) = gimple_asm_volatile_p (stmt
);
3546 for (i
= 0; i
< ninputs
; ++i
)
3548 ASM_OPERANDS_INPUT (body
, i
) = input_rvec
[i
];
3549 ASM_OPERANDS_INPUT_CONSTRAINT_EXP (body
, i
)
3550 = gen_rtx_ASM_INPUT_loc (input_mode
[i
],
3551 constraints
[i
+ noutputs
],
3555 /* Copy labels to the vector. */
3556 rtx_code_label
*fallthru_label
= NULL
;
3559 basic_block fallthru_bb
= NULL
;
3560 edge fallthru
= find_fallthru_edge (gimple_bb (stmt
)->succs
);
3562 fallthru_bb
= fallthru
->dest
;
3564 for (i
= 0; i
< nlabels
; ++i
)
3566 tree label
= TREE_VALUE (gimple_asm_label_op (stmt
, i
));
3568 /* If asm goto has any labels in the fallthru basic block, use
3569 a label that we emit immediately after the asm goto. Expansion
3570 may insert further instructions into the same basic block after
3571 asm goto and if we don't do this, insertion of instructions on
3572 the fallthru edge might misbehave. See PR58670. */
3573 if (fallthru_bb
&& label_to_block (cfun
, label
) == fallthru_bb
)
3575 if (fallthru_label
== NULL_RTX
)
3576 fallthru_label
= gen_label_rtx ();
3580 r
= label_rtx (label
);
3581 ASM_OPERANDS_LABEL (body
, i
) = gen_rtx_LABEL_REF (Pmode
, r
);
3585 /* Now, for each output, construct an rtx
3586 (set OUTPUT (asm_operands INSN OUTPUTCONSTRAINT OUTPUTNUMBER
3587 ARGVEC CONSTRAINTS OPNAMES))
3588 If there is more than one, put them inside a PARALLEL. */
3590 if (noutputs
== 0 && nuses
== 0 && nclobbers
== 0)
3592 /* No output operands: put in a raw ASM_OPERANDS rtx. */
3594 emit_jump_insn (body
);
3598 else if (noutputs
== 1 && nuses
== 0 && nclobbers
== 0)
3600 ASM_OPERANDS_OUTPUT_CONSTRAINT (body
) = constraints
[0];
3602 emit_jump_insn (gen_rtx_SET (output_rvec
[0], body
));
3604 emit_insn (gen_rtx_SET (output_rvec
[0], body
));
3614 body
= gen_rtx_PARALLEL (VOIDmode
,
3615 rtvec_alloc (num
+ nuses
+ nclobbers
));
3617 /* For each output operand, store a SET. */
3618 for (i
= 0; i
< noutputs
; ++i
)
3620 rtx src
, o
= output_rvec
[i
];
3623 ASM_OPERANDS_OUTPUT_CONSTRAINT (obody
) = constraints
[0];
3628 src
= gen_rtx_ASM_OPERANDS (GET_MODE (o
),
3629 ASM_OPERANDS_TEMPLATE (obody
),
3630 constraints
[i
], i
, argvec
,
3631 constraintvec
, labelvec
, locus
);
3632 MEM_VOLATILE_P (src
) = gimple_asm_volatile_p (stmt
);
3634 XVECEXP (body
, 0, i
) = gen_rtx_SET (o
, src
);
3637 /* If there are no outputs (but there are some clobbers)
3638 store the bare ASM_OPERANDS into the PARALLEL. */
3640 XVECEXP (body
, 0, i
++) = obody
;
3642 /* Add the uses specified by the target hook. No checking should
3643 be needed since this doesn't come directly from user code. */
3644 for (rtx use
: use_rvec
)
3645 XVECEXP (body
, 0, i
++) = gen_rtx_USE (VOIDmode
, use
);
3647 /* Store (clobber REG) for each clobbered register specified. */
3648 for (unsigned j
= 0; j
< nclobbers
; ++j
)
3650 rtx clobbered_reg
= clobber_rvec
[j
];
3652 /* Do sanity check for overlap between clobbers and respectively
3653 input and outputs that hasn't been handled. Such overlap
3654 should have been detected and reported above. */
3655 if (!clobber_conflict_found
&& REG_P (clobbered_reg
))
3657 /* We test the old body (obody) contents to avoid
3658 tripping over the under-construction body. */
3659 for (unsigned k
= 0; k
< noutputs
; ++k
)
3660 if (reg_overlap_mentioned_p (clobbered_reg
, output_rvec
[k
]))
3661 internal_error ("%<asm%> clobber conflict with "
3664 for (unsigned k
= 0; k
< ninputs
- ninout
; ++k
)
3665 if (reg_overlap_mentioned_p (clobbered_reg
, input_rvec
[k
]))
3666 internal_error ("%<asm%> clobber conflict with "
3670 XVECEXP (body
, 0, i
++) = gen_rtx_CLOBBER (VOIDmode
, clobbered_reg
);
3674 emit_jump_insn (body
);
3679 generating_concat_p
= old_generating_concat_p
;
3682 emit_label (fallthru_label
);
3685 emit_insn (after_md_seq
);
3689 emit_insn (after_rtl_seq
);
3694 unsigned int cnt
= EDGE_COUNT (gimple_bb (stmt
)->succs
);
3696 FOR_EACH_EDGE (e
, ei
, gimple_bb (stmt
)->succs
)
3700 copy
= after_rtl_seq
;
3704 duplicate_insn_chain (after_rtl_seq
, after_rtl_end
,
3706 copy
= get_insns ();
3709 prepend_insn_to_edge (copy
, e
);
3715 crtl
->has_asm_statement
= 1;
/* Emit code to jump to the address
   specified by the pointer expression EXP.  */

static void
expand_computed_goto (tree exp)
{
  rtx x = expand_normal (exp);

  do_pending_stack_adjust ();
  emit_indirect_jump (x);
}

/* Generate RTL code for a `goto' statement with target label LABEL.
   LABEL should be a LABEL_DECL tree node that was or will later be
   defined with `expand_label'.  */

static void
expand_goto (tree label)
{
  if (flag_checking)
    {
      /* Check for a nonlocal goto to a containing function.  Should have
         gotten translated to __builtin_nonlocal_goto.  */
      tree context = decl_function_context (label);
      gcc_assert (!context || context == current_function_decl);
    }

  emit_jump (jump_target_rtx (label));
}
/* Output a return with no value.  */

static void
expand_null_return_1 (void)
{
  clear_pending_stack_adjust ();
  do_pending_stack_adjust ();
  emit_jump (return_label);
}

/* Generate RTL to return from the current function, with no value.
   (That is, we do not do anything about returning any value.)  */

void
expand_null_return (void)
{
  /* If this function was declared to return a value, but we
     didn't, clobber the return registers so that they are not
     propagated live to the rest of the function.  */
  clobber_return_register ();

  expand_null_return_1 ();
}
3772 /* Generate RTL to return from the current function, with value VAL. */
3775 expand_value_return (rtx val
)
3777 /* Copy the value to the return location unless it's already there. */
3779 tree decl
= DECL_RESULT (current_function_decl
);
3780 rtx return_reg
= DECL_RTL (decl
);
3781 if (return_reg
!= val
)
3783 tree funtype
= TREE_TYPE (current_function_decl
);
3784 tree type
= TREE_TYPE (decl
);
3785 int unsignedp
= TYPE_UNSIGNED (type
);
3786 machine_mode old_mode
= DECL_MODE (decl
);
3788 if (DECL_BY_REFERENCE (decl
))
3789 mode
= promote_function_mode (type
, old_mode
, &unsignedp
, funtype
, 2);
3791 mode
= promote_function_mode (type
, old_mode
, &unsignedp
, funtype
, 1);
3793 if (mode
!= old_mode
)
3795 /* Some ABIs require scalar floating point modes to be returned
3796 in a wider scalar integer mode. We need to explicitly
3797 reinterpret to an integer mode of the correct precision
3798 before extending to the desired result. */
3799 if (SCALAR_INT_MODE_P (mode
)
3800 && SCALAR_FLOAT_MODE_P (old_mode
)
3801 && known_gt (GET_MODE_SIZE (mode
), GET_MODE_SIZE (old_mode
)))
3802 val
= convert_float_to_wider_int (mode
, old_mode
, val
);
3804 val
= convert_modes (mode
, old_mode
, val
, unsignedp
);
3807 if (GET_CODE (return_reg
) == PARALLEL
)
3808 emit_group_load (return_reg
, val
, type
, int_size_in_bytes (type
));
3810 emit_move_insn (return_reg
, val
);
3813 expand_null_return_1 ();
3816 /* Generate RTL to evaluate the expression RETVAL and return it
3817 from the current function. */
3820 expand_return (tree retval
)
3826 /* If function wants no value, give it none. */
3827 if (VOID_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl
))))
3829 expand_normal (retval
);
3830 expand_null_return ();
3834 if (retval
== error_mark_node
)
3836 /* Treat this like a return of no value from a function that
3838 expand_null_return ();
3841 else if ((TREE_CODE (retval
) == MODIFY_EXPR
3842 || TREE_CODE (retval
) == INIT_EXPR
)
3843 && TREE_CODE (TREE_OPERAND (retval
, 0)) == RESULT_DECL
)
3844 retval_rhs
= TREE_OPERAND (retval
, 1);
3846 retval_rhs
= retval
;
3848 result_rtl
= DECL_RTL (DECL_RESULT (current_function_decl
));
3850 /* If we are returning the RESULT_DECL, then the value has already
3851 been stored into it, so we don't have to do anything special. */
3852 if (TREE_CODE (retval_rhs
) == RESULT_DECL
)
3853 expand_value_return (result_rtl
);
3855 /* If the result is an aggregate that is being returned in one (or more)
3856 registers, load the registers here. */
3858 else if (retval_rhs
!= 0
3859 && TYPE_MODE (TREE_TYPE (retval_rhs
)) == BLKmode
3860 && REG_P (result_rtl
))
3862 val
= copy_blkmode_to_reg (GET_MODE (result_rtl
), retval_rhs
);
3865 /* Use the mode of the result value on the return register. */
3866 PUT_MODE (result_rtl
, GET_MODE (val
));
3867 expand_value_return (val
);
3870 expand_null_return ();
3872 else if (retval_rhs
!= 0
3873 && !VOID_TYPE_P (TREE_TYPE (retval_rhs
))
3874 && (REG_P (result_rtl
)
3875 || (GET_CODE (result_rtl
) == PARALLEL
)))
3877 /* Compute the return value into a temporary (usually a pseudo reg). */
3879 = assign_temp (TREE_TYPE (DECL_RESULT (current_function_decl
)), 0, 1);
3880 val
= expand_expr (retval_rhs
, val
, GET_MODE (val
), EXPAND_NORMAL
);
3881 val
= force_not_mem (val
);
3882 expand_value_return (val
);
3886 /* No hard reg used; calculate value into hard return reg. */
3887 expand_expr (retval
, const0_rtx
, VOIDmode
, EXPAND_NORMAL
);
3888 expand_value_return (result_rtl
);
/* Expand a clobber of LHS.  If LHS is stored in a multi-part
   register, tell the rtl optimizers that its value is no longer
   needed.  */

static void
expand_clobber (tree lhs)
{
  if (DECL_P (lhs))
    {
      rtx decl_rtl = DECL_RTL_IF_SET (lhs);
      if (decl_rtl && REG_P (decl_rtl))
        {
          machine_mode decl_mode = GET_MODE (decl_rtl);
          if (maybe_gt (GET_MODE_SIZE (decl_mode),
                        REGMODE_NATURAL_SIZE (decl_mode)))
            emit_clobber (decl_rtl);
        }
    }
}
3912 /* A subroutine of expand_gimple_stmt, expanding one gimple statement
3913 STMT that doesn't require special handling for outgoing edges. That
3914 is no tailcalls and no GIMPLE_COND. */
3917 expand_gimple_stmt_1 (gimple
*stmt
)
3921 set_curr_insn_location (gimple_location (stmt
));
3923 switch (gimple_code (stmt
))
3926 op0
= gimple_goto_dest (stmt
);
3927 if (TREE_CODE (op0
) == LABEL_DECL
)
3930 expand_computed_goto (op0
);
3933 expand_label (gimple_label_label (as_a
<glabel
*> (stmt
)));
3936 case GIMPLE_PREDICT
:
3940 gswitch
*swtch
= as_a
<gswitch
*> (stmt
);
3941 if (gimple_switch_num_labels (swtch
) == 1)
3942 expand_goto (CASE_LABEL (gimple_switch_default_label (swtch
)));
3944 expand_case (swtch
);
3948 expand_asm_stmt (as_a
<gasm
*> (stmt
));
3951 expand_call_stmt (as_a
<gcall
*> (stmt
));
3956 op0
= gimple_return_retval (as_a
<greturn
*> (stmt
));
3958 /* If a return doesn't have a location, it very likely represents
3959 multiple user returns so we cannot let it inherit the location
3960 of the last statement of the previous basic block in RTL. */
3961 if (!gimple_has_location (stmt
))
3962 set_curr_insn_location (cfun
->function_end_locus
);
3964 if (op0
&& op0
!= error_mark_node
)
3966 tree result
= DECL_RESULT (current_function_decl
);
3968 /* If we are not returning the current function's RESULT_DECL,
3969 build an assignment to it. */
3972 /* I believe that a function's RESULT_DECL is unique. */
3973 gcc_assert (TREE_CODE (op0
) != RESULT_DECL
);
3975 /* ??? We'd like to use simply expand_assignment here,
3976 but this fails if the value is of BLKmode but the return
3977 decl is a register. expand_return has special handling
3978 for this combination, which eventually should move
3979 to common code. See comments there. Until then, let's
3980 build a modify expression :-/ */
3981 op0
= build2 (MODIFY_EXPR
, TREE_TYPE (result
),
3987 expand_null_return ();
3989 expand_return (op0
);
3995 gassign
*assign_stmt
= as_a
<gassign
*> (stmt
);
3996 tree lhs
= gimple_assign_lhs (assign_stmt
);
3998 /* Tree expand used to fiddle with |= and &= of two bitfield
3999 COMPONENT_REFs here. This can't happen with gimple, the LHS
4000 of binary assigns must be a gimple reg. */
4002 if (TREE_CODE (lhs
) != SSA_NAME
4003 || gimple_assign_rhs_class (assign_stmt
) == GIMPLE_SINGLE_RHS
)
4005 tree rhs
= gimple_assign_rhs1 (assign_stmt
);
4006 gcc_assert (gimple_assign_rhs_class (assign_stmt
)
4007 == GIMPLE_SINGLE_RHS
);
4008 if (gimple_has_location (stmt
) && CAN_HAVE_LOCATION_P (rhs
)
4009 /* Do not put locations on possibly shared trees. */
4010 && !is_gimple_min_invariant (rhs
))
4011 SET_EXPR_LOCATION (rhs
, gimple_location (stmt
));
4012 if (TREE_CLOBBER_P (rhs
))
4013 /* This is a clobber to mark the going out of scope for
4015 expand_clobber (lhs
);
4017 expand_assignment (lhs
, rhs
,
4018 gimple_assign_nontemporal_move_p (
4024 gcc_assert (!gimple_assign_nontemporal_move_p (assign_stmt
));
4025 bool promoted
= false;
4027 target
= expand_expr (lhs
, NULL_RTX
, VOIDmode
, EXPAND_WRITE
);
4028 if (GET_CODE (target
) == SUBREG
&& SUBREG_PROMOTED_VAR_P (target
))
4031 /* If we store into a promoted register, don't directly
4032 expand to target. */
4033 temp
= promoted
? NULL_RTX
: target
;
4034 temp
= expand_expr_real_gassign (assign_stmt
, temp
,
4035 GET_MODE (target
), EXPAND_NORMAL
);
4041 int unsignedp
= SUBREG_PROMOTED_SIGN (target
);
4042 /* If TEMP is a VOIDmode constant, use convert_modes to make
4043 sure that we properly convert it. */
4044 if (CONSTANT_P (temp
) && GET_MODE (temp
) == VOIDmode
)
4046 temp
= convert_modes (GET_MODE (target
),
4047 TYPE_MODE (TREE_TYPE (lhs
)),
4049 temp
= convert_modes (GET_MODE (SUBREG_REG (target
)),
4050 GET_MODE (target
), temp
, unsignedp
);
4053 convert_move (SUBREG_REG (target
), temp
, unsignedp
);
4057 temp
= force_operand (temp
, target
);
4059 emit_move_insn (target
, temp
);
4070 /* Expand one gimple statement STMT and return the last RTL instruction
4071 before any of the newly generated ones.
4073 In addition to generating the necessary RTL instructions this also
4074 sets REG_EH_REGION notes if necessary and sets the current source
4075 location for diagnostics. */
4078 expand_gimple_stmt (gimple
*stmt
)
4080 location_t saved_location
= input_location
;
4081 rtx_insn
*last
= get_last_insn ();
4086 /* We need to save and restore the current source location so that errors
4087 discovered during expansion are emitted with the right location. But
4088 it would be better if the diagnostic routines used the source location
4089 embedded in the tree nodes rather than globals. */
4090 if (gimple_has_location (stmt
))
4091 input_location
= gimple_location (stmt
);
4093 expand_gimple_stmt_1 (stmt
);
4095 /* Free any temporaries used to evaluate this statement. */
4098 input_location
= saved_location
;
4100 /* Mark all insns that may trap. */
4101 lp_nr
= lookup_stmt_eh_lp (stmt
);
4105 for (insn
= next_real_insn (last
); insn
;
4106 insn
= next_real_insn (insn
))
4108 if (! find_reg_note (insn
, REG_EH_REGION
, NULL_RTX
)
4109 /* If we want exceptions for non-call insns, any
4110 may_trap_p instruction may throw. */
4111 && GET_CODE (PATTERN (insn
)) != CLOBBER
4112 && GET_CODE (PATTERN (insn
)) != USE
4113 && insn_could_throw_p (insn
))
4114 make_reg_eh_region_note (insn
, 0, lp_nr
);
4121 /* A subroutine of expand_gimple_basic_block. Expand one GIMPLE_CALL
4122 that has CALL_EXPR_TAILCALL set. Returns non-null if we actually
4123 generated a tail call (something that might be denied by the ABI
4124 rules governing the call; see calls.cc).
4126 Sets CAN_FALLTHRU if we generated a *conditional* tail call, and
4127 can still reach the rest of BB. The case here is __builtin_sqrt,
4128 where the NaN result goes through the external function (with a
4129 tailcall) and the normal result happens via a sqrt instruction. */
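/* Illustrative sketch (added for clarity; the exact expansion is
   target-dependent and not part of the original source): a hardware-sqrt
   expansion of x = __builtin_sqrt (y) may look roughly like

       x = sqrt-insn (y);
       if (x is NaN)               // fall back to the library
         sqrt (y);                 // sibling call, returns directly
       ... rest of BB ...          // reached on the normal path

   so a SIBLING_CALL_P insn is found, yet the block can still fall
   through, which is what *CAN_FALLTHRU reports.  */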
4132 expand_gimple_tailcall (basic_block bb
, gcall
*stmt
, bool *can_fallthru
)
4134 rtx_insn
*last2
, *last
;
4137 profile_probability probability
;
4139 last2
= last
= expand_gimple_stmt (stmt
);
4141 for (last
= NEXT_INSN (last
); last
; last
= NEXT_INSN (last
))
4142 if (CALL_P (last
) && SIBLING_CALL_P (last
))
4145 maybe_dump_rtl_for_gimple_stmt (stmt
, last2
);
4147 *can_fallthru
= true;
4151 /* ??? Wouldn't it be better to just reset any pending stack adjust?
4152 Any instructions emitted here are about to be deleted. */
4153 do_pending_stack_adjust ();
4155 /* Remove any non-eh, non-abnormal edges that don't go to exit. */
4156 /* ??? I.e. the fallthrough edge. HOWEVER! If there were to be
4157 EH or abnormal edges, we shouldn't have created a tail call in
4158 the first place. So it seems to me we should just be removing
4159 all edges here, or redirecting the existing fallthru edge to
4162 probability
= profile_probability::never ();
4164 for (ei
= ei_start (bb
->succs
); (e
= ei_safe_edge (ei
)); )
4166 if (!(e
->flags
& (EDGE_ABNORMAL
| EDGE_EH
)))
4168 if (e
->dest
!= EXIT_BLOCK_PTR_FOR_FN (cfun
))
4169 e
->dest
->count
-= e
->count ();
4170 probability
+= e
->probability
;
4177 /* This is somewhat ugly: the call_expr expander often emits instructions
4178 after the sibcall (to perform the function return). These confuse the
4179 find_many_sub_basic_blocks code, so we need to get rid of these. */
4180 last
= NEXT_INSN (last
);
4181 gcc_assert (BARRIER_P (last
));
4183 *can_fallthru
= false;
4184 while (NEXT_INSN (last
))
/* For instance, an sqrt builtin expander may expand an `if' with the
   sibcall in the `then' arm and a label for the `else' arm.  */
4188 if (LABEL_P (NEXT_INSN (last
)))
4190 *can_fallthru
= true;
4193 delete_insn (NEXT_INSN (last
));
4196 e
= make_edge (bb
, EXIT_BLOCK_PTR_FOR_FN (cfun
), EDGE_ABNORMAL
4198 e
->probability
= probability
;
4200 update_bb_for_insn (bb
);
4202 if (NEXT_INSN (last
))
4204 bb
= create_basic_block (NEXT_INSN (last
), get_last_insn (), bb
);
4207 if (BARRIER_P (last
))
4208 BB_END (bb
) = PREV_INSN (last
);
4211 maybe_dump_rtl_for_gimple_stmt (stmt
, last2
);
/* Return the difference between the floor and the truncated result of
   a signed division by OP1 with remainder MOD.  */

static rtx
floor_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (mod != 0 ? (op1 / mod < 0 ? -1 : 0) : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_LT (BImode,
			gen_rtx_DIV (mode, op1, mod),
			const0_rtx),
      constm1_rtx, const0_rtx),
     const0_rtx);
}
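/* Worked example (added for illustration): for the signed division
   -7 / 2 the truncated quotient is -3 with MOD = -1, whereas the floor
   quotient is -4.  MOD != 0 and OP1 / MOD = 2 / -1 < 0, so the value
   built above is -1, and -3 + -1 gives the required -4.  When MOD is 0
   the division was exact and the adjustment is 0.  */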
/* Return the difference between the ceil and the truncated result of
   a signed division by OP1 with remainder MOD.  */

static rtx
ceil_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (mod != 0 ? (op1 / mod > 0 ? 1 : 0) : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_GT (BImode,
			gen_rtx_DIV (mode, op1, mod),
			const0_rtx),
      const1_rtx, const0_rtx),
     const0_rtx);
}
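/* Worked example (added for illustration): for 7 / 2 the truncated
   quotient is 3 with MOD = 1, whereas the ceiling is 4.  MOD != 0 and
   OP1 / MOD = 2 / 1 > 0, so the adjustment is 1.  For -7 / 2 the
   truncated quotient -3 already equals the ceiling, and OP1 / MOD < 0
   makes the adjustment 0.  */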
/* Return the difference between the ceil and the truncated result of
   an unsigned division by OP1 with remainder MOD.  */

static rtx
ceil_udiv_adjust (machine_mode mode, rtx mod, rtx op1 ATTRIBUTE_UNUSED)
{
  /* (mod != 0 ? 1 : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_NE (BImode, mod, const0_rtx),
     const1_rtx, const0_rtx);
}
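/* Worked example (added for illustration): for the unsigned division
   7 / 2 the truncated quotient is 3 with MOD = 1; since MOD != 0 the
   adjustment is 1, giving the ceiling 4.  An exact division (MOD == 0)
   needs no adjustment.  */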
/* Return the difference between the rounded and the truncated result
   of a signed division by OP1 with remainder MOD.  Halfway cases are
   rounded away from zero, rather than to the nearest even number.  */

static rtx
round_sdiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (abs (mod) >= abs (op1) - abs (mod)
	? (op1 / mod > 0 ? 1 : -1)
	: 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_GE (BImode, gen_rtx_ABS (mode, mod),
		       gen_rtx_MINUS (mode,
				      gen_rtx_ABS (mode, op1),
				      gen_rtx_ABS (mode, mod))),
     gen_rtx_IF_THEN_ELSE
     (mode, gen_rtx_GT (BImode,
			gen_rtx_DIV (mode, op1, mod),
			const0_rtx),
      const1_rtx, constm1_rtx),
     const0_rtx);
}
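/* Worked example (added for illustration): for 7 / 2 the truncated
   quotient is 3 with MOD = 1; abs (MOD) = 1 >= abs (OP1) - abs (MOD) = 1,
   and OP1 / MOD > 0, so the adjustment is 1 and 3.5 rounds to 4.  For
   -7 / 2 (MOD = -1) the same test holds but OP1 / MOD < 0, so the
   adjustment is -1 and -3.5 rounds away from zero to -4.  */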
/* Return the difference between the rounded and the truncated result
   of an unsigned division by OP1 with remainder MOD.  Halfway cases
   are rounded away from zero, rather than to the nearest even
   number.  */

static rtx
round_udiv_adjust (machine_mode mode, rtx mod, rtx op1)
{
  /* (mod >= op1 - mod ? 1 : 0) */
  return gen_rtx_IF_THEN_ELSE
    (mode, gen_rtx_GE (BImode, mod,
		       gen_rtx_MINUS (mode, op1, mod)),
     const1_rtx, const0_rtx);
}
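/* Worked example (added for illustration): for the unsigned division
   7 / 2, MOD = 1 and OP1 - MOD = 1, so MOD >= OP1 - MOD holds and the
   adjustment is 1 (3.5 rounds up to 4).  For 9 / 4, MOD = 1 < 3, so the
   adjustment is 0 and the truncated quotient 2 is already the rounded
   result.  */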
4295 /* Convert X to MODE, that must be Pmode or ptr_mode, without emitting
4299 convert_debug_memory_address (scalar_int_mode mode
, rtx x
,
4302 #ifndef POINTERS_EXTEND_UNSIGNED
4303 gcc_assert (mode
== Pmode
4304 || mode
== targetm
.addr_space
.address_mode (as
));
4305 gcc_assert (GET_MODE (x
) == mode
|| GET_MODE (x
) == VOIDmode
);
4309 gcc_assert (targetm
.addr_space
.valid_pointer_mode (mode
, as
));
4311 if (GET_MODE (x
) == mode
|| GET_MODE (x
) == VOIDmode
)
4314 /* X must have some form of address mode already. */
4315 scalar_int_mode xmode
= as_a
<scalar_int_mode
> (GET_MODE (x
));
4316 if (GET_MODE_PRECISION (mode
) < GET_MODE_PRECISION (xmode
))
4317 x
= lowpart_subreg (mode
, x
, xmode
);
4318 else if (POINTERS_EXTEND_UNSIGNED
> 0)
4319 x
= gen_rtx_ZERO_EXTEND (mode
, x
);
4320 else if (!POINTERS_EXTEND_UNSIGNED
)
4321 x
= gen_rtx_SIGN_EXTEND (mode
, x
);
4324 switch (GET_CODE (x
))
4327 if ((SUBREG_PROMOTED_VAR_P (x
)
4328 || (REG_P (SUBREG_REG (x
)) && REG_POINTER (SUBREG_REG (x
)))
4329 || (GET_CODE (SUBREG_REG (x
)) == PLUS
4330 && REG_P (XEXP (SUBREG_REG (x
), 0))
4331 && REG_POINTER (XEXP (SUBREG_REG (x
), 0))
4332 && CONST_INT_P (XEXP (SUBREG_REG (x
), 1))))
4333 && GET_MODE (SUBREG_REG (x
)) == mode
)
4334 return SUBREG_REG (x
);
4337 temp
= gen_rtx_LABEL_REF (mode
, label_ref_label (x
));
4338 LABEL_REF_NONLOCAL_P (temp
) = LABEL_REF_NONLOCAL_P (x
);
4341 temp
= shallow_copy_rtx (x
);
4342 PUT_MODE (temp
, mode
);
4345 temp
= convert_debug_memory_address (mode
, XEXP (x
, 0), as
);
4347 temp
= gen_rtx_CONST (mode
, temp
);
4351 if (CONST_INT_P (XEXP (x
, 1)))
4353 temp
= convert_debug_memory_address (mode
, XEXP (x
, 0), as
);
4355 return gen_rtx_fmt_ee (GET_CODE (x
), mode
, temp
, XEXP (x
, 1));
4361 /* Don't know how to express ptr_extend as operation in debug info. */
4364 #endif /* POINTERS_EXTEND_UNSIGNED */
4369 /* Map from SSA_NAMEs to corresponding DEBUG_EXPR_DECLs created
4370 by avoid_deep_ter_for_debug. */
4372 static hash_map
<tree
, tree
> *deep_ter_debug_map
;
4374 /* Split too deep TER chains for debug stmts using debug temporaries. */
4377 avoid_deep_ter_for_debug (gimple
*stmt
, int depth
)
4379 use_operand_p use_p
;
4381 FOR_EACH_SSA_USE_OPERAND (use_p
, stmt
, iter
, SSA_OP_USE
)
4383 tree use
= USE_FROM_PTR (use_p
);
4384 if (TREE_CODE (use
) != SSA_NAME
|| SSA_NAME_IS_DEFAULT_DEF (use
))
4386 gimple
*g
= get_gimple_for_ssa_name (use
);
4389 if (depth
> 6 && !stmt_ends_bb_p (g
))
4391 if (deep_ter_debug_map
== NULL
)
4392 deep_ter_debug_map
= new hash_map
<tree
, tree
>;
4394 tree
&vexpr
= deep_ter_debug_map
->get_or_insert (use
);
4397 vexpr
= build_debug_expr_decl (TREE_TYPE (use
));
4398 gimple
*def_temp
= gimple_build_debug_bind (vexpr
, use
, g
);
4399 gimple_stmt_iterator gsi
= gsi_for_stmt (g
);
4400 gsi_insert_after (&gsi
, def_temp
, GSI_NEW_STMT
);
4401 avoid_deep_ter_for_debug (def_temp
, 0);
4404 avoid_deep_ter_for_debug (g
, depth
+ 1);
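/* Illustrative sketch (added; D#N below is just the usual notation for a
   debug temporary, the statements themselves are hypothetical): if a
   debug bind references x_9 and following the single-use (TERed)
   definitions of its operands nests more than six levels deep, the code
   above inserts

     # DEBUG D#3 => x_9

   immediately after x_9's definition and records the mapping in
   deep_ter_debug_map, so that later expansion of the debug bind can use
   D#3 instead of rebuilding the whole deep expression.  */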
4408 /* Return an RTX equivalent to the value of the parameter DECL. */
4411 expand_debug_parm_decl (tree decl
)
4413 rtx incoming
= DECL_INCOMING_RTL (decl
);
4416 && GET_MODE (incoming
) != BLKmode
4417 && ((REG_P (incoming
) && HARD_REGISTER_P (incoming
))
4418 || (MEM_P (incoming
)
4419 && REG_P (XEXP (incoming
, 0))
4420 && HARD_REGISTER_P (XEXP (incoming
, 0)))))
4422 rtx rtl
= gen_rtx_ENTRY_VALUE (GET_MODE (incoming
));
4424 #ifdef HAVE_window_save
4425 /* DECL_INCOMING_RTL uses the INCOMING_REGNO of parameter registers.
4426 If the target machine has an explicit window save instruction, the
4427 actual entry value is the corresponding OUTGOING_REGNO instead. */
4428 if (REG_P (incoming
)
4429 && OUTGOING_REGNO (REGNO (incoming
)) != REGNO (incoming
))
4431 = gen_rtx_REG_offset (incoming
, GET_MODE (incoming
),
4432 OUTGOING_REGNO (REGNO (incoming
)), 0);
4433 else if (MEM_P (incoming
))
4435 rtx reg
= XEXP (incoming
, 0);
4436 if (OUTGOING_REGNO (REGNO (reg
)) != REGNO (reg
))
4438 reg
= gen_raw_REG (GET_MODE (reg
), OUTGOING_REGNO (REGNO (reg
)));
4439 incoming
= replace_equiv_address_nv (incoming
, reg
);
4442 incoming
= copy_rtx (incoming
);
4446 ENTRY_VALUE_EXP (rtl
) = incoming
;
4451 && GET_MODE (incoming
) != BLKmode
4452 && !TREE_ADDRESSABLE (decl
)
4454 && (XEXP (incoming
, 0) == virtual_incoming_args_rtx
4455 || (GET_CODE (XEXP (incoming
, 0)) == PLUS
4456 && XEXP (XEXP (incoming
, 0), 0) == virtual_incoming_args_rtx
4457 && CONST_INT_P (XEXP (XEXP (incoming
, 0), 1)))))
4458 return copy_rtx (incoming
);
4463 /* Return an RTX equivalent to the value of the tree expression EXP. */
4466 expand_debug_expr (tree exp
)
4468 rtx op0
= NULL_RTX
, op1
= NULL_RTX
, op2
= NULL_RTX
;
4469 machine_mode mode
= TYPE_MODE (TREE_TYPE (exp
));
4470 machine_mode inner_mode
= VOIDmode
;
4471 int unsignedp
= TYPE_UNSIGNED (TREE_TYPE (exp
));
4473 scalar_int_mode op0_mode
, op1_mode
, addr_mode
;
4475 switch (TREE_CODE_CLASS (TREE_CODE (exp
)))
4477 case tcc_expression
:
4478 switch (TREE_CODE (exp
))
4483 case WIDEN_MULT_PLUS_EXPR
:
4484 case WIDEN_MULT_MINUS_EXPR
:
4487 case TRUTH_ANDIF_EXPR
:
4488 case TRUTH_ORIF_EXPR
:
4489 case TRUTH_AND_EXPR
:
4491 case TRUTH_XOR_EXPR
:
4494 case TRUTH_NOT_EXPR
:
4503 op2
= expand_debug_expr (TREE_OPERAND (exp
, 2));
4510 if (mode
== BLKmode
)
4512 op1
= expand_debug_expr (TREE_OPERAND (exp
, 1));
4515 switch (TREE_CODE (exp
))
4521 case WIDEN_LSHIFT_EXPR
:
4522 /* Ensure second operand isn't wider than the first one. */
4523 inner_mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 1)));
4524 if (is_a
<scalar_int_mode
> (inner_mode
, &op1_mode
)
4525 && (GET_MODE_UNIT_PRECISION (mode
)
4526 < GET_MODE_PRECISION (op1_mode
)))
4527 op1
= lowpart_subreg (GET_MODE_INNER (mode
), op1
, op1_mode
);
4536 if (mode
== BLKmode
)
4538 inner_mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
4539 op0
= expand_debug_expr (TREE_OPERAND (exp
, 0));
4544 case tcc_comparison
:
4545 unsignedp
= TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0)));
4553 case tcc_exceptional
:
4554 case tcc_declaration
:
4560 switch (TREE_CODE (exp
))
4563 if (!lookup_constant_def (exp
))
4565 if (strlen (TREE_STRING_POINTER (exp
)) + 1
4566 != (size_t) TREE_STRING_LENGTH (exp
))
4568 op0
= gen_rtx_CONST_STRING (Pmode
, TREE_STRING_POINTER (exp
));
4569 op0
= gen_rtx_MEM (BLKmode
, op0
);
4570 set_mem_attributes (op0
, exp
, 0);
4576 if (TREE_CODE (TREE_TYPE (exp
)) == BITINT_TYPE
4577 && TYPE_MODE (TREE_TYPE (exp
)) == BLKmode
)
4582 op0
= expand_expr (exp
, NULL_RTX
, mode
, EXPAND_INITIALIZER
);
4586 return immed_wide_int_const (poly_int_cst_value (exp
), mode
);
4589 gcc_assert (COMPLEX_MODE_P (mode
));
4590 op0
= expand_debug_expr (TREE_REALPART (exp
));
4591 op1
= expand_debug_expr (TREE_IMAGPART (exp
));
4592 return gen_rtx_CONCAT (mode
, op0
, op1
);
4594 case DEBUG_EXPR_DECL
:
4595 op0
= DECL_RTL_IF_SET (exp
);
4599 if (GET_MODE (op0
) != mode
)
4600 gcc_assert (VECTOR_TYPE_P (TREE_TYPE (exp
)));
4605 op0
= gen_rtx_DEBUG_EXPR (mode
);
4606 DEBUG_EXPR_TREE_DECL (op0
) = exp
;
4607 SET_DECL_RTL (exp
, op0
);
4617 op0
= DECL_RTL_IF_SET (exp
);
4619 /* This decl was probably optimized away. */
4621 /* At least label RTXen are sometimes replaced by
4622 NOTE_INSN_DELETED_LABEL. Any notes here are not
4623 handled by copy_rtx. */
4627 || DECL_EXTERNAL (exp
)
4628 || !TREE_STATIC (exp
)
4630 || DECL_HARD_REGISTER (exp
)
4631 || DECL_IN_CONSTANT_POOL (exp
)
4633 || symtab_node::get (exp
) == NULL
)
4636 op0
= make_decl_rtl_for_debug (exp
);
4638 || GET_CODE (XEXP (op0
, 0)) != SYMBOL_REF
4639 || SYMBOL_REF_DECL (XEXP (op0
, 0)) != exp
)
4642 else if (VAR_P (exp
)
4643 && is_global_var (exp
)
4644 && symtab_node::get (exp
) == NULL
)
4647 op0
= copy_rtx (op0
);
4649 if (GET_MODE (op0
) == BLKmode
4650 /* If op0 is not BLKmode, but mode is, adjust_mode
4651 below would ICE. While it is likely a FE bug,
4652 try to be robust here. See PR43166. */
4654 || (mode
== VOIDmode
&& GET_MODE (op0
) != VOIDmode
))
4656 gcc_assert (MEM_P (op0
));
4657 op0
= adjust_address_nv (op0
, mode
, 0);
4667 inner_mode
= GET_MODE (op0
);
4669 if (mode
== inner_mode
)
4672 if (inner_mode
== VOIDmode
)
4674 if (TREE_CODE (exp
) == SSA_NAME
)
4675 inner_mode
= TYPE_MODE (TREE_TYPE (exp
));
4677 inner_mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
4678 if (mode
== inner_mode
)
4682 if (FLOAT_MODE_P (mode
) && FLOAT_MODE_P (inner_mode
))
4684 if (GET_MODE_UNIT_BITSIZE (mode
)
4685 == GET_MODE_UNIT_BITSIZE (inner_mode
))
4686 op0
= simplify_gen_subreg (mode
, op0
, inner_mode
, 0);
4687 else if (GET_MODE_UNIT_BITSIZE (mode
)
4688 < GET_MODE_UNIT_BITSIZE (inner_mode
))
4689 op0
= simplify_gen_unary (FLOAT_TRUNCATE
, mode
, op0
, inner_mode
);
4691 op0
= simplify_gen_unary (FLOAT_EXTEND
, mode
, op0
, inner_mode
);
4693 else if (FLOAT_MODE_P (mode
))
4695 gcc_assert (TREE_CODE (exp
) != SSA_NAME
);
4696 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))))
4697 op0
= simplify_gen_unary (UNSIGNED_FLOAT
, mode
, op0
, inner_mode
);
4699 op0
= simplify_gen_unary (FLOAT
, mode
, op0
, inner_mode
);
4701 else if (FLOAT_MODE_P (inner_mode
))
4704 op0
= simplify_gen_unary (UNSIGNED_FIX
, mode
, op0
, inner_mode
);
4706 op0
= simplify_gen_unary (FIX
, mode
, op0
, inner_mode
);
4708 else if (GET_MODE_UNIT_PRECISION (mode
)
4709 == GET_MODE_UNIT_PRECISION (inner_mode
))
4710 op0
= lowpart_subreg (mode
, op0
, inner_mode
);
4711 else if (GET_MODE_UNIT_PRECISION (mode
)
4712 < GET_MODE_UNIT_PRECISION (inner_mode
))
4713 op0
= simplify_gen_unary (TRUNCATE
, mode
, op0
, inner_mode
);
4714 else if (UNARY_CLASS_P (exp
)
4715 ? TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0)))
4717 op0
= simplify_gen_unary (ZERO_EXTEND
, mode
, op0
, inner_mode
);
4719 op0
= simplify_gen_unary (SIGN_EXTEND
, mode
, op0
, inner_mode
);
4725 if (!is_gimple_mem_ref_addr (TREE_OPERAND (exp
, 0)))
4727 tree newexp
= fold_binary (MEM_REF
, TREE_TYPE (exp
),
4728 TREE_OPERAND (exp
, 0),
4729 TREE_OPERAND (exp
, 1));
4731 return expand_debug_expr (newexp
);
4735 inner_mode
= TYPE_MODE (TREE_TYPE (TREE_OPERAND (exp
, 0)));
4736 op0
= expand_debug_expr (TREE_OPERAND (exp
, 0));
4740 if (TREE_CODE (exp
) == MEM_REF
)
4742 if (GET_CODE (op0
) == DEBUG_IMPLICIT_PTR
4743 || (GET_CODE (op0
) == PLUS
4744 && GET_CODE (XEXP (op0
, 0)) == DEBUG_IMPLICIT_PTR
))
4745 /* (mem (debug_implicit_ptr)) might confuse aliasing.
4746 Instead just use get_inner_reference. */
4749 op1
= expand_debug_expr (TREE_OPERAND (exp
, 1));
4751 if (!op1
|| !poly_int_rtx_p (op1
, &offset
))
4754 op0
= plus_constant (inner_mode
, op0
, offset
);
4757 as
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
4759 op0
= convert_debug_memory_address (targetm
.addr_space
.address_mode (as
),
4761 if (op0
== NULL_RTX
)
4764 op0
= gen_rtx_MEM (mode
, op0
);
4765 set_mem_attributes (op0
, exp
, 0);
4766 if (TREE_CODE (exp
) == MEM_REF
4767 && !is_gimple_mem_ref_addr (TREE_OPERAND (exp
, 0)))
4768 set_mem_expr (op0
, NULL_TREE
);
4769 set_mem_addr_space (op0
, as
);
4773 case TARGET_MEM_REF
:
4774 if (TREE_CODE (TMR_BASE (exp
)) == ADDR_EXPR
4775 && !DECL_RTL_SET_P (TREE_OPERAND (TMR_BASE (exp
), 0)))
4778 op0
= expand_debug_expr
4779 (tree_mem_ref_addr (build_pointer_type (TREE_TYPE (exp
)), exp
));
4783 as
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp
, 0))));
4784 op0
= convert_debug_memory_address (targetm
.addr_space
.address_mode (as
),
4786 if (op0
== NULL_RTX
)
4789 op0
= gen_rtx_MEM (mode
, op0
);
4791 set_mem_attributes (op0
, exp
, 0);
4792 set_mem_addr_space (op0
, as
);
4798 case ARRAY_RANGE_REF
:
4803 case VIEW_CONVERT_EXPR
:
4806 poly_int64 bitsize
, bitpos
;
4808 int reversep
, volatilep
= 0;
4810 = get_inner_reference (exp
, &bitsize
, &bitpos
, &offset
, &mode1
,
4811 &unsignedp
, &reversep
, &volatilep
);
4814 if (known_eq (bitsize
, 0))
4817 orig_op0
= op0
= expand_debug_expr (tem
);
4824 machine_mode addrmode
, offmode
;
4829 op0
= XEXP (op0
, 0);
4830 addrmode
= GET_MODE (op0
);
4831 if (addrmode
== VOIDmode
)
4834 op1
= expand_debug_expr (offset
);
4838 offmode
= GET_MODE (op1
);
4839 if (offmode
== VOIDmode
)
4840 offmode
= TYPE_MODE (TREE_TYPE (offset
));
4842 if (addrmode
!= offmode
)
4843 op1
= lowpart_subreg (addrmode
, op1
, offmode
);
4845 /* Don't use offset_address here, we don't need a
4846 recognizable address, and we don't want to generate
4848 op0
= gen_rtx_MEM (mode
, simplify_gen_binary (PLUS
, addrmode
,
4854 if (mode1
== VOIDmode
)
4856 if (maybe_gt (bitsize
, MAX_BITSIZE_MODE_ANY_INT
))
4859 mode1
= smallest_int_mode_for_size (bitsize
);
4861 poly_int64 bytepos
= bits_to_bytes_round_down (bitpos
);
4862 if (maybe_ne (bytepos
, 0))
4864 op0
= adjust_address_nv (op0
, mode1
, bytepos
);
4865 bitpos
= num_trailing_bits (bitpos
);
4867 else if (known_eq (bitpos
, 0)
4868 && known_eq (bitsize
, GET_MODE_BITSIZE (mode
)))
4869 op0
= adjust_address_nv (op0
, mode
, 0);
4870 else if (GET_MODE (op0
) != mode1
)
4871 op0
= adjust_address_nv (op0
, mode1
, 0);
4873 op0
= copy_rtx (op0
);
4874 if (op0
== orig_op0
)
4875 op0
= shallow_copy_rtx (op0
);
4876 if (TREE_CODE (tem
) != SSA_NAME
)
4877 set_mem_attributes (op0
, exp
, 0);
4880 if (known_eq (bitpos
, 0) && mode
== GET_MODE (op0
))
4883 if (maybe_lt (bitpos
, 0))
4886 if (GET_MODE (op0
) == BLKmode
|| mode
== BLKmode
)
4890 if (multiple_p (bitpos
, BITS_PER_UNIT
, &bytepos
)
4891 && known_eq (bitsize
, GET_MODE_BITSIZE (mode1
)))
4893 machine_mode opmode
= GET_MODE (op0
);
4895 if (opmode
== VOIDmode
)
4896 opmode
= TYPE_MODE (TREE_TYPE (tem
));
4898 /* This condition may hold if we're expanding the address
4899 right past the end of an array that turned out not to
4900 be addressable (i.e., the address was only computed in
4901 debug stmts). The gen_subreg below would rightfully
4902 crash, and the address doesn't really exist, so just
4904 if (known_ge (bitpos
, GET_MODE_BITSIZE (opmode
)))
4907 if (multiple_p (bitpos
, GET_MODE_BITSIZE (mode
)))
4908 return simplify_gen_subreg (mode
, op0
, opmode
, bytepos
);
4911 return simplify_gen_ternary (SCALAR_INT_MODE_P (GET_MODE (op0
))
4912 && TYPE_UNSIGNED (TREE_TYPE (exp
))
4914 : ZERO_EXTRACT
, mode
,
4915 GET_MODE (op0
) != VOIDmode
4917 : TYPE_MODE (TREE_TYPE (tem
)),
4918 op0
, gen_int_mode (bitsize
, word_mode
),
4919 gen_int_mode (bitpos
, word_mode
));
4924 return simplify_gen_unary (ABS
, mode
, op0
, mode
);
4927 return simplify_gen_unary (NEG
, mode
, op0
, mode
);
4930 return simplify_gen_unary (NOT
, mode
, op0
, mode
);
4933 return simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
,
4935 ? UNSIGNED_FLOAT
: FLOAT
, mode
, op0
,
4938 case FIX_TRUNC_EXPR
:
4939 return simplify_gen_unary (unsignedp
? UNSIGNED_FIX
: FIX
, mode
, op0
,
4942 case POINTER_PLUS_EXPR
:
4943 /* For the rare target where pointers are not the same size as
4944 size_t, we need to check for mis-matched modes and correct
4947 && is_a
<scalar_int_mode
> (GET_MODE (op0
), &op0_mode
)
4948 && is_a
<scalar_int_mode
> (GET_MODE (op1
), &op1_mode
)
4949 && op0_mode
!= op1_mode
)
4951 if (GET_MODE_BITSIZE (op0_mode
) < GET_MODE_BITSIZE (op1_mode
)
4952 /* If OP0 is a partial mode, then we must truncate, even
4953 if it has the same bitsize as OP1 as GCC's
4954 representation of partial modes is opaque. */
4955 || (GET_MODE_CLASS (op0_mode
) == MODE_PARTIAL_INT
4956 && (GET_MODE_BITSIZE (op0_mode
)
4957 == GET_MODE_BITSIZE (op1_mode
))))
4958 op1
= simplify_gen_unary (TRUNCATE
, op0_mode
, op1
, op1_mode
);
4960 /* We always sign-extend, regardless of the signedness of
4961 the operand, because the operand is always unsigned
4962 here even if the original C expression is signed. */
4963 op1
= simplify_gen_unary (SIGN_EXTEND
, op0_mode
, op1
, op1_mode
);
4967 return simplify_gen_binary (PLUS
, mode
, op0
, op1
);
4970 case POINTER_DIFF_EXPR
:
4971 return simplify_gen_binary (MINUS
, mode
, op0
, op1
);
4974 return simplify_gen_binary (MULT
, mode
, op0
, op1
);
4977 case TRUNC_DIV_EXPR
:
4978 case EXACT_DIV_EXPR
:
4980 return simplify_gen_binary (UDIV
, mode
, op0
, op1
);
4982 return simplify_gen_binary (DIV
, mode
, op0
, op1
);
4984 case TRUNC_MOD_EXPR
:
4985 return simplify_gen_binary (unsignedp
? UMOD
: MOD
, mode
, op0
, op1
);
4987 case FLOOR_DIV_EXPR
:
4989 return simplify_gen_binary (UDIV
, mode
, op0
, op1
);
4992 rtx div
= simplify_gen_binary (DIV
, mode
, op0
, op1
);
4993 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
4994 rtx adj
= floor_sdiv_adjust (mode
, mod
, op1
);
4995 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
4998 case FLOOR_MOD_EXPR
:
5000 return simplify_gen_binary (UMOD
, mode
, op0
, op1
);
5003 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
5004 rtx adj
= floor_sdiv_adjust (mode
, mod
, op1
);
5005 adj
= simplify_gen_unary (NEG
, mode
,
5006 simplify_gen_binary (MULT
, mode
, adj
, op1
),
5008 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
5014 rtx div
= simplify_gen_binary (UDIV
, mode
, op0
, op1
);
5015 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
5016 rtx adj
= ceil_udiv_adjust (mode
, mod
, op1
);
5017 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
5021 rtx div
= simplify_gen_binary (DIV
, mode
, op0
, op1
);
5022 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
5023 rtx adj
= ceil_sdiv_adjust (mode
, mod
, op1
);
5024 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
5030 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
5031 rtx adj
= ceil_udiv_adjust (mode
, mod
, op1
);
5032 adj
= simplify_gen_unary (NEG
, mode
,
5033 simplify_gen_binary (MULT
, mode
, adj
, op1
),
5035 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
5039 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
5040 rtx adj
= ceil_sdiv_adjust (mode
, mod
, op1
);
5041 adj
= simplify_gen_unary (NEG
, mode
,
5042 simplify_gen_binary (MULT
, mode
, adj
, op1
),
5044 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
5047 case ROUND_DIV_EXPR
:
5050 rtx div
= simplify_gen_binary (UDIV
, mode
, op0
, op1
);
5051 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
5052 rtx adj
= round_udiv_adjust (mode
, mod
, op1
);
5053 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
5057 rtx div
= simplify_gen_binary (DIV
, mode
, op0
, op1
);
5058 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
5059 rtx adj
= round_sdiv_adjust (mode
, mod
, op1
);
5060 return simplify_gen_binary (PLUS
, mode
, div
, adj
);
5063 case ROUND_MOD_EXPR
:
5066 rtx mod
= simplify_gen_binary (UMOD
, mode
, op0
, op1
);
5067 rtx adj
= round_udiv_adjust (mode
, mod
, op1
);
5068 adj
= simplify_gen_unary (NEG
, mode
,
5069 simplify_gen_binary (MULT
, mode
, adj
, op1
),
5071 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
5075 rtx mod
= simplify_gen_binary (MOD
, mode
, op0
, op1
);
5076 rtx adj
= round_sdiv_adjust (mode
, mod
, op1
);
5077 adj
= simplify_gen_unary (NEG
, mode
,
5078 simplify_gen_binary (MULT
, mode
, adj
, op1
),
5080 return simplify_gen_binary (PLUS
, mode
, mod
, adj
);
5084 return simplify_gen_binary (ASHIFT
, mode
, op0
, op1
);
5088 return simplify_gen_binary (LSHIFTRT
, mode
, op0
, op1
);
5090 return simplify_gen_binary (ASHIFTRT
, mode
, op0
, op1
);
5093 return simplify_gen_binary (ROTATE
, mode
, op0
, op1
);
5096 return simplify_gen_binary (ROTATERT
, mode
, op0
, op1
);
5099 return simplify_gen_binary (unsignedp
? UMIN
: SMIN
, mode
, op0
, op1
);
5102 return simplify_gen_binary (unsignedp
? UMAX
: SMAX
, mode
, op0
, op1
);
5105 case TRUTH_AND_EXPR
:
5106 return simplify_gen_binary (AND
, mode
, op0
, op1
);
5110 return simplify_gen_binary (IOR
, mode
, op0
, op1
);
5113 case TRUTH_XOR_EXPR
:
5114 return simplify_gen_binary (XOR
, mode
, op0
, op1
);
5116 case TRUTH_ANDIF_EXPR
:
5117 return gen_rtx_IF_THEN_ELSE (mode
, op0
, op1
, const0_rtx
);
5119 case TRUTH_ORIF_EXPR
:
5120 return gen_rtx_IF_THEN_ELSE (mode
, op0
, const_true_rtx
, op1
);
5122 case TRUTH_NOT_EXPR
:
5123 return simplify_gen_relational (EQ
, mode
, inner_mode
, op0
, const0_rtx
);
5126 return simplify_gen_relational (unsignedp
? LTU
: LT
, mode
, inner_mode
,
5130 return simplify_gen_relational (unsignedp
? LEU
: LE
, mode
, inner_mode
,
5134 return simplify_gen_relational (unsignedp
? GTU
: GT
, mode
, inner_mode
,
5138 return simplify_gen_relational (unsignedp
? GEU
: GE
, mode
, inner_mode
,
5142 return simplify_gen_relational (EQ
, mode
, inner_mode
, op0
, op1
);
5145 return simplify_gen_relational (NE
, mode
, inner_mode
, op0
, op1
);
5147 case UNORDERED_EXPR
:
5148 return simplify_gen_relational (UNORDERED
, mode
, inner_mode
, op0
, op1
);
5151 return simplify_gen_relational (ORDERED
, mode
, inner_mode
, op0
, op1
);
5154 return simplify_gen_relational (UNLT
, mode
, inner_mode
, op0
, op1
);
5157 return simplify_gen_relational (UNLE
, mode
, inner_mode
, op0
, op1
);
5160 return simplify_gen_relational (UNGT
, mode
, inner_mode
, op0
, op1
);
5163 return simplify_gen_relational (UNGE
, mode
, inner_mode
, op0
, op1
);
5166 return simplify_gen_relational (UNEQ
, mode
, inner_mode
, op0
, op1
);
5169 return simplify_gen_relational (LTGT
, mode
, inner_mode
, op0
, op1
);
5172 return gen_rtx_IF_THEN_ELSE (mode
, op0
, op1
, op2
);
5175 gcc_assert (COMPLEX_MODE_P (mode
));
5176 if (GET_MODE (op0
) == VOIDmode
)
5177 op0
= gen_rtx_CONST (GET_MODE_INNER (mode
), op0
);
5178 if (GET_MODE (op1
) == VOIDmode
)
5179 op1
= gen_rtx_CONST (GET_MODE_INNER (mode
), op1
);
5180 return gen_rtx_CONCAT (mode
, op0
, op1
);
5183 if (GET_CODE (op0
) == CONCAT
)
5184 return gen_rtx_CONCAT (mode
, XEXP (op0
, 0),
5185 simplify_gen_unary (NEG
, GET_MODE_INNER (mode
),
5187 GET_MODE_INNER (mode
)));
5190 scalar_mode imode
= GET_MODE_INNER (mode
);
5195 re
= adjust_address_nv (op0
, imode
, 0);
5196 im
= adjust_address_nv (op0
, imode
, GET_MODE_SIZE (imode
));
5200 scalar_int_mode ifmode
;
5201 scalar_int_mode ihmode
;
5203 if (!int_mode_for_mode (mode
).exists (&ifmode
)
5204 || !int_mode_for_mode (imode
).exists (&ihmode
))
5206 halfsize
= GEN_INT (GET_MODE_BITSIZE (ihmode
));
5209 re
= gen_rtx_SUBREG (ifmode
, re
, 0);
5210 re
= gen_rtx_ZERO_EXTRACT (ihmode
, re
, halfsize
, const0_rtx
);
5211 if (imode
!= ihmode
)
5212 re
= gen_rtx_SUBREG (imode
, re
, 0);
5213 im
= copy_rtx (op0
);
5215 im
= gen_rtx_SUBREG (ifmode
, im
, 0);
5216 im
= gen_rtx_ZERO_EXTRACT (ihmode
, im
, halfsize
, halfsize
);
5217 if (imode
!= ihmode
)
5218 im
= gen_rtx_SUBREG (imode
, im
, 0);
5220 im
= gen_rtx_NEG (imode
, im
);
5221 return gen_rtx_CONCAT (mode
, re
, im
);
5225 op0
= expand_debug_expr (TREE_OPERAND (exp
, 0));
5226 if (!op0
|| !MEM_P (op0
))
5228 if ((TREE_CODE (TREE_OPERAND (exp
, 0)) == VAR_DECL
5229 || TREE_CODE (TREE_OPERAND (exp
, 0)) == PARM_DECL
5230 || TREE_CODE (TREE_OPERAND (exp
, 0)) == RESULT_DECL
)
5231 && (!TREE_ADDRESSABLE (TREE_OPERAND (exp
, 0))
5232 || target_for_debug_bind (TREE_OPERAND (exp
, 0))))
5233 return gen_rtx_DEBUG_IMPLICIT_PTR (mode
, TREE_OPERAND (exp
, 0));
5235 if (handled_component_p (TREE_OPERAND (exp
, 0)))
5237 poly_int64 bitoffset
, bitsize
, maxsize
, byteoffset
;
5240 = get_ref_base_and_extent (TREE_OPERAND (exp
, 0), &bitoffset
,
5241 &bitsize
, &maxsize
, &reverse
);
5243 || TREE_CODE (decl
) == PARM_DECL
5244 || TREE_CODE (decl
) == RESULT_DECL
)
5245 && (!TREE_ADDRESSABLE (decl
)
5246 || target_for_debug_bind (decl
))
5247 && multiple_p (bitoffset
, BITS_PER_UNIT
, &byteoffset
)
5248 && known_gt (bitsize
, 0)
5249 && known_eq (bitsize
, maxsize
))
5251 rtx base
= gen_rtx_DEBUG_IMPLICIT_PTR (mode
, decl
);
5252 return plus_constant (mode
, base
, byteoffset
);
5256 if (TREE_CODE (TREE_OPERAND (exp
, 0)) == MEM_REF
5257 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp
, 0), 0))
5260 op0
= expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0),
5263 && (GET_CODE (op0
) == DEBUG_IMPLICIT_PTR
5264 || (GET_CODE (op0
) == PLUS
5265 && GET_CODE (XEXP (op0
, 0)) == DEBUG_IMPLICIT_PTR
5266 && CONST_INT_P (XEXP (op0
, 1)))))
5268 op1
= expand_debug_expr (TREE_OPERAND (TREE_OPERAND (exp
, 0),
5271 if (!op1
|| !poly_int_rtx_p (op1
, &offset
))
5274 return plus_constant (mode
, op0
, offset
);
5281 as
= TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (exp
)));
5282 addr_mode
= SCALAR_INT_TYPE_MODE (TREE_TYPE (exp
));
5283 op0
= convert_debug_memory_address (addr_mode
, XEXP (op0
, 0), as
);
5289 unsigned HOST_WIDE_INT i
, nelts
;
5291 if (!VECTOR_CST_NELTS (exp
).is_constant (&nelts
))
5294 op0
= gen_rtx_CONCATN (mode
, rtvec_alloc (nelts
));
5296 for (i
= 0; i
< nelts
; ++i
)
5298 op1
= expand_debug_expr (VECTOR_CST_ELT (exp
, i
));
5301 XVECEXP (op0
, 0, i
) = op1
;
5308 if (TREE_CLOBBER_P (exp
))
5310 else if (TREE_CODE (TREE_TYPE (exp
)) == VECTOR_TYPE
)
5313 unsigned HOST_WIDE_INT nelts
;
5316 if (!TYPE_VECTOR_SUBPARTS (TREE_TYPE (exp
)).is_constant (&nelts
))
5317 goto flag_unsupported
;
5319 op0
= gen_rtx_CONCATN (mode
, rtvec_alloc (nelts
));
5321 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (exp
), i
, val
)
5323 op1
= expand_debug_expr (val
);
5326 XVECEXP (op0
, 0, i
) = op1
;
5331 op1
= expand_debug_expr
5332 (build_zero_cst (TREE_TYPE (TREE_TYPE (exp
))));
5337 for (; i
< nelts
; i
++)
5338 XVECEXP (op0
, 0, i
) = op1
;
5344 goto flag_unsupported
;
5347 /* ??? Maybe handle some builtins? */
5352 gimple
*g
= get_gimple_for_ssa_name (exp
);
5356 if (deep_ter_debug_map
)
5358 tree
*slot
= deep_ter_debug_map
->get (exp
);
5363 t
= gimple_assign_rhs_to_tree (g
);
5364 op0
= expand_debug_expr (t
);
5370 /* If this is a reference to an incoming value of
5371 parameter that is never used in the code or where the
5372 incoming value is never used in the code, use
5373 PARM_DECL's DECL_RTL if set. */
5374 if (SSA_NAME_IS_DEFAULT_DEF (exp
)
5375 && SSA_NAME_VAR (exp
)
5376 && TREE_CODE (SSA_NAME_VAR (exp
)) == PARM_DECL
5377 && has_zero_uses (exp
))
5379 op0
= expand_debug_parm_decl (SSA_NAME_VAR (exp
));
5382 op0
= expand_debug_expr (SSA_NAME_VAR (exp
));
5387 int part
= var_to_partition (SA
.map
, exp
);
5389 if (part
== NO_PARTITION
)
5392 gcc_assert (part
>= 0 && (unsigned)part
< SA
.map
->num_partitions
);
5394 op0
= copy_rtx (SA
.partition_to_pseudo
[part
]);
5402 /* Vector stuff. For most of the codes we don't have rtl codes. */
5403 case REALIGN_LOAD_EXPR
:
5405 case VEC_PACK_FIX_TRUNC_EXPR
:
5406 case VEC_PACK_FLOAT_EXPR
:
5407 case VEC_PACK_SAT_EXPR
:
5408 case VEC_PACK_TRUNC_EXPR
:
5409 case VEC_UNPACK_FIX_TRUNC_HI_EXPR
:
5410 case VEC_UNPACK_FIX_TRUNC_LO_EXPR
:
5411 case VEC_UNPACK_FLOAT_HI_EXPR
:
5412 case VEC_UNPACK_FLOAT_LO_EXPR
:
5413 case VEC_UNPACK_HI_EXPR
:
5414 case VEC_UNPACK_LO_EXPR
:
5415 case VEC_WIDEN_MULT_HI_EXPR
:
5416 case VEC_WIDEN_MULT_LO_EXPR
:
5417 case VEC_WIDEN_MULT_EVEN_EXPR
:
5418 case VEC_WIDEN_MULT_ODD_EXPR
:
5419 case VEC_WIDEN_LSHIFT_HI_EXPR
:
5420 case VEC_WIDEN_LSHIFT_LO_EXPR
:
5422 case VEC_DUPLICATE_EXPR
:
5423 case VEC_SERIES_EXPR
:
5428 case ADDR_SPACE_CONVERT_EXPR
:
5429 case FIXED_CONVERT_EXPR
:
5431 case WITH_SIZE_EXPR
:
5432 case BIT_INSERT_EXPR
:
5436 if (SCALAR_INT_MODE_P (GET_MODE (op0
))
5437 && SCALAR_INT_MODE_P (mode
))
5440 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
,
5442 ? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
,
5445 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
,
5447 ? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op1
,
5449 op0
= simplify_gen_binary (MULT
, mode
, op0
, op1
);
5450 return simplify_gen_binary (PLUS
, mode
, op0
, op2
);
5454 case WIDEN_MULT_EXPR
:
5455 case WIDEN_MULT_PLUS_EXPR
:
5456 case WIDEN_MULT_MINUS_EXPR
:
5457 if (SCALAR_INT_MODE_P (GET_MODE (op0
))
5458 && SCALAR_INT_MODE_P (mode
))
5460 inner_mode
= GET_MODE (op0
);
5461 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 0))))
5462 op0
= simplify_gen_unary (ZERO_EXTEND
, mode
, op0
, inner_mode
);
5464 op0
= simplify_gen_unary (SIGN_EXTEND
, mode
, op0
, inner_mode
);
5465 if (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
, 1))))
5466 op1
= simplify_gen_unary (ZERO_EXTEND
, mode
, op1
, inner_mode
);
5468 op1
= simplify_gen_unary (SIGN_EXTEND
, mode
, op1
, inner_mode
);
5469 op0
= simplify_gen_binary (MULT
, mode
, op0
, op1
);
5470 if (TREE_CODE (exp
) == WIDEN_MULT_EXPR
)
5472 else if (TREE_CODE (exp
) == WIDEN_MULT_PLUS_EXPR
)
5473 return simplify_gen_binary (PLUS
, mode
, op0
, op2
);
5475 return simplify_gen_binary (MINUS
, mode
, op2
, op0
);
5479 case MULT_HIGHPART_EXPR
:
5480 /* ??? Similar to the above. */
5483 case WIDEN_SUM_EXPR
:
5484 case WIDEN_LSHIFT_EXPR
:
5485 if (SCALAR_INT_MODE_P (GET_MODE (op0
))
5486 && SCALAR_INT_MODE_P (mode
))
5489 = simplify_gen_unary (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (exp
,
5491 ? ZERO_EXTEND
: SIGN_EXTEND
, mode
, op0
,
5493 return simplify_gen_binary (TREE_CODE (exp
) == WIDEN_LSHIFT_EXPR
5494 ? ASHIFT
: PLUS
, mode
, op0
, op1
);
5509 /* Return an RTX equivalent to the source bind value of the tree expression
5513 expand_debug_source_expr (tree exp
)
5516 machine_mode mode
= VOIDmode
, inner_mode
;
5518 switch (TREE_CODE (exp
))
5521 if (DECL_ABSTRACT_ORIGIN (exp
))
5522 return expand_debug_source_expr (DECL_ABSTRACT_ORIGIN (exp
));
5526 mode
= DECL_MODE (exp
);
5527 op0
= expand_debug_parm_decl (exp
);
5530 /* See if this isn't an argument that has been completely
5532 if (!DECL_RTL_SET_P (exp
)
5533 && !DECL_INCOMING_RTL (exp
)
5534 && DECL_ABSTRACT_ORIGIN (current_function_decl
))
5536 tree aexp
= DECL_ORIGIN (exp
);
5537 if (DECL_CONTEXT (aexp
)
5538 == DECL_ABSTRACT_ORIGIN (current_function_decl
))
5540 vec
<tree
, va_gc
> **debug_args
;
5543 debug_args
= decl_debug_args_lookup (current_function_decl
);
5544 if (debug_args
!= NULL
)
5546 for (ix
= 0; vec_safe_iterate (*debug_args
, ix
, &ddecl
);
5549 return gen_rtx_DEBUG_PARAMETER_REF (mode
, aexp
);
5559 if (op0
== NULL_RTX
)
5562 inner_mode
= GET_MODE (op0
);
5563 if (mode
== inner_mode
)
5566 if (FLOAT_MODE_P (mode
) && FLOAT_MODE_P (inner_mode
))
5568 if (GET_MODE_UNIT_BITSIZE (mode
)
5569 == GET_MODE_UNIT_BITSIZE (inner_mode
))
5570 op0
= simplify_gen_subreg (mode
, op0
, inner_mode
, 0);
5571 else if (GET_MODE_UNIT_BITSIZE (mode
)
5572 < GET_MODE_UNIT_BITSIZE (inner_mode
))
5573 op0
= simplify_gen_unary (FLOAT_TRUNCATE
, mode
, op0
, inner_mode
);
5575 op0
= simplify_gen_unary (FLOAT_EXTEND
, mode
, op0
, inner_mode
);
5577 else if (FLOAT_MODE_P (mode
))
5579 else if (FLOAT_MODE_P (inner_mode
))
5581 if (TYPE_UNSIGNED (TREE_TYPE (exp
)))
5582 op0
= simplify_gen_unary (UNSIGNED_FIX
, mode
, op0
, inner_mode
);
5584 op0
= simplify_gen_unary (FIX
, mode
, op0
, inner_mode
);
5586 else if (GET_MODE_UNIT_PRECISION (mode
)
5587 == GET_MODE_UNIT_PRECISION (inner_mode
))
5588 op0
= lowpart_subreg (mode
, op0
, inner_mode
);
5589 else if (GET_MODE_UNIT_PRECISION (mode
)
5590 < GET_MODE_UNIT_PRECISION (inner_mode
))
5591 op0
= simplify_gen_unary (TRUNCATE
, mode
, op0
, inner_mode
);
5592 else if (TYPE_UNSIGNED (TREE_TYPE (exp
)))
5593 op0
= simplify_gen_unary (ZERO_EXTEND
, mode
, op0
, inner_mode
);
5595 op0
= simplify_gen_unary (SIGN_EXTEND
, mode
, op0
, inner_mode
);
5600 /* Ensure INSN_VAR_LOCATION_LOC (insn) doesn't have unbound complexity.
5601 Allow 4 levels of rtl nesting for most rtl codes, and if we see anything
5602 deeper than that, create DEBUG_EXPRs and emit DEBUG_INSNs before INSN. */
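/* Illustrative sketch (added; the RTL below is made up): a location such as

     (plus (mult (reg A) (reg B))
	   (mem (plus (reg C) (const_int 8))))

   that exceeds the allowed nesting depth has the offending subexpression
   pulled out into a fresh debug temporary via make_debug_expr_from_rtl,
   and a VAR_LOCATION binding that temporary to the subexpression is
   emitted as a DEBUG_INSN just before INSN.  */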
5605 avoid_complex_debug_insns (rtx_insn
*insn
, rtx
*exp_p
, int depth
)
5609 if (exp
== NULL_RTX
)
5612 if ((OBJECT_P (exp
) && !MEM_P (exp
)) || GET_CODE (exp
) == CLOBBER
)
5617 /* Create DEBUG_EXPR (and DEBUG_EXPR_DECL). */
5618 rtx dval
= make_debug_expr_from_rtl (exp
);
5620 /* Emit a debug bind insn before INSN. */
5621 rtx bind
= gen_rtx_VAR_LOCATION (GET_MODE (exp
),
5622 DEBUG_EXPR_TREE_DECL (dval
), exp
,
5623 VAR_INIT_STATUS_INITIALIZED
);
5625 emit_debug_insn_before (bind
, insn
);
5630 const char *format_ptr
= GET_RTX_FORMAT (GET_CODE (exp
));
5632 for (i
= 0; i
< GET_RTX_LENGTH (GET_CODE (exp
)); i
++)
5633 switch (*format_ptr
++)
5636 avoid_complex_debug_insns (insn
, &XEXP (exp
, i
), depth
+ 1);
5641 for (j
= 0; j
< XVECLEN (exp
, i
); j
++)
5642 avoid_complex_debug_insns (insn
, &XVECEXP (exp
, i
, j
), depth
+ 1);
5650 /* Expand the _LOCs in debug insns. We run this after expanding all
5651 regular insns, so that any variables referenced in the function
5652 will have their DECL_RTLs set. */
5655 expand_debug_locations (void)
5658 rtx_insn
*last
= get_last_insn ();
5659 int save_strict_alias
= flag_strict_aliasing
;
5661 /* New alias sets while setting up memory attributes cause
5662 -fcompare-debug failures, even though it doesn't bring about any
5664 flag_strict_aliasing
= 0;
5666 for (insn
= get_insns (); insn
; insn
= NEXT_INSN (insn
))
5667 if (DEBUG_BIND_INSN_P (insn
))
5669 tree value
= (tree
)INSN_VAR_LOCATION_LOC (insn
);
5671 rtx_insn
*prev_insn
, *insn2
;
5674 if (value
== NULL_TREE
)
5678 if (INSN_VAR_LOCATION_STATUS (insn
)
5679 == VAR_INIT_STATUS_UNINITIALIZED
)
5680 val
= expand_debug_source_expr (value
);
5681 /* The avoid_deep_ter_for_debug function inserts
5682 debug bind stmts after SSA_NAME definition, with the
5683 SSA_NAME as the whole bind location. Disable temporarily
5684 expansion of that SSA_NAME into the DEBUG_EXPR_DECL
5685 being defined in this DEBUG_INSN. */
5686 else if (deep_ter_debug_map
&& TREE_CODE (value
) == SSA_NAME
)
5688 tree
*slot
= deep_ter_debug_map
->get (value
);
5691 if (*slot
== INSN_VAR_LOCATION_DECL (insn
))
5696 val
= expand_debug_expr (value
);
5698 *slot
= INSN_VAR_LOCATION_DECL (insn
);
5701 val
= expand_debug_expr (value
);
5702 gcc_assert (last
== get_last_insn ());
5706 val
= gen_rtx_UNKNOWN_VAR_LOC ();
5709 mode
= GET_MODE (INSN_VAR_LOCATION (insn
));
5711 gcc_assert (mode
== GET_MODE (val
)
5712 || (GET_MODE (val
) == VOIDmode
5713 && (CONST_SCALAR_INT_P (val
)
5714 || GET_CODE (val
) == CONST_FIXED
5715 || GET_CODE (val
) == LABEL_REF
)));
5718 INSN_VAR_LOCATION_LOC (insn
) = val
;
5719 prev_insn
= PREV_INSN (insn
);
5720 for (insn2
= insn
; insn2
!= prev_insn
; insn2
= PREV_INSN (insn2
))
5721 avoid_complex_debug_insns (insn2
, &INSN_VAR_LOCATION_LOC (insn2
), 0);
5724 flag_strict_aliasing
= save_strict_alias
;
/* Swap the operands of commutative operations so that the more
   expensive operand is expanded first.  */
5731 reorder_operands (basic_block bb
)
5733 unsigned int *lattice
; /* Hold cost of each statement. */
5734 unsigned int i
= 0, n
= 0;
5735 gimple_stmt_iterator gsi
;
5741 use_operand_p use_p
;
5742 gimple
*def0
, *def1
;
5744 /* Compute cost of each statement using estimate_num_insns. */
5745 stmts
= bb_seq (bb
);
5746 for (gsi
= gsi_start (stmts
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5748 stmt
= gsi_stmt (gsi
);
5749 if (!is_gimple_debug (stmt
))
5750 gimple_set_uid (stmt
, n
++);
5752 lattice
= XNEWVEC (unsigned int, n
);
5753 for (gsi
= gsi_start (stmts
); !gsi_end_p (gsi
); gsi_next (&gsi
))
5756 stmt
= gsi_stmt (gsi
);
5757 if (is_gimple_debug (stmt
))
5759 cost
= estimate_num_insns (stmt
, &eni_size_weights
);
5761 FOR_EACH_SSA_USE_OPERAND (use_p
, stmt
, iter
, SSA_OP_USE
)
5763 tree use
= USE_FROM_PTR (use_p
);
5765 if (TREE_CODE (use
) != SSA_NAME
)
5767 def_stmt
= get_gimple_for_ssa_name (use
);
5770 lattice
[i
] += lattice
[gimple_uid (def_stmt
)];
5773 if (!is_gimple_assign (stmt
)
5774 || !commutative_tree_code (gimple_assign_rhs_code (stmt
)))
5776 op0
= gimple_op (stmt
, 1);
5777 op1
= gimple_op (stmt
, 2);
5778 if (TREE_CODE (op0
) != SSA_NAME
5779 || TREE_CODE (op1
) != SSA_NAME
)
5781 /* Swap operands if the second one is more expensive. */
5782 def0
= get_gimple_for_ssa_name (op0
);
5783 def1
= get_gimple_for_ssa_name (op1
);
5787 if (!def0
|| lattice
[gimple_uid (def1
)] > lattice
[gimple_uid (def0
)])
5791 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
5793 fprintf (dump_file
, "Swap operands in stmt:\n");
5794 print_gimple_stmt (dump_file
, stmt
, 0, TDF_SLIM
);
5795 fprintf (dump_file
, "Cost left opnd=%d, right opnd=%d\n",
5796 def0
? lattice
[gimple_uid (def0
)] : 0,
5797 lattice
[gimple_uid (def1
)]);
5799 swap_ssa_operands (stmt
, gimple_assign_rhs1_ptr (stmt
),
5800 gimple_assign_rhs2_ptr (stmt
));
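/* Illustrative example (added; the statements are hypothetical GIMPLE):

     t1_1 = a_2 + 1;
     t2_3 = b_4 / c_5;
     r_6  = t1_1 + t2_3;

   the lattice cost accumulated for t2_3's (TERed) definition is larger
   than the one for t1_1, so the code above swaps the operands and the
   last statement becomes r_6 = t2_3 + t1_1, expanding the expensive
   operand first.  */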
5806 /* Expand basic block BB from GIMPLE trees to RTL. */
5809 expand_gimple_basic_block (basic_block bb
, bool disable_tail_calls
)
5811 gimple_stmt_iterator gsi
;
5813 gimple
*stmt
= NULL
;
5814 rtx_note
*note
= NULL
;
5818 bool nondebug_stmt_seen
= false;
5821 fprintf (dump_file
, "\n;; Generating RTL for gimple basic block %d\n",
5824 /* Note that since we are now transitioning from GIMPLE to RTL, we
5825 cannot use the gsi_*_bb() routines because they expect the basic
5826 block to be in GIMPLE, instead of RTL. Therefore, we need to
5827 access the BB sequence directly. */
5829 reorder_operands (bb
);
5830 stmts
= bb_seq (bb
);
5831 bb
->il
.gimple
.seq
= NULL
;
5832 bb
->il
.gimple
.phi_nodes
= NULL
;
5833 rtl_profile_for_bb (bb
);
5834 init_rtl_bb_info (bb
);
5835 bb
->flags
|= BB_RTL
;
/* Remove the RETURN_EXPR if we may fall through to the exit
   instead.  */
5839 gsi
= gsi_last (stmts
);
5840 if (!gsi_end_p (gsi
)
5841 && gimple_code (gsi_stmt (gsi
)) == GIMPLE_RETURN
)
5843 greturn
*ret_stmt
= as_a
<greturn
*> (gsi_stmt (gsi
));
5845 gcc_assert (single_succ_p (bb
));
5846 gcc_assert (single_succ (bb
) == EXIT_BLOCK_PTR_FOR_FN (cfun
));
5848 if (bb
->next_bb
== EXIT_BLOCK_PTR_FOR_FN (cfun
)
5849 && !gimple_return_retval (ret_stmt
))
5851 gsi_remove (&gsi
, false);
5852 single_succ_edge (bb
)->flags
|= EDGE_FALLTHRU
;
5856 gsi
= gsi_start (stmts
);
5857 if (!gsi_end_p (gsi
))
5859 stmt
= gsi_stmt (gsi
);
5860 if (gimple_code (stmt
) != GIMPLE_LABEL
)
5864 rtx_code_label
**elt
= lab_rtx_for_bb
->get (bb
);
5868 gcc_checking_assert (!note
);
5869 last
= get_last_insn ();
5873 expand_gimple_stmt (stmt
);
5880 BB_HEAD (bb
) = NEXT_INSN (last
);
5881 if (NOTE_P (BB_HEAD (bb
)))
5882 BB_HEAD (bb
) = NEXT_INSN (BB_HEAD (bb
));
5883 gcc_assert (LABEL_P (BB_HEAD (bb
)));
5884 note
= emit_note_after (NOTE_INSN_BASIC_BLOCK
, BB_HEAD (bb
));
5886 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
5889 BB_HEAD (bb
) = note
= emit_note (NOTE_INSN_BASIC_BLOCK
);
5892 NOTE_BASIC_BLOCK (note
) = bb
;
5894 for (; !gsi_end_p (gsi
); gsi_next (&gsi
))
5898 stmt
= gsi_stmt (gsi
);
5899 if (!is_gimple_debug (stmt
))
5900 nondebug_stmt_seen
= true;
5902 /* If this statement is a non-debug one, and we generate debug
5903 insns, then this one might be the last real use of a TERed
5904 SSA_NAME, but where there are still some debug uses further
5905 down. Expanding the current SSA name in such further debug
5906 uses by their RHS might lead to wrong debug info, as coalescing
5907 might make the operands of such RHS be placed into the same
5908 pseudo as something else. Like so:
5909 a_1 = a_0 + 1; // Assume a_1 is TERed and a_0 is dead
5913 As a_0 and a_2 don't overlap in lifetime, assume they are coalesced.
5914 If we now would expand a_1 by it's RHS (a_0 + 1) in the debug use,
5915 the write to a_2 would actually have clobbered the place which
5918 So, instead of that, we recognize the situation, and generate
5919 debug temporaries at the last real use of TERed SSA names:
5926 if (MAY_HAVE_DEBUG_BIND_INSNS
5928 && !is_gimple_debug (stmt
))
5934 location_t sloc
= curr_insn_location ();
5936 /* Look for SSA names that have their last use here (TERed
5937 names always have only one real use). */
5938 FOR_EACH_SSA_TREE_OPERAND (op
, stmt
, iter
, SSA_OP_USE
)
5939 if ((def
= get_gimple_for_ssa_name (op
)))
5941 imm_use_iterator imm_iter
;
5942 use_operand_p use_p
;
5943 bool have_debug_uses
= false;
5945 FOR_EACH_IMM_USE_FAST (use_p
, imm_iter
, op
)
5947 if (gimple_debug_bind_p (USE_STMT (use_p
)))
5949 have_debug_uses
= true;
5954 if (have_debug_uses
)
5956 /* OP is a TERed SSA name, with DEF its defining
5957 statement, and where OP is used in further debug
5958 instructions. Generate a debug temporary, and
5959 replace all uses of OP in debug insns with that
5962 tree value
= gimple_assign_rhs_to_tree (def
);
5963 tree vexpr
= build_debug_expr_decl (TREE_TYPE (value
));
5967 set_curr_insn_location (gimple_location (def
));
5970 mode
= DECL_MODE (value
);
5972 mode
= TYPE_MODE (TREE_TYPE (value
));
5973 /* FIXME: Is setting the mode really necessary? */
5974 SET_DECL_MODE (vexpr
, mode
);
5976 val
= gen_rtx_VAR_LOCATION
5977 (mode
, vexpr
, (rtx
)value
, VAR_INIT_STATUS_INITIALIZED
);
5979 emit_debug_insn (val
);
5981 FOR_EACH_IMM_USE_STMT (debugstmt
, imm_iter
, op
)
5983 if (!gimple_debug_bind_p (debugstmt
))
5986 FOR_EACH_IMM_USE_ON_STMT (use_p
, imm_iter
)
5987 SET_USE (use_p
, vexpr
);
5989 update_stmt (debugstmt
);
5993 set_curr_insn_location (sloc
);
5996 currently_expanding_gimple_stmt
= stmt
;
5998 /* Expand this statement, then evaluate the resulting RTL and
5999 fixup the CFG accordingly. */
6000 if (gimple_code (stmt
) == GIMPLE_COND
)
6002 new_bb
= expand_gimple_cond (bb
, as_a
<gcond
*> (stmt
));
6005 currently_expanding_gimple_stmt
= NULL
;
6009 else if (is_gimple_debug (stmt
))
6011 location_t sloc
= curr_insn_location ();
6012 gimple_stmt_iterator nsi
= gsi
;
6017 tree value
= NULL_TREE
;
6021 if (!gimple_debug_nonbind_marker_p (stmt
))
6023 if (gimple_debug_bind_p (stmt
))
6025 var
= gimple_debug_bind_get_var (stmt
);
6027 if (TREE_CODE (var
) != DEBUG_EXPR_DECL
6028 && TREE_CODE (var
) != LABEL_DECL
6029 && !target_for_debug_bind (var
))
6030 goto delink_debug_stmt
;
6032 if (DECL_P (var
) && !VECTOR_TYPE_P (TREE_TYPE (var
)))
6033 mode
= DECL_MODE (var
);
6035 mode
= TYPE_MODE (TREE_TYPE (var
));
6037 if (gimple_debug_bind_has_value_p (stmt
))
6038 value
= gimple_debug_bind_get_value (stmt
);
6040 val
= gen_rtx_VAR_LOCATION
6041 (mode
, var
, (rtx
)value
, VAR_INIT_STATUS_INITIALIZED
);
6043 else if (gimple_debug_source_bind_p (stmt
))
6045 var
= gimple_debug_source_bind_get_var (stmt
);
6047 value
= gimple_debug_source_bind_get_value (stmt
);
6049 if (!VECTOR_TYPE_P (TREE_TYPE (var
)))
6050 mode
= DECL_MODE (var
);
6052 mode
= TYPE_MODE (TREE_TYPE (var
));
6054 val
= gen_rtx_VAR_LOCATION (mode
, var
, (rtx
)value
,
6055 VAR_INIT_STATUS_UNINITIALIZED
);
/* If this function was first compiled with markers
   enabled, but they're now disabled (e.g. LTO), drop
   them on the floor.  */
6063 else if (gimple_debug_nonbind_marker_p (stmt
)
6064 && !MAY_HAVE_DEBUG_MARKER_INSNS
)
6065 goto delink_debug_stmt
;
6066 else if (gimple_debug_begin_stmt_p (stmt
))
6067 val
= GEN_RTX_DEBUG_MARKER_BEGIN_STMT_PAT ();
6068 else if (gimple_debug_inline_entry_p (stmt
))
6069 val
= GEN_RTX_DEBUG_MARKER_INLINE_ENTRY_PAT ();
6073 last
= get_last_insn ();
6075 set_curr_insn_location (gimple_location (stmt
));
6077 emit_debug_insn (val
);
6079 if (dump_file
&& (dump_flags
& TDF_DETAILS
))
6081 /* We can't dump the insn with a TREE where an RTX
6083 if (GET_CODE (val
) == VAR_LOCATION
)
6085 gcc_checking_assert (PAT_VAR_LOCATION_LOC (val
) == (rtx
)value
);
6086 PAT_VAR_LOCATION_LOC (val
) = const0_rtx
;
6088 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
6089 if (GET_CODE (val
) == VAR_LOCATION
)
6090 PAT_VAR_LOCATION_LOC (val
) = (rtx
)value
;
6094 /* In order not to generate too many debug temporaries,
6095 we delink all uses of debug statements we already expanded.
6096 Therefore debug statements between definition and real
6097 use of TERed SSA names will continue to use the SSA name,
6098 and not be replaced with debug temps. */
6099 delink_stmt_imm_use (stmt
);
6103 if (gsi_end_p (nsi
))
6105 stmt
= gsi_stmt (nsi
);
6106 if (!is_gimple_debug (stmt
))
6110 set_curr_insn_location (sloc
);
6114 gcall
*call_stmt
= dyn_cast
<gcall
*> (stmt
);
6116 && gimple_call_tail_p (call_stmt
)
6117 && disable_tail_calls
)
6118 gimple_call_set_tail (call_stmt
, false);
6120 if (call_stmt
&& gimple_call_tail_p (call_stmt
))
6123 new_bb
= expand_gimple_tailcall (bb
, call_stmt
, &can_fallthru
);
6130 currently_expanding_gimple_stmt
= NULL
;
6137 def_operand_p def_p
;
6138 def_p
= SINGLE_SSA_DEF_OPERAND (stmt
, SSA_OP_DEF
);
6142 /* Ignore this stmt if it is in the list of
6143 replaceable expressions. */
6145 && bitmap_bit_p (SA
.values
,
6146 SSA_NAME_VERSION (DEF_FROM_PTR (def_p
))))
6149 last
= expand_gimple_stmt (stmt
);
6150 maybe_dump_rtl_for_gimple_stmt (stmt
, last
);
6155 currently_expanding_gimple_stmt
= NULL
;
6157 /* Expand implicit goto and convert goto_locus. */
6158 FOR_EACH_EDGE (e
, ei
, bb
->succs
)
6160 if (e
->goto_locus
!= UNKNOWN_LOCATION
|| !nondebug_stmt_seen
)
6161 set_curr_insn_location (e
->goto_locus
);
6162 if ((e
->flags
& EDGE_FALLTHRU
) && e
->dest
!= bb
->next_bb
)
6164 emit_jump (label_rtx_for_bb (e
->dest
));
6165 e
->flags
&= ~EDGE_FALLTHRU
;
/* Expanded RTL can create a jump in the last instruction of a block.
   This might later be assumed to be a jump to the successor and break
   edge insertion.  We need to insert a dummy move to prevent this.
   PR41440.  */
6172 if (single_succ_p (bb
)
6173 && (single_succ_edge (bb
)->flags
& EDGE_FALLTHRU
)
6174 && (last
= get_last_insn ())
6176 || (DEBUG_INSN_P (last
)
6177 && JUMP_P (prev_nondebug_insn (last
)))))
6179 rtx dummy
= gen_reg_rtx (SImode
);
6180 emit_insn_after_noloc (gen_move_insn (dummy
, dummy
), last
, NULL
);
6183 do_pending_stack_adjust ();
6185 /* Find the block tail. The last insn in the block is the insn
6186 before a barrier and/or table jump insn. */
6187 last
= get_last_insn ();
6188 if (BARRIER_P (last
))
6189 last
= PREV_INSN (last
);
6190 if (JUMP_TABLE_DATA_P (last
))
6191 last
= PREV_INSN (PREV_INSN (last
));
6192 if (BARRIER_P (last
))
6193 last
= PREV_INSN (last
);
6196 update_bb_for_insn (bb
);
6202 /* Create a basic block for initialization code. */
6205 construct_init_block (void)
6207 basic_block init_block
, first_block
;
6211 /* Multiple entry points not supported yet. */
6212 gcc_assert (EDGE_COUNT (ENTRY_BLOCK_PTR_FOR_FN (cfun
)->succs
) == 1);
6213 init_rtl_bb_info (ENTRY_BLOCK_PTR_FOR_FN (cfun
));
6214 init_rtl_bb_info (EXIT_BLOCK_PTR_FOR_FN (cfun
));
6215 ENTRY_BLOCK_PTR_FOR_FN (cfun
)->flags
|= BB_RTL
;
6216 EXIT_BLOCK_PTR_FOR_FN (cfun
)->flags
|= BB_RTL
;
6218 e
= EDGE_SUCC (ENTRY_BLOCK_PTR_FOR_FN (cfun
), 0);
/* When the entry edge points to the first basic block, we don't need a
   jump; otherwise we have to jump to the proper target.  */
6222 if (e
&& e
->dest
!= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->next_bb
)
6224 tree label
= gimple_block_label (e
->dest
);
6226 emit_jump (jump_target_rtx (label
));
6230 flags
= EDGE_FALLTHRU
;
6232 init_block
= create_basic_block (NEXT_INSN (get_insns ()),
6234 ENTRY_BLOCK_PTR_FOR_FN (cfun
));
6235 init_block
->count
= ENTRY_BLOCK_PTR_FOR_FN (cfun
)->count
;
6236 add_bb_to_loop (init_block
, ENTRY_BLOCK_PTR_FOR_FN (cfun
)->loop_father
);
6239 first_block
= e
->dest
;
6240 redirect_edge_succ (e
, init_block
);
6241 make_single_succ_edge (init_block
, first_block
, flags
);
6244 make_single_succ_edge (init_block
, EXIT_BLOCK_PTR_FOR_FN (cfun
),
6247 update_bb_for_insn (init_block
);
6251 /* For each lexical block, set BLOCK_NUMBER to the depth at which it is
6252 found in the block tree. */
6255 set_block_levels (tree block
, int level
)
6259 BLOCK_NUMBER (block
) = level
;
6260 set_block_levels (BLOCK_SUBBLOCKS (block
), level
+ 1);
6261 block
= BLOCK_CHAIN (block
);
6265 /* Create a block containing landing pads and similar stuff. */
6268 construct_exit_block (void)
6270 rtx_insn
*head
= get_last_insn ();
6272 basic_block exit_block
;
6276 basic_block prev_bb
= EXIT_BLOCK_PTR_FOR_FN (cfun
)->prev_bb
;
6277 rtx_insn
*orig_end
= BB_END (prev_bb
);
6279 rtl_profile_for_bb (EXIT_BLOCK_PTR_FOR_FN (cfun
));
6281 /* Make sure the locus is set to the end of the function, so that
6282 epilogue line numbers and warnings are set properly. */
6283 if (LOCATION_LOCUS (cfun
->function_end_locus
) != UNKNOWN_LOCATION
)
6284 input_location
= cfun
->function_end_locus
;
6286 /* Generate rtl for function exit. */
6287 expand_function_end ();
6289 end
= get_last_insn ();
6292 /* While emitting the function end we could move end of the last basic
6294 BB_END (prev_bb
) = orig_end
;
6295 while (NEXT_INSN (head
) && NOTE_P (NEXT_INSN (head
)))
6296 head
= NEXT_INSN (head
);
/* But make sure exit_block starts with RETURN_LABEL, otherwise the
   basic-block count bookkeeping will be confused.  Any instructions
   before that label are emitted for the case where PREV_BB falls
   through into the exit block, so append those instructions to prev_bb
   in that case.  */
6301 if (NEXT_INSN (head
) != return_label
)
6303 while (NEXT_INSN (head
) != return_label
)
6305 if (!NOTE_P (NEXT_INSN (head
)))
6306 BB_END (prev_bb
) = NEXT_INSN (head
);
6307 head
= NEXT_INSN (head
);
6310 exit_block
= create_basic_block (NEXT_INSN (head
), end
, prev_bb
);
6311 exit_block
->count
= EXIT_BLOCK_PTR_FOR_FN (cfun
)->count
;
6312 add_bb_to_loop (exit_block
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->loop_father
);
6315 while (ix
< EDGE_COUNT (EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
))
6317 e
= EDGE_PRED (EXIT_BLOCK_PTR_FOR_FN (cfun
), ix
);
6318 if (!(e
->flags
& EDGE_ABNORMAL
))
6319 redirect_edge_succ (e
, exit_block
);
6324 e
= make_single_succ_edge (exit_block
, EXIT_BLOCK_PTR_FOR_FN (cfun
),
6326 FOR_EACH_EDGE (e2
, ei
, EXIT_BLOCK_PTR_FOR_FN (cfun
)->preds
)
6329 exit_block
->count
-= e2
->count ();
6331 update_bb_for_insn (exit_block
);
/* Helper function for discover_nonconstant_array_refs.
   Look for ARRAY_REF nodes with non-constant indexes and mark them
   addressable.  */

static tree
discover_nonconstant_array_refs_r (tree *tp, int *walk_subtrees,
                                   void *data)
{
  tree t = *tp;
  bitmap forced_stack_vars = (bitmap) ((walk_stmt_info *) data)->info;

  if (IS_TYPE_OR_DECL_P (t))
    *walk_subtrees = 0;
  else if (REFERENCE_CLASS_P (t) && TREE_THIS_VOLATILE (t))
    {
      t = get_base_address (t);
      if (t && DECL_P (t)
          && DECL_MODE (t) != BLKmode
          && !TREE_ADDRESSABLE (t))
        bitmap_set_bit (forced_stack_vars, DECL_UID (t));
      *walk_subtrees = 0;
    }
  else if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
    {
      while (((TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
              && is_gimple_min_invariant (TREE_OPERAND (t, 1))
              && (!TREE_OPERAND (t, 2)
                  || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
             || (TREE_CODE (t) == COMPONENT_REF
                 && (!TREE_OPERAND (t, 2)
                     || is_gimple_min_invariant (TREE_OPERAND (t, 2))))
             || TREE_CODE (t) == BIT_FIELD_REF
             || TREE_CODE (t) == REALPART_EXPR
             || TREE_CODE (t) == IMAGPART_EXPR
             || TREE_CODE (t) == VIEW_CONVERT_EXPR
             || CONVERT_EXPR_P (t))
        t = TREE_OPERAND (t, 0);

      if (TREE_CODE (t) == ARRAY_REF || TREE_CODE (t) == ARRAY_RANGE_REF)
        {
          t = get_base_address (t);
          if (t && DECL_P (t)
              && DECL_MODE (t) != BLKmode
              && !TREE_ADDRESSABLE (t))
            bitmap_set_bit (forced_stack_vars, DECL_UID (t));
        }

      *walk_subtrees = 0;
    }
  /* References of size POLY_INT_CST to a fixed-size object must go
     through memory.  It's more efficient to force that here than
     to create temporary slots on the fly.
     RTL expansion expects TARGET_MEM_REF to always address actual memory.
     Also, force to stack non-BLKmode vars accessed through VIEW_CONVERT_EXPR
     to a BLKmode type.  */
  else if (TREE_CODE (t) == TARGET_MEM_REF
           || (TREE_CODE (t) == MEM_REF
               && TYPE_SIZE (TREE_TYPE (t))
               && POLY_INT_CST_P (TYPE_SIZE (TREE_TYPE (t))))
           || (TREE_CODE (t) == VIEW_CONVERT_EXPR
               && TYPE_MODE (TREE_TYPE (t)) == BLKmode))
    {
      tree base = get_base_address (t);
      if (base
          && DECL_P (base)
          && !TREE_ADDRESSABLE (base)
          && DECL_MODE (base) != BLKmode
          && GET_MODE_SIZE (DECL_MODE (base)).is_constant ())
        bitmap_set_bit (forced_stack_vars, DECL_UID (base));
      *walk_subtrees = 0;
    }

  return NULL_TREE;
}
/* If there's a chance to get a pseudo for T, and it would be of float mode
   while the actual access is via an integer mode (lowered memcpy or similar
   access), then avoid the register expansion if the mode likely is not
   storage suitable for raw bits processing (like XFmode on i?86).  */

static void
avoid_type_punning_on_regs (tree t, bitmap forced_stack_vars)
{
  machine_mode access_mode = TYPE_MODE (TREE_TYPE (t));
  if (access_mode != BLKmode
      && !SCALAR_INT_MODE_P (access_mode))
    return;
  tree base = get_base_address (t);
  if (DECL_P (base)
      && !TREE_ADDRESSABLE (base)
      && FLOAT_MODE_P (DECL_MODE (base))
      && maybe_lt (GET_MODE_PRECISION (DECL_MODE (base)),
                   GET_MODE_BITSIZE (GET_MODE_INNER (DECL_MODE (base))))
      /* Double check in the expensive way we really would get a pseudo.  */
      && use_register_for_decl (base))
    bitmap_set_bit (forced_stack_vars, DECL_UID (base));
}
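
/* avoid_type_punning_on_regs is invoked from discover_nonconstant_array_refs
   below, for the LHS of statements that carry a virtual definition.  */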
/* RTL expansion is not able to compile array references with variable
   offsets for arrays stored in a single register.  Discover such
   expressions and mark variables as addressable to avoid this
   scenario.  */

static void
discover_nonconstant_array_refs (bitmap forced_stack_vars)
{
  basic_block bb;
  gimple_stmt_iterator gsi;

  walk_stmt_info wi = {};
  wi.info = forced_stack_vars;
  FOR_EACH_BB_FN (bb, cfun)
    for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
      {
        gimple *stmt = gsi_stmt (gsi);
        if (!is_gimple_debug (stmt))
          {
            walk_gimple_op (stmt, discover_nonconstant_array_refs_r, &wi);
            gcall *call = dyn_cast <gcall *> (stmt);
            if (call && gimple_call_internal_p (call))
              {
                tree cand = NULL_TREE;
                switch (gimple_call_internal_fn (call))
                  {
                  case IFN_LOAD_LANES:
                    /* The source must be a MEM.  */
                    cand = gimple_call_arg (call, 0);
                    break;
                  case IFN_STORE_LANES:
                    /* The destination must be a MEM.  */
                    cand = gimple_call_lhs (call);
                    break;
                  default:
                    break;
                  }
                if (cand)
                  cand = get_base_address (cand);
                if (cand
                    && DECL_P (cand)
                    && use_register_for_decl (cand))
                  bitmap_set_bit (forced_stack_vars, DECL_UID (cand));
              }
            if (gimple_vdef (stmt))
              {
                tree t = gimple_get_lhs (stmt);
                if (t && REFERENCE_CLASS_P (t))
                  avoid_type_punning_on_regs (t, forced_stack_vars);
              }
          }
      }
}
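
/* The FORCED_STACK_VARS bitmap computed here is consumed in
   pass_expand::execute: it is passed to expand_used_vars and used to
   temporarily mark PARM_DECLs and RESULT_DECLs as TREE_ADDRESSABLE before
   expand_function_start.  */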
/* This function sets crtl->args.internal_arg_pointer to a virtual
   register if DRAP is needed.  Local register allocator will replace
   virtual_incoming_args_rtx with the virtual register.  */

static void
expand_stack_alignment (void)
{
  rtx drap_rtx;
  unsigned int preferred_stack_boundary;

  if (! SUPPORTS_STACK_ALIGNMENT)
    return;

  if (cfun->calls_alloca
      || cfun->has_nonlocal_label
      || crtl->has_nonlocal_goto)
    crtl->need_drap = true;

  /* Call update_stack_boundary here again to update incoming stack
     boundary.  It may set incoming stack alignment to a different
     value after RTL expansion.  TARGET_FUNCTION_OK_FOR_SIBCALL may
     use the minimum incoming stack alignment to check if it is OK
     to perform sibcall optimization since sibcall optimization will
     only align the outgoing stack to incoming stack boundary.  */
  if (targetm.calls.update_stack_boundary)
    targetm.calls.update_stack_boundary ();

  /* The incoming stack frame has to be aligned at least at
     parm_stack_boundary.  */
  gcc_assert (crtl->parm_stack_boundary <= INCOMING_STACK_BOUNDARY);

  /* Update crtl->stack_alignment_estimated and use it later to align
     stack.  We check PREFERRED_STACK_BOUNDARY if there may be non-call
     exceptions since callgraph doesn't collect incoming stack alignment
     in this case.  */
  if (cfun->can_throw_non_call_exceptions
      && PREFERRED_STACK_BOUNDARY > crtl->preferred_stack_boundary)
    preferred_stack_boundary = PREFERRED_STACK_BOUNDARY;
  else
    preferred_stack_boundary = crtl->preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_estimated)
    crtl->stack_alignment_estimated = preferred_stack_boundary;
  if (preferred_stack_boundary > crtl->stack_alignment_needed)
    crtl->stack_alignment_needed = preferred_stack_boundary;

  gcc_assert (crtl->stack_alignment_needed
              <= crtl->stack_alignment_estimated);

  crtl->stack_realign_needed
    = INCOMING_STACK_BOUNDARY < crtl->stack_alignment_estimated;
  crtl->stack_realign_tried = crtl->stack_realign_needed;

  crtl->stack_realign_processed = true;

  /* Target has to redefine TARGET_GET_DRAP_RTX to support stack
     alignment.  */
  gcc_assert (targetm.calls.get_drap_rtx != NULL);
  drap_rtx = targetm.calls.get_drap_rtx ();

  /* stack_realign_drap and drap_rtx must match.  */
  gcc_assert ((stack_realign_drap != 0) == (drap_rtx != NULL));

  /* Do nothing if NULL is returned, which means DRAP is not needed.  */
  if (drap_rtx != NULL)
    {
      crtl->args.internal_arg_pointer = drap_rtx;

      /* Call fixup_tail_calls to clean up REG_EQUIV note if DRAP is
         needed.  */
      fixup_tail_calls ();
    }
}
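
/* expand_stack_alignment is called near the end of pass_expand::execute,
   once all updates to crtl->preferred_stack_boundary have been made.  */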
static void
expand_main_function (void)
{
#if (defined(INVOKE__main)				\
     || (!defined(HAS_INIT_SECTION)			\
	 && !defined(INIT_SECTION_ASM_OP)		\
	 && !defined(INIT_ARRAY_SECTION_ASM_OP)))
  emit_library_call (init_one_libfunc (NAME__MAIN), LCT_NORMAL, VOIDmode);
#endif
}
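
/* Called from pass_expand::execute only for the file-scope `main'
   function, before the stack protector guard is initialized.  */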
/* Expand code to initialize the stack_protect_guard.  This is invoked at
   the beginning of a function to be protected.  */

static void
stack_protect_prologue (void)
{
  tree guard_decl = targetm.stack_protect_guard ();
  rtx x, y;

  crtl->stack_protect_guard_decl = guard_decl;
  x = expand_normal (crtl->stack_protect_guard);

  if (targetm.have_stack_protect_combined_set () && guard_decl)
    {
      gcc_assert (DECL_P (guard_decl));
      y = DECL_RTL (guard_decl);

      /* Allow the target to compute address of Y and copy it to X without
	 leaking Y into a register.  This combined address + copy pattern
	 allows the target to prevent spilling of any intermediate results by
	 splitting it after register allocator.  */
      if (rtx_insn *insn = targetm.gen_stack_protect_combined_set (x, y))
	{
	  emit_insn (insn);
	  return;
	}
    }

  if (guard_decl)
    y = expand_normal (guard_decl);
  else
    y = const0_rtx;

  /* Allow the target to copy from Y to X without leaking Y into a
     register.  */
  if (targetm.have_stack_protect_set ())
    if (rtx_insn *insn = targetm.gen_stack_protect_set (x, y))
      {
	emit_insn (insn);
	return;
      }

  /* Otherwise do a straight move.  */
  emit_move_insn (x, y);
}
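
/* stack_protect_prologue runs after any call to __main so that the external
   guard declaration is already initialized; see pass_expand::execute.  */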
/* Translate the intermediate representation contained in the CFG
   from GIMPLE trees to RTL.

   We do conversion per basic block and preserve/update the tree CFG.
   This implies we have to do some magic as the CFG can simultaneously
   consist of basic blocks containing RTL and GIMPLE trees.  This can
   confuse the CFG hooks, so be careful to not manipulate the CFG during
   the expansion.  */

namespace {

const pass_data pass_data_expand =
{
  RTL_PASS, /* type */
  "expand", /* name */
  OPTGROUP_NONE, /* optinfo_flags */
  TV_EXPAND, /* tv_id */
  ( PROP_ssa | PROP_gimple_leh | PROP_cfg
    | PROP_gimple_lcx
    | PROP_gimple_lvec
    | PROP_gimple_lva), /* properties_required */
  PROP_rtl, /* properties_provided */
  ( PROP_ssa | PROP_gimple ), /* properties_destroyed */
  0, /* todo_flags_start */
  0, /* todo_flags_finish */
};

class pass_expand : public rtl_opt_pass
{
public:
  pass_expand (gcc::context *ctxt)
    : rtl_opt_pass (pass_data_expand, ctxt)
  {}

  /* opt_pass methods: */
  unsigned int execute (function *) final override;

}; // class pass_expand
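
/* The pass manager instantiates this pass through make_pass_expand,
   defined below.  */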
unsigned int
pass_expand::execute (function *fun)
{
  basic_block bb, init_block;
  edge_iterator ei;
  edge e;
  rtx_insn *var_seq, *var_ret_seq;
  unsigned i;

  timevar_push (TV_OUT_OF_SSA);
  rewrite_out_of_ssa (&SA);
  timevar_pop (TV_OUT_OF_SSA);
  SA.partition_to_pseudo = XCNEWVEC (rtx, SA.map->num_partitions);

  if (MAY_HAVE_DEBUG_BIND_STMTS && flag_tree_ter)
    {
      gimple_stmt_iterator gsi;
      FOR_EACH_BB_FN (bb, cfun)
	for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
	  if (gimple_debug_bind_p (gsi_stmt (gsi)))
	    avoid_deep_ter_for_debug (gsi_stmt (gsi), 0);
    }

  /* Mark arrays indexed with non-constant indices with TREE_ADDRESSABLE.  */
  auto_bitmap forced_stack_vars;
  discover_nonconstant_array_refs (forced_stack_vars);

  /* Make sure all values used by the optimization passes have sane
     defaults.  */
  reg_renumber = 0;

  /* Some backends want to know that we are expanding to RTL.  */
  currently_expanding_to_rtl = 1;
  /* Dominators are not kept up-to-date as we may create new basic blocks.  */
  free_dominance_info (CDI_DOMINATORS);

  rtl_profile_for_bb (ENTRY_BLOCK_PTR_FOR_FN (fun));

  insn_locations_init ();
  if (!DECL_IS_UNDECLARED_BUILTIN (current_function_decl))
    {
      /* Eventually, all FEs should explicitly set function_start_locus.  */
      if (LOCATION_LOCUS (fun->function_start_locus) == UNKNOWN_LOCATION)
	set_curr_insn_location
	  (DECL_SOURCE_LOCATION (current_function_decl));
      else
	set_curr_insn_location (fun->function_start_locus);
    }
  else
    set_curr_insn_location (UNKNOWN_LOCATION);
  prologue_location = curr_insn_location ();

#ifdef INSN_SCHEDULING
  init_sched_attrs ();
#endif

  /* Make sure the first insn is a note even if we don't want linenums.
     This makes sure the first insn will never be deleted.
     Also, final expects a note to appear there.  */
  emit_note (NOTE_INSN_DELETED);

  targetm.expand_to_rtl_hook ();
  crtl->init_stack_alignment ();
  fun->cfg->max_jumptable_ents = 0;

  /* Resolve the function section.  Some targets, like ARM EABI, rely on
     knowledge of the function section at expansion time to predict the
     distance of calls.  */
  resolve_unique_section (current_function_decl, 0, flag_function_sections);

  /* Expand the variables recorded during gimple lowering.  */
  timevar_push (TV_VAR_EXPAND);
  start_sequence ();

  var_ret_seq = expand_used_vars (forced_stack_vars);

  var_seq = get_insns ();
  end_sequence ();
  timevar_pop (TV_VAR_EXPAND);

  /* Honor stack protection warnings.  */
  if (warn_stack_protect)
    {
      if (fun->calls_alloca)
	warning (OPT_Wstack_protector,
		 "stack protector not protecting local variables: "
		 "variable length buffer");
      if (has_short_buffer && !crtl->stack_protect_guard)
	warning (OPT_Wstack_protector,
		 "stack protector not protecting function: "
		 "all local arrays are less than %d bytes long",
		 (int) param_ssp_buffer_size);
    }
  /* Temporarily mark PARM_DECLs and RESULT_DECLs we need to expand to
     memory addressable so expand_function_start can emit the required
     copies.  */
  auto_vec<tree, 16> marked_parms;
  for (tree parm = DECL_ARGUMENTS (current_function_decl); parm;
       parm = DECL_CHAIN (parm))
    if (!TREE_ADDRESSABLE (parm)
	&& bitmap_bit_p (forced_stack_vars, DECL_UID (parm)))
      {
	TREE_ADDRESSABLE (parm) = 1;
	marked_parms.safe_push (parm);
      }
  if (DECL_RESULT (current_function_decl)
      && !TREE_ADDRESSABLE (DECL_RESULT (current_function_decl))
      && bitmap_bit_p (forced_stack_vars,
		       DECL_UID (DECL_RESULT (current_function_decl))))
    {
      TREE_ADDRESSABLE (DECL_RESULT (current_function_decl)) = 1;
      marked_parms.safe_push (DECL_RESULT (current_function_decl));
    }

  /* Set up parameters and prepare for return, for the function.  */
  expand_function_start (current_function_decl);

  /* Clear TREE_ADDRESSABLE again.  */
  while (!marked_parms.is_empty ())
    TREE_ADDRESSABLE (marked_parms.pop ()) = 0;

  /* If we emitted any instructions for setting up the variables,
     emit them before the FUNCTION_START note.  */
  if (var_seq)
    {
      emit_insn_before (var_seq, parm_birth_insn);

      /* In expand_function_end we'll insert the alloca save/restore
	 before parm_birth_insn.  We've just inserted an alloca call.
	 Adjust the pointer to match.  */
      parm_birth_insn = var_seq;
    }

  /* Now propagate the RTL assignment of each partition to the
     underlying var of each SSA_NAME.  */
  tree name;

  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      /* We might have generated new SSA names in
	 update_alias_info_with_stack_vars.  They will have a NULL
	 defining statement, and won't be part of the partitioning,
	 so ignore those.  */
      if (!SSA_NAME_DEF_STMT (name))
	continue;

      adjust_one_expanded_partition_var (name);
    }

  /* Clean up RTL of variables that straddle across multiple
     partitions, and check that the rtl of any PARM_DECLs that are not
     cleaned up is that of their default defs.  */
  FOR_EACH_SSA_NAME (i, name, cfun)
    {
      int part;

      /* We might have generated new SSA names in
	 update_alias_info_with_stack_vars.  They will have a NULL
	 defining statement, and won't be part of the partitioning,
	 so ignore those.  */
      if (!SSA_NAME_DEF_STMT (name))
	continue;
      part = var_to_partition (SA.map, name);
      if (part == NO_PARTITION)
	continue;

      /* If this decl was marked as living in multiple places, reset
	 this now to NULL.  */
      tree var = SSA_NAME_VAR (name);
      if (var && DECL_RTL_IF_SET (var) == pc_rtx)
	SET_DECL_RTL (var, NULL);
      /* Check that the pseudos chosen by assign_parms are those of
	 the corresponding default defs.  */
      else if (SSA_NAME_IS_DEFAULT_DEF (name)
	       && (TREE_CODE (var) == PARM_DECL
		   || TREE_CODE (var) == RESULT_DECL))
	{
	  rtx in = DECL_RTL_IF_SET (var);
	  gcc_assert (in);
	  rtx out = SA.partition_to_pseudo[part];
	  gcc_assert (in == out);

	  /* Now reset VAR's RTL to IN, so that the _EXPR attrs match
	     those expected by debug backends for each parm and for
	     the result.  This is particularly important for stabs,
	     whose register elimination from parm's DECL_RTL may cause
	     -fcompare-debug differences as SET_DECL_RTL changes reg's
	     attrs.  So, make sure the RTL already has the parm as the
	     EXPR, so that it won't change.  */
	  SET_DECL_RTL (var, NULL_RTX);
	  if (MEM_P (in))
	    set_mem_attributes (in, var, true);
	  SET_DECL_RTL (var, in);
	}
    }
  /* If this function is `main', emit a call to `__main'
     to run global initializers, etc.  */
  if (DECL_NAME (current_function_decl)
      && MAIN_NAME_P (DECL_NAME (current_function_decl))
      && DECL_FILE_SCOPE_P (current_function_decl))
    expand_main_function ();

  /* Initialize the stack_protect_guard field.  This must happen after the
     call to __main (if any) so that the external decl is initialized.  */
  if (crtl->stack_protect_guard && targetm.stack_protect_runtime_enabled_p ())
    stack_protect_prologue ();

  expand_phi_nodes (&SA);

  /* Release any stale SSA redirection data.  */
  redirect_edge_var_map_empty ();

  /* Register rtl specific functions for cfg.  */
  rtl_register_cfg_hooks ();

  init_block = construct_init_block ();

  /* Clear EDGE_EXECUTABLE on the entry edge(s).  It is cleaned from the
     remaining edges later.  */
  FOR_EACH_EDGE (e, ei, ENTRY_BLOCK_PTR_FOR_FN (fun)->succs)
    e->flags &= ~EDGE_EXECUTABLE;

  /* If the function has too many markers, drop them while expanding.  */
  if (cfun->debug_marker_count
      >= param_max_debug_marker_count)
    cfun->debug_nonbind_markers = false;

  lab_rtx_for_bb = new hash_map<basic_block, rtx_code_label *>;
  FOR_BB_BETWEEN (bb, init_block->next_bb, EXIT_BLOCK_PTR_FOR_FN (fun),
		  next_bb)
    bb = expand_gimple_basic_block (bb, var_ret_seq != NULL_RTX);

  if (MAY_HAVE_DEBUG_BIND_INSNS)
    expand_debug_locations ();

  if (deep_ter_debug_map)
    {
      delete deep_ter_debug_map;
      deep_ter_debug_map = NULL;
    }

  /* Free stuff we no longer need after GIMPLE optimizations.  */
  free_dominance_info (CDI_DOMINATORS);
  free_dominance_info (CDI_POST_DOMINATORS);
  delete_tree_cfg_annotations (fun);

  timevar_push (TV_OUT_OF_SSA);
  finish_out_of_ssa (&SA);
  timevar_pop (TV_OUT_OF_SSA);

  timevar_push (TV_POST_EXPAND);
  /* We are no longer in SSA form.  */
  fun->gimple_df->in_ssa_p = false;
  loops_state_clear (LOOP_CLOSED_SSA);

  /* Expansion is used by optimization passes too; set maybe_hot_insn_p
     conservatively to true until they are all profile aware.  */
  delete lab_rtx_for_bb;
  free_histograms (fun);

  construct_exit_block ();
  insn_locations_finalize ();

  if (var_ret_seq)
    {
      rtx_insn *after = return_label;
      rtx_insn *next = NEXT_INSN (after);
      if (next && NOTE_INSN_BASIC_BLOCK_P (next))
	after = next;
      emit_insn_after (var_ret_seq, after);
    }

  if (hwasan_sanitize_stack_p ())
    hwasan_maybe_emit_frame_base_init ();
  /* Zap the tree EH table.  */
  set_eh_throw_stmt_table (fun, NULL);

  /* We need JUMP_LABEL to be set in order to redirect jumps, and hence
     split edges which edge insertions might do.  */
  rebuild_jump_labels (get_insns ());

  /* If we have a single successor to the entry block, put the pending insns
     after parm birth, but before NOTE_INSN_FUNCTION_BEG.  */
  if (single_succ_p (ENTRY_BLOCK_PTR_FOR_FN (fun)))
    {
      edge e = single_succ_edge (ENTRY_BLOCK_PTR_FOR_FN (fun));
      if (e->insns.r)
	{
	  rtx_insn *insns = e->insns.r;
	  e->insns.r = NULL;
	  rebuild_jump_labels_chain (insns);
	  if (NOTE_P (parm_birth_insn)
	      && NOTE_KIND (parm_birth_insn) == NOTE_INSN_FUNCTION_BEG)
	    emit_insn_before_noloc (insns, parm_birth_insn, e->dest);
	  else
	    emit_insn_after_noloc (insns, parm_birth_insn, e->dest);
	}
    }

  /* Otherwise, as well as for other edges, take the usual way.  */
  commit_edge_insertions ();

  /* We're done expanding trees to RTL.  */
  currently_expanding_to_rtl = 0;

  flush_mark_addressable_queue ();

  FOR_BB_BETWEEN (bb, ENTRY_BLOCK_PTR_FOR_FN (fun)->next_bb,
		  EXIT_BLOCK_PTR_FOR_FN (fun), next_bb)
    {
      edge e;
      edge_iterator ei;
      for (ei = ei_start (bb->succs); (e = ei_safe_edge (ei)); )
	{
	  /* Clear EDGE_EXECUTABLE.  This flag is never used in the
	     backend.  */
	  e->flags &= ~EDGE_EXECUTABLE;

	  /* At the moment not all abnormal edges match the RTL
	     representation.  It is safe to remove them here as
	     find_many_sub_basic_blocks will rediscover them.
	     In the future we should get this fixed properly.  */
	  if ((e->flags & EDGE_ABNORMAL)
	      && !(e->flags & EDGE_SIBCALL))
	    remove_edge (e);
	  else
	    ei_next (&ei);
	}
    }

  auto_sbitmap blocks (last_basic_block_for_fn (fun));
  bitmap_ones (blocks);
  find_many_sub_basic_blocks (blocks);
  purge_all_dead_edges ();

  /* After initial rtl generation, call back to finish generating
     exception support code.  We need to do this before cleaning up
     the CFG as the code does not expect dead landing pads.  */
  if (fun->eh->region_tree != NULL)
    finish_eh_generation ();

  /* Call expand_stack_alignment after finishing all
     updates to crtl->preferred_stack_boundary.  */
  expand_stack_alignment ();

  /* Fixup REG_EQUIV notes in the prologue if there are tailcalls in this
     function.  */
  if (crtl->tail_call_emit)
    fixup_tail_calls ();

  HOST_WIDE_INT patch_area_size, patch_area_entry;
  parse_and_check_patch_area (flag_patchable_function_entry, false,
			      &patch_area_size, &patch_area_entry);

  tree patchable_function_entry_attr
    = lookup_attribute ("patchable_function_entry",
			DECL_ATTRIBUTES (cfun->decl));
  if (patchable_function_entry_attr)
    {
      tree pp_val = TREE_VALUE (patchable_function_entry_attr);
      tree patchable_function_entry_value1 = TREE_VALUE (pp_val);

      patch_area_size = tree_to_uhwi (patchable_function_entry_value1);
      patch_area_entry = 0;
      if (TREE_CHAIN (pp_val) != NULL_TREE)
	{
	  tree patchable_function_entry_value2
	    = TREE_VALUE (TREE_CHAIN (pp_val));
	  patch_area_entry = tree_to_uhwi (patchable_function_entry_value2);
	}
    }

  if (patch_area_entry > patch_area_size)
    {
      if (patch_area_size > 0)
	warning (OPT_Wattributes,
		 "patchable function entry %wu exceeds size %wu",
		 patch_area_entry, patch_area_size);
      patch_area_entry = 0;
    }

  crtl->patch_area_size = patch_area_size;
  crtl->patch_area_entry = patch_area_entry;
  /* BB subdivision may have created basic blocks that are only reachable
     from unlikely bbs but not marked as such in the profile.  */
  if (optimize)
    propagate_unlikely_bbs_forward ();

  /* Remove unreachable blocks, otherwise we cannot compute dominators
     which are needed for loop state verification.  As a side-effect
     this also compacts blocks.
     ???  We cannot remove trivially dead insns here as for example
     the DRAP reg on i?86 is not magically live at this point.
     gcc.c-torture/execute/ipa-sra-2.c execution, -Os -m32 fails otherwise.  */
  cleanup_cfg (CLEANUP_NO_INSN_DEL);

  checking_verify_flow_info ();

  /* Initialize pseudos allocated for hard registers.  */
  emit_initial_value_sets ();

  /* And finally unshare all RTL.  */
  unshare_all_rtl ();

  /* There's no need to defer outputting this function any more; we
     know we want to output it.  */
  DECL_DEFER_OUTPUT (current_function_decl) = 0;

  /* Now that we're done expanding trees to RTL, we shouldn't have any
     more CONCATs anywhere.  */
  generating_concat_p = 0;

  if (dump_file)
    {
      fprintf (dump_file,
	       "\n\n;;\n;; Full RTL generated for this function:\n;;\n");
      /* And the pass manager will dump RTL for us.  */
    }

  /* If we're emitting a nested function, make sure its parent gets
     emitted as well.  Doing otherwise confuses debug info.  */
  {
    tree parent;
    for (parent = DECL_CONTEXT (current_function_decl);
	 parent != NULL_TREE;
	 parent = get_containing_scope (parent))
      if (TREE_CODE (parent) == FUNCTION_DECL)
	TREE_SYMBOL_REFERENCED (DECL_ASSEMBLER_NAME (parent)) = 1;
  }

  TREE_ASM_WRITTEN (current_function_decl) = 1;

  /* After expanding, the return labels are no longer needed.  */
  return_label = NULL;
  naked_return_label = NULL;

  /* After expanding, the tm_restart map is no longer needed.  */
  if (fun->gimple_df->tm_restart)
    fun->gimple_df->tm_restart = NULL;

  /* Tag the blocks with a depth number so that change_scope can find
     the common parent easily.  */
  set_block_levels (DECL_INITIAL (fun->decl), 0);
  default_rtl_profile ();

  /* For -dx discard loops now, otherwise IL verify in clean_state will
     ICE.  */
  if (rtl_dump_and_exit)
    {
      cfun->curr_properties &= ~PROP_loops;
      loop_optimizer_finalize ();
    }

  timevar_pop (TV_POST_EXPAND);

  return 0;
}
} // anon namespace

rtl_opt_pass *
make_pass_expand (gcc::context *ctxt)
{
  return new pass_expand (ctxt);
}